diff --git "a/valid.csv" "b/valid.csv" new file mode 100644--- /dev/null +++ "b/valid.csv" @@ -0,0 +1,3001 @@ +,source,target +0,"public void itemClick ( ItemClickEvent event ) { if ( event . isDoubleClick ( ) ) { } } +","public void itemClick ( ItemClickEvent event ) { if ( event . isDoubleClick ( ) ) { log . log ( ""Double Click "" + event . getItemId ( ) ) ; } } +" +1,"public void appendJsonLog ( ActionData actionData ) { if ( isJsonLogActive ( ) ) { String dir = ""gamelogsJson"" ; File saveDir = new File ( dir ) ; if ( ! saveDir . exists ( ) ) { saveDir . mkdirs ( ) ; } actionData . sessionId = getSessionId ( ) ; String logFileName = dir + File . separator + ""game-"" + actionData . gameId + "".json"" ; try ( PrintWriter out = new PrintWriter ( new BufferedWriter ( new FileWriter ( logFileName , true ) ) ) ) { out . println ( actionData . toJson ( ) ) ; } catch ( IOException e ) { } } } +","public void appendJsonLog ( ActionData actionData ) { if ( isJsonLogActive ( ) ) { String dir = ""gamelogsJson"" ; File saveDir = new File ( dir ) ; if ( ! saveDir . exists ( ) ) { saveDir . mkdirs ( ) ; } actionData . sessionId = getSessionId ( ) ; String logFileName = dir + File . separator + ""game-"" + actionData . gameId + "".json"" ; try ( PrintWriter out = new PrintWriter ( new BufferedWriter ( new FileWriter ( logFileName , true ) ) ) ) { out . println ( actionData . toJson ( ) ) ; } catch ( IOException e ) { logger . error ( ""Cant write JSON game log file - "" + logFileName , e ) ; } } } +" +2,"public < K extends TBase < ? , ? > , V extends TBase < ? , ? > > V put ( ThriftCacheKey < K > key , V t ) throws BlurException { Tracer trace = Trace . trace ( THRIFT_CACHE_PUT , Trace . param ( KEY , key ) ) ; try { synchronized ( _lastModTimestamps ) { Long lastModTimestamp = _lastModTimestamps . get ( key . getTable ( ) ) ; if ( lastModTimestamp != null && key . getTimestamp ( ) < lastModTimestamp ) { return t ; } } _cacheMap . put ( key , new ThriftCacheValue < V > ( t ) ) ; return t ; } finally { trace . done ( ) ; } } +","public < K extends TBase < ? , ? > , V extends TBase < ? , ? > > V put ( ThriftCacheKey < K > key , V t ) throws BlurException { Tracer trace = Trace . trace ( THRIFT_CACHE_PUT , Trace . param ( KEY , key ) ) ; try { synchronized ( _lastModTimestamps ) { Long lastModTimestamp = _lastModTimestamps . get ( key . getTable ( ) ) ; if ( lastModTimestamp != null && key . getTimestamp ( ) < lastModTimestamp ) { return t ; } } LOG . debug ( ""Inserting into cache [{0}] with key [{1}]"" , t , key ) ; _cacheMap . put ( key , new ThriftCacheValue < V > ( t ) ) ; return t ; } finally { trace . done ( ) ; } } +" +3,"public void setName ( final String s ) { name = s ; } +","public void setName ( final String s ) { logger . trace ( ""setting name: {}"" , s ) ; name = s ; } +" +4,"private String deleteAssetGroupDetails ( final DeleteAssetGroupRequest deleteAssetGroupRequest ) throws PacManException { if ( assetGroupRepository . existsById ( deleteAssetGroupRequest . getGroupId ( ) ) ) { AssetGroupDetails assetGroupDetails = assetGroupRepository . findById ( deleteAssetGroupRequest . getGroupId ( ) ) . get ( ) ; boolean isDeleted = deleteAssetGroupAlias ( assetGroupDetails ) ; if ( isDeleted ) { try { assetGroupRepository . delete ( assetGroupDetails ) ; return ASSET_GROUP_DELETE_SUCCESS ; } catch ( Exception exception ) { commonService . invokeAPI ( ""POST"" , ALIASES , assetGroupDetails . getAliasQuery ( ) ) ; throw new PacManException ( UNEXPECTED_ERROR_OCCURRED . concat ( "": "" ) . 
concat ( exception . getMessage ( ) ) ) ; } } else { return ASSET_GROUP_DELETE_FAILED ; } } else { throw new PacManException ( ASSET_GROUP_NOT_EXITS ) ; } } +","private String deleteAssetGroupDetails ( final DeleteAssetGroupRequest deleteAssetGroupRequest ) throws PacManException { if ( assetGroupRepository . existsById ( deleteAssetGroupRequest . getGroupId ( ) ) ) { AssetGroupDetails assetGroupDetails = assetGroupRepository . findById ( deleteAssetGroupRequest . getGroupId ( ) ) . get ( ) ; boolean isDeleted = deleteAssetGroupAlias ( assetGroupDetails ) ; if ( isDeleted ) { try { assetGroupRepository . delete ( assetGroupDetails ) ; return ASSET_GROUP_DELETE_SUCCESS ; } catch ( Exception exception ) { log . error ( UNEXPECTED_ERROR_OCCURRED , exception ) ; commonService . invokeAPI ( ""POST"" , ALIASES , assetGroupDetails . getAliasQuery ( ) ) ; throw new PacManException ( UNEXPECTED_ERROR_OCCURRED . concat ( "": "" ) . concat ( exception . getMessage ( ) ) ) ; } } else { return ASSET_GROUP_DELETE_FAILED ; } } else { throw new PacManException ( ASSET_GROUP_NOT_EXITS ) ; } } +" +5,"public void setAsTag ( String refId ) { try ( PreparedStatement statement = prepareStatement ( ""UPDATE refs SET tag = true WHERE (refId = ? OR refName = ?) AND deleted = false"" ) ) { statement . setString ( 1 , sanitizeRefId ( refId ) ) ; statement . setString ( 2 , sanitizeRefId ( refId ) ) ; statement . executeUpdate ( ) ; execUpdate ( String . format ( ""REVOKE INSERT, UPDATE, DELETE ON nodes%1$s, edges%1$s, artifacts%1$s FROM %2$s"" , sanitizeRefId ( refId ) , EmsConfig . get ( ""pg.user"" ) ) ) ; } catch ( Exception e ) { } finally { close ( ) ; } } +","public void setAsTag ( String refId ) { try ( PreparedStatement statement = prepareStatement ( ""UPDATE refs SET tag = true WHERE (refId = ? OR refName = ?) AND deleted = false"" ) ) { statement . setString ( 1 , sanitizeRefId ( refId ) ) ; statement . setString ( 2 , sanitizeRefId ( refId ) ) ; statement . executeUpdate ( ) ; execUpdate ( String . format ( ""REVOKE INSERT, UPDATE, DELETE ON nodes%1$s, edges%1$s, artifacts%1$s FROM %2$s"" , sanitizeRefId ( refId ) , EmsConfig . get ( ""pg.user"" ) ) ) ; } catch ( Exception e ) { logger . warn ( String . format ( ""%s"" , LogUtil . getStackTrace ( e ) ) ) ; } finally { close ( ) ; } } +" +6,"void execute ( ExoContainer exoContainer ) { try { DocumentEditorProvider editorProvider = documentService . getEditorProvider ( provider ) ; Identity identity = userIdentity ( userId ) ; boolean allowed = editorProvider . isAvailableForUser ( identity ) ; List < String > availableProviders = service . getAllAvailableProviders ( identity ) ; String currentProvider = documentService . getCurrentDocumentProvider ( fileId , workspace ) ; boolean available = allowed && ( currentProvider == null || provider . equals ( currentProvider ) ) ; service . sendCurrentProviderInfo ( fileId , available , availableProviders ) ; if ( currentProvider == null ) { setCurrentDocumentProvider ( fileId , workspace , provider ) ; } } catch ( DocumentEditorProviderNotFoundException | RepositoryException e ) { } } +","void execute ( ExoContainer exoContainer ) { try { DocumentEditorProvider editorProvider = documentService . getEditorProvider ( provider ) ; Identity identity = userIdentity ( userId ) ; boolean allowed = editorProvider . isAvailableForUser ( identity ) ; List < String > availableProviders = service . getAllAvailableProviders ( identity ) ; String currentProvider = documentService . 
getCurrentDocumentProvider ( fileId , workspace ) ; boolean available = allowed && ( currentProvider == null || provider . equals ( currentProvider ) ) ; service . sendCurrentProviderInfo ( fileId , available , availableProviders ) ; if ( currentProvider == null ) { setCurrentDocumentProvider ( fileId , workspace , provider ) ; } } catch ( DocumentEditorProviderNotFoundException | RepositoryException e ) { LOG . error ( ""Cannot send current provider info for fileId: "" + fileId + "", workspace: "" + workspace , e ) ; } } +" +7,"private boolean isPrevEventMatchingCall ( final BeforeOperationEvent beforeOperationEvent , final AbstractTraceEvent prevEvent , final Class < ? extends CallOperationEvent > callClass ) { if ( ( prevEvent != null ) && callClass . isAssignableFrom ( prevEvent . getClass ( ) ) && ( prevEvent . getOrderIndex ( ) == ( beforeOperationEvent . getOrderIndex ( ) - 1 ) ) ) { if ( this . callsReferencedOperationOf ( ( CallOperationEvent ) prevEvent , beforeOperationEvent ) ) { return true ; } else if ( this . enhanceCallDetection ) { final boolean isConstructor = beforeOperationEvent instanceof BeforeConstructorEvent ; final CallOperationEvent callEvent = ( CallOperationEvent ) prevEvent ; final Signature callSignature = ClassOperationSignaturePair . splitOperationSignatureStr ( callEvent . getCalleeOperationSignature ( ) , isConstructor && this . enhanceJavaConstructors ) . getSignature ( ) ; final Signature afterSignature = ClassOperationSignaturePair . splitOperationSignatureStr ( beforeOperationEvent . getOperationSignature ( ) , isConstructor && this . enhanceJavaConstructors ) . getSignature ( ) ; if ( callSignature . equals ( afterSignature ) && callEvent . getCalleeClassSignature ( ) . equals ( beforeOperationEvent . getClassSignature ( ) ) ) { return true ; } } } return false ; } +","private boolean isPrevEventMatchingCall ( final BeforeOperationEvent beforeOperationEvent , final AbstractTraceEvent prevEvent , final Class < ? extends CallOperationEvent > callClass ) { if ( ( prevEvent != null ) && callClass . isAssignableFrom ( prevEvent . getClass ( ) ) && ( prevEvent . getOrderIndex ( ) == ( beforeOperationEvent . getOrderIndex ( ) - 1 ) ) ) { if ( this . callsReferencedOperationOf ( ( CallOperationEvent ) prevEvent , beforeOperationEvent ) ) { return true ; } else if ( this . enhanceCallDetection ) { final boolean isConstructor = beforeOperationEvent instanceof BeforeConstructorEvent ; final CallOperationEvent callEvent = ( CallOperationEvent ) prevEvent ; final Signature callSignature = ClassOperationSignaturePair . splitOperationSignatureStr ( callEvent . getCalleeOperationSignature ( ) , isConstructor && this . enhanceJavaConstructors ) . getSignature ( ) ; final Signature afterSignature = ClassOperationSignaturePair . splitOperationSignatureStr ( beforeOperationEvent . getOperationSignature ( ) , isConstructor && this . enhanceJavaConstructors ) . getSignature ( ) ; if ( callSignature . equals ( afterSignature ) && callEvent . getCalleeClassSignature ( ) . equals ( beforeOperationEvent . getClassSignature ( ) ) ) { this . logger . debug ( ""Guessed call of \n\t{}\n\t{}"" , callEvent , beforeOperationEvent ) ; return true ; } } } return false ; } +" +8,"public void apply ( ) { try { for ( int i = 0 ; i < TOP_CHANGE_CNT ; i ++ ) { if ( failed . get ( ) ) return ; Collection < String > names = new GridLeanSet < > ( 3 ) ; try { for ( int j = 0 ; j < 3 ; j ++ ) { if ( failed . get ( ) ) return ; String name = UUID . randomUUID ( ) . 
toString ( ) ; Ignite g = startGrid ( name ) ; names . add ( name ) ; cb . apply ( g ) ; } } finally { for ( String name : names ) stopGrid ( name ) ; } } } catch ( Exception e ) { if ( failed . compareAndSet ( false , true ) ) throw F . wrap ( e ) ; } } +","public void apply ( ) { try { for ( int i = 0 ; i < TOP_CHANGE_CNT ; i ++ ) { if ( failed . get ( ) ) return ; Collection < String > names = new GridLeanSet < > ( 3 ) ; try { for ( int j = 0 ; j < 3 ; j ++ ) { if ( failed . get ( ) ) return ; String name = UUID . randomUUID ( ) . toString ( ) ; log . info ( ""Start node: "" + name ) ; Ignite g = startGrid ( name ) ; names . add ( name ) ; cb . apply ( g ) ; } } finally { for ( String name : names ) stopGrid ( name ) ; } } } catch ( Exception e ) { if ( failed . compareAndSet ( false , true ) ) throw F . wrap ( e ) ; } } +" +9,"public void bridgeStatusChanged ( ThingStatusInfo bridgeStatusInfo ) { Bridge bridge = getBridge ( ) ; if ( bridge != null ) { initializeBridge ( bridge . getHandler ( ) , bridgeStatusInfo . getStatus ( ) ) ; } } +","public void bridgeStatusChanged ( ThingStatusInfo bridgeStatusInfo ) { logger . debug ( ""bridgeStatusChanged {} for thing {}"" , bridgeStatusInfo , getThing ( ) . getUID ( ) ) ; Bridge bridge = getBridge ( ) ; if ( bridge != null ) { initializeBridge ( bridge . getHandler ( ) , bridgeStatusInfo . getStatus ( ) ) ; } } +" +10,"public void update ( EventBean [ ] newEvents , EventBean [ ] oldEvents , EPStatement statement , EPRuntime runtime ) { if ( newEvents == null ) { return ; } for ( int i = 0 ; i < newEvents . length ; i ++ ) { if ( log . isInfoEnabled ( ) ) { } } } +","public void update ( EventBean [ ] newEvents , EventBean [ ] oldEvents , EPStatement statement , EPRuntime runtime ) { if ( newEvents == null ) { return ; } for ( int i = 0 ; i < newEvents . length ; i ++ ) { if ( log . isInfoEnabled ( ) ) { log . info ( ""IPAddress: "" + newEvents [ i ] . get ( ""ipAddress"" ) + "" Avg Duration: "" + newEvents [ i ] . get ( ""avg(duration)"" ) ) ; } } } +" +11,"public void cleanUpTemporaryResources ( ) { if ( generatedCompilerWorkingDirectory != null && generatedCompilerWorkingDirectory . exists ( ) ) { removeAll ( generatedCompilerWorkingDirectory ) ; this . generatedCompilerWorkingDirectory = null ; } if ( generatedBatchappsHomePath != null && generatedBatchappsHomePath . exists ( ) ) { LOG . debug ( ""Deleting temporary batchapps directory: {}"" , generatedBatchappsHomePath ) ; removeAll ( generatedBatchappsHomePath ) ; this . generatedBatchappsHomePath = null ; } } +","public void cleanUpTemporaryResources ( ) { if ( generatedCompilerWorkingDirectory != null && generatedCompilerWorkingDirectory . exists ( ) ) { LOG . debug ( ""Deleting temporary compiler working directory: {}"" , generatedCompilerWorkingDirectory ) ; removeAll ( generatedCompilerWorkingDirectory ) ; this . generatedCompilerWorkingDirectory = null ; } if ( generatedBatchappsHomePath != null && generatedBatchappsHomePath . exists ( ) ) { LOG . debug ( ""Deleting temporary batchapps directory: {}"" , generatedBatchappsHomePath ) ; removeAll ( generatedBatchappsHomePath ) ; this . generatedBatchappsHomePath = null ; } } +" +12,"public void cleanUpTemporaryResources ( ) { if ( generatedCompilerWorkingDirectory != null && generatedCompilerWorkingDirectory . exists ( ) ) { LOG . debug ( ""Deleting temporary compiler working directory: {}"" , generatedCompilerWorkingDirectory ) ; removeAll ( generatedCompilerWorkingDirectory ) ; this . 
generatedCompilerWorkingDirectory = null ; } if ( generatedBatchappsHomePath != null && generatedBatchappsHomePath . exists ( ) ) { removeAll ( generatedBatchappsHomePath ) ; this . generatedBatchappsHomePath = null ; } } +","public void cleanUpTemporaryResources ( ) { if ( generatedCompilerWorkingDirectory != null && generatedCompilerWorkingDirectory . exists ( ) ) { LOG . debug ( ""Deleting temporary compiler working directory: {}"" , generatedCompilerWorkingDirectory ) ; removeAll ( generatedCompilerWorkingDirectory ) ; this . generatedCompilerWorkingDirectory = null ; } if ( generatedBatchappsHomePath != null && generatedBatchappsHomePath . exists ( ) ) { LOG . debug ( ""Deleting temporary batchapps directory: {}"" , generatedBatchappsHomePath ) ; removeAll ( generatedBatchappsHomePath ) ; this . generatedBatchappsHomePath = null ; } } +" +13,"public Pixel read ( Kryo kryo , Input input , Class < Pixel > type ) { int X = input . readInt ( ) ; int Y = input . readInt ( ) ; int ResolutionX = input . readInt ( ) ; int ResolutionY = input . readInt ( ) ; boolean isDuplicate = input . readBoolean ( ) ; int currentPartitionId = input . readInt ( ) ; Pixel pixel = new Pixel ( X , Y , ResolutionX , ResolutionY , isDuplicate , currentPartitionId ) ; return pixel ; } +","public Pixel read ( Kryo kryo , Input input , Class < Pixel > type ) { log . debug ( ""De-serializing Pixel..."" ) ; int X = input . readInt ( ) ; int Y = input . readInt ( ) ; int ResolutionX = input . readInt ( ) ; int ResolutionY = input . readInt ( ) ; boolean isDuplicate = input . readBoolean ( ) ; int currentPartitionId = input . readInt ( ) ; Pixel pixel = new Pixel ( X , Y , ResolutionX , ResolutionY , isDuplicate , currentPartitionId ) ; return pixel ; } +" +14,"@ SuppressWarnings ( ""unchecked"" ) public List < Role > getRoles ( ) { return entityManager . createQuery ( ""select r from Role r order by name"" ) . getResultList ( ) ; } +","@ SuppressWarnings ( ""unchecked"" ) public List < Role > getRoles ( ) { log . debug ( ""Retrieving all role names..."" ) ; return entityManager . createQuery ( ""select r from Role r order by name"" ) . getResultList ( ) ; } +" +15,"public void updateClusterConfig ( ClusterConfigRequest request ) { if ( clusterManager ( ) != null ) { OHazelcastPlugin plugin = clusterManager ( ) . getHazelcastPlugin ( ) ; ODistributedConfiguration storageCfg = plugin . getDatabaseConfiguration ( GraphStorage . DB_NAME ) ; final OModifiableDistributedConfiguration newCfg = storageCfg . modify ( ) ; for ( ClusterServerConfig server : request . getServers ( ) ) { ServerRole newRole = server . getRole ( ) ; ROLES newORole = ROLES . valueOf ( newRole . name ( ) ) ; ROLES oldRole = newCfg . getServerRole ( server . getName ( ) ) ; if ( oldRole != newORole ) { newCfg . setServerRole ( server . getName ( ) , newORole ) ; } } String newWriteQuorum = request . getWriteQuorum ( ) ; if ( newWriteQuorum != null ) { if ( newWriteQuorum . equalsIgnoreCase ( ""all"" ) || newWriteQuorum . equalsIgnoreCase ( ""majority"" ) ) { newCfg . getDocument ( ) . setProperty ( ""writeQuorum"" , newWriteQuorum ) ; } else { try { int newWriteQuorumInt = Integer . parseInt ( newWriteQuorum ) ; newCfg . getDocument ( ) . setProperty ( ""writeQuorum"" , newWriteQuorumInt ) ; } catch ( Exception e ) { throw new RuntimeException ( ""Unsupported write quorum value {"" + newWriteQuorum + ""}"" ) ; } } } Integer newReadQuorum = request . getReadQuorum ( ) ; if ( newReadQuorum != null ) { newCfg . getDocument ( ) . 
setProperty ( ""readQuorum"" , newReadQuorum ) ; } plugin . updateCachedDatabaseConfiguration ( GraphStorage . DB_NAME , newCfg , true ) ; } else { throw error ( BAD_REQUEST , ""error_cluster_status_only_available_in_cluster_mode"" ) ; } } +","public void updateClusterConfig ( ClusterConfigRequest request ) { if ( clusterManager ( ) != null ) { OHazelcastPlugin plugin = clusterManager ( ) . getHazelcastPlugin ( ) ; ODistributedConfiguration storageCfg = plugin . getDatabaseConfiguration ( GraphStorage . DB_NAME ) ; final OModifiableDistributedConfiguration newCfg = storageCfg . modify ( ) ; for ( ClusterServerConfig server : request . getServers ( ) ) { ServerRole newRole = server . getRole ( ) ; ROLES newORole = ROLES . valueOf ( newRole . name ( ) ) ; ROLES oldRole = newCfg . getServerRole ( server . getName ( ) ) ; if ( oldRole != newORole ) { log . debug ( ""Updating server role {"" + server . getName ( ) + ""} from {"" + oldRole + ""} to {"" + newRole + ""}"" ) ; newCfg . setServerRole ( server . getName ( ) , newORole ) ; } } String newWriteQuorum = request . getWriteQuorum ( ) ; if ( newWriteQuorum != null ) { if ( newWriteQuorum . equalsIgnoreCase ( ""all"" ) || newWriteQuorum . equalsIgnoreCase ( ""majority"" ) ) { newCfg . getDocument ( ) . setProperty ( ""writeQuorum"" , newWriteQuorum ) ; } else { try { int newWriteQuorumInt = Integer . parseInt ( newWriteQuorum ) ; newCfg . getDocument ( ) . setProperty ( ""writeQuorum"" , newWriteQuorumInt ) ; } catch ( Exception e ) { throw new RuntimeException ( ""Unsupported write quorum value {"" + newWriteQuorum + ""}"" ) ; } } } Integer newReadQuorum = request . getReadQuorum ( ) ; if ( newReadQuorum != null ) { newCfg . getDocument ( ) . setProperty ( ""readQuorum"" , newReadQuorum ) ; } plugin . updateCachedDatabaseConfiguration ( GraphStorage . DB_NAME , newCfg , true ) ; } else { throw error ( BAD_REQUEST , ""error_cluster_status_only_available_in_cluster_mode"" ) ; } } +" +16,"private void drawPoint ( PdfContext context , Rectangle iconRect , Color fillColor , Color strokeColor ) { float baseWidth = iconRect . getWidth ( ) / 10 ; SymbolInfo symbol = styleInfo . getSymbol ( ) ; if ( symbol . getImage ( ) != null ) { try { Image pointImage = Image . getInstance ( symbol . getImage ( ) . getHref ( ) ) ; context . drawImage ( pointImage , iconRect , iconRect ) ; } catch ( Exception ex ) { } } else if ( symbol . getRect ( ) != null ) { context . fillRectangle ( iconRect , fillColor ) ; context . strokeRectangle ( iconRect , strokeColor , baseWidth / 2 ) ; } else { context . fillEllipse ( iconRect , fillColor ) ; context . strokeEllipse ( iconRect , strokeColor , baseWidth / 2 ) ; } } +","private void drawPoint ( PdfContext context , Rectangle iconRect , Color fillColor , Color strokeColor ) { float baseWidth = iconRect . getWidth ( ) / 10 ; SymbolInfo symbol = styleInfo . getSymbol ( ) ; if ( symbol . getImage ( ) != null ) { try { Image pointImage = Image . getInstance ( symbol . getImage ( ) . getHref ( ) ) ; context . drawImage ( pointImage , iconRect , iconRect ) ; } catch ( Exception ex ) { log . error ( ""Not able to create image for POINT Symbol"" , ex ) ; } } else if ( symbol . getRect ( ) != null ) { context . fillRectangle ( iconRect , fillColor ) ; context . strokeRectangle ( iconRect , strokeColor , baseWidth / 2 ) ; } else { context . fillEllipse ( iconRect , fillColor ) ; context . 
strokeEllipse ( iconRect , strokeColor , baseWidth / 2 ) ; } } +" +17,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . 
fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +18,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . 
error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +19,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . 
fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . 
fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +20,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . 
debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +21,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . 
error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . 
error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +22,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . 
getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +23,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . 
fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . 
error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +24,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . 
debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +25,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . 
fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . 
error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +26,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . 
debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +27,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . 
fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . 
getMessage ( ) ) ; } } +" +28,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . 
fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +29,"@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . 
unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +","@ Test public void badInputUnzipFileStr ( ) throws Exception { File destFile = new File ( destDir , ""badInputFileStr"" ) ; try { try { ZipUtil . unzip ( ( File ) null , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File is null"" ) ; Assert . fail ( ""Unzip should fail when input File is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( dummieFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when input File doesn't exist"" ) ; Assert . fail ( ""Unzip should fail when input File doesn't exist"" ) ; } catch ( FileNotFoundException e ) { logger . debug ( ""Detecting non-existing input File (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , ( String ) null ) ; logger . error ( ""Unzip should fail when destination filename is null"" ) ; Assert . fail ( ""Unzip should fail when destination filename is null"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting null destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip . getCanonicalPath ( ) , """" ) ; logger . error ( ""Unzip should fail when destination filename is empty"" ) ; Assert . fail ( ""Unzip should fail when destination filename is empty"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting empty destination filename (File, String): OK"" ) ; } try { ZipUtil . unzip ( srcFile , destFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the input File does not represent a zip file"" ) ; Assert . fail ( ""Unzip should fail when the input File does not represent a zip file"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting input File not representing a valid zip file (File, String): OK"" ) ; } try { ZipUtil . unzip ( sampleZip , srcFile . getCanonicalPath ( ) ) ; logger . error ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; Assert . fail ( ""Unzip should fail when the destination filename does not represent a directory"" ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Detecting destination filename not representing a directory (File, String): OK"" ) ; } } catch ( Exception e ) { logger . error ( ""Another exception was expected, but got {} instead: {}"" , e . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; Assert . fail ( ""Another exception was expected, but got "" + e . getClass ( ) . getName ( ) + ""instead: "" + e . getMessage ( ) ) ; } } +" +30,"protected Type loadType ( Schema schema , XSType type , String fieldName ) throws TypeBindingException { String name ; if ( type . getName ( ) == null || type . isLocal ( ) ) { name = getAnonymousTypeName ( type , fieldName ) ; if ( name == null ) { return null ; } } else { name = type . 
getName ( ) ; } Type ecmType = getType ( name ) ; if ( ecmType != null ) { return ecmType ; } ecmType = schema . getType ( name ) ; if ( ecmType != null ) { return ecmType ; } if ( type . getTargetNamespace ( ) . equals ( NS_XSD ) ) { ecmType = XSDTypes . getType ( name ) ; if ( ecmType == null ) { log . warn ( ""Cannot use unknown XSD type: "" + name ) ; } return ecmType ; } if ( type . isSimpleType ( ) ) { if ( type instanceof XSListSimpleType ) { ecmType = loadListType ( schema , ( XSListSimpleType ) type ) ; } else { ecmType = loadSimpleType ( schema , type , fieldName ) ; } } else { ecmType = loadComplexType ( schema , name , type . asComplexType ( ) ) ; } if ( ecmType != null ) { schema . registerType ( ecmType ) ; } else { log . warn ( ""loadType for "" + fieldName + "" of "" + type + "" returns null"" ) ; } return ecmType ; } +","protected Type loadType ( Schema schema , XSType type , String fieldName ) throws TypeBindingException { String name ; if ( type . getName ( ) == null || type . isLocal ( ) ) { name = getAnonymousTypeName ( type , fieldName ) ; if ( name == null ) { log . warn ( ""Unable to load type - no name found"" ) ; return null ; } } else { name = type . getName ( ) ; } Type ecmType = getType ( name ) ; if ( ecmType != null ) { return ecmType ; } ecmType = schema . getType ( name ) ; if ( ecmType != null ) { return ecmType ; } if ( type . getTargetNamespace ( ) . equals ( NS_XSD ) ) { ecmType = XSDTypes . getType ( name ) ; if ( ecmType == null ) { log . warn ( ""Cannot use unknown XSD type: "" + name ) ; } return ecmType ; } if ( type . isSimpleType ( ) ) { if ( type instanceof XSListSimpleType ) { ecmType = loadListType ( schema , ( XSListSimpleType ) type ) ; } else { ecmType = loadSimpleType ( schema , type , fieldName ) ; } } else { ecmType = loadComplexType ( schema , name , type . asComplexType ( ) ) ; } if ( ecmType != null ) { schema . registerType ( ecmType ) ; } else { log . warn ( ""loadType for "" + fieldName + "" of "" + type + "" returns null"" ) ; } return ecmType ; } +" +31,"protected Type loadType ( Schema schema , XSType type , String fieldName ) throws TypeBindingException { String name ; if ( type . getName ( ) == null || type . isLocal ( ) ) { name = getAnonymousTypeName ( type , fieldName ) ; if ( name == null ) { log . warn ( ""Unable to load type - no name found"" ) ; return null ; } } else { name = type . getName ( ) ; } Type ecmType = getType ( name ) ; if ( ecmType != null ) { return ecmType ; } ecmType = schema . getType ( name ) ; if ( ecmType != null ) { return ecmType ; } if ( type . getTargetNamespace ( ) . equals ( NS_XSD ) ) { ecmType = XSDTypes . getType ( name ) ; if ( ecmType == null ) { } return ecmType ; } if ( type . isSimpleType ( ) ) { if ( type instanceof XSListSimpleType ) { ecmType = loadListType ( schema , ( XSListSimpleType ) type ) ; } else { ecmType = loadSimpleType ( schema , type , fieldName ) ; } } else { ecmType = loadComplexType ( schema , name , type . asComplexType ( ) ) ; } if ( ecmType != null ) { schema . registerType ( ecmType ) ; } else { log . warn ( ""loadType for "" + fieldName + "" of "" + type + "" returns null"" ) ; } return ecmType ; } +","protected Type loadType ( Schema schema , XSType type , String fieldName ) throws TypeBindingException { String name ; if ( type . getName ( ) == null || type . isLocal ( ) ) { name = getAnonymousTypeName ( type , fieldName ) ; if ( name == null ) { log . warn ( ""Unable to load type - no name found"" ) ; return null ; } } else { name = type . 
getName ( ) ; } Type ecmType = getType ( name ) ; if ( ecmType != null ) { return ecmType ; } ecmType = schema . getType ( name ) ; if ( ecmType != null ) { return ecmType ; } if ( type . getTargetNamespace ( ) . equals ( NS_XSD ) ) { ecmType = XSDTypes . getType ( name ) ; if ( ecmType == null ) { log . warn ( ""Cannot use unknown XSD type: "" + name ) ; } return ecmType ; } if ( type . isSimpleType ( ) ) { if ( type instanceof XSListSimpleType ) { ecmType = loadListType ( schema , ( XSListSimpleType ) type ) ; } else { ecmType = loadSimpleType ( schema , type , fieldName ) ; } } else { ecmType = loadComplexType ( schema , name , type . asComplexType ( ) ) ; } if ( ecmType != null ) { schema . registerType ( ecmType ) ; } else { log . warn ( ""loadType for "" + fieldName + "" of "" + type + "" returns null"" ) ; } return ecmType ; } +" +32,"protected Type loadType ( Schema schema , XSType type , String fieldName ) throws TypeBindingException { String name ; if ( type . getName ( ) == null || type . isLocal ( ) ) { name = getAnonymousTypeName ( type , fieldName ) ; if ( name == null ) { log . warn ( ""Unable to load type - no name found"" ) ; return null ; } } else { name = type . getName ( ) ; } Type ecmType = getType ( name ) ; if ( ecmType != null ) { return ecmType ; } ecmType = schema . getType ( name ) ; if ( ecmType != null ) { return ecmType ; } if ( type . getTargetNamespace ( ) . equals ( NS_XSD ) ) { ecmType = XSDTypes . getType ( name ) ; if ( ecmType == null ) { log . warn ( ""Cannot use unknown XSD type: "" + name ) ; } return ecmType ; } if ( type . isSimpleType ( ) ) { if ( type instanceof XSListSimpleType ) { ecmType = loadListType ( schema , ( XSListSimpleType ) type ) ; } else { ecmType = loadSimpleType ( schema , type , fieldName ) ; } } else { ecmType = loadComplexType ( schema , name , type . asComplexType ( ) ) ; } if ( ecmType != null ) { schema . registerType ( ecmType ) ; } else { } return ecmType ; } +","protected Type loadType ( Schema schema , XSType type , String fieldName ) throws TypeBindingException { String name ; if ( type . getName ( ) == null || type . isLocal ( ) ) { name = getAnonymousTypeName ( type , fieldName ) ; if ( name == null ) { log . warn ( ""Unable to load type - no name found"" ) ; return null ; } } else { name = type . getName ( ) ; } Type ecmType = getType ( name ) ; if ( ecmType != null ) { return ecmType ; } ecmType = schema . getType ( name ) ; if ( ecmType != null ) { return ecmType ; } if ( type . getTargetNamespace ( ) . equals ( NS_XSD ) ) { ecmType = XSDTypes . getType ( name ) ; if ( ecmType == null ) { log . warn ( ""Cannot use unknown XSD type: "" + name ) ; } return ecmType ; } if ( type . isSimpleType ( ) ) { if ( type instanceof XSListSimpleType ) { ecmType = loadListType ( schema , ( XSListSimpleType ) type ) ; } else { ecmType = loadSimpleType ( schema , type , fieldName ) ; } } else { ecmType = loadComplexType ( schema , name , type . asComplexType ( ) ) ; } if ( ecmType != null ) { schema . registerType ( ecmType ) ; } else { log . warn ( ""loadType for "" + fieldName + "" of "" + type + "" returns null"" ) ; } return ecmType ; } +" +33,"protected void reconfigureService ( Collection < HostGeoInfo > targetHosts ) { if ( ! isConfigured ) { this . rememberedTargetHosts = MutableSet . copyOf ( targetHosts ) ; return ; } webClient . login ( username , password ) ; Domain primaryDomain = webClient . 
getPrimaryDomain ( primaryDomainName ) ; if ( primaryDomain == null ) throw new NullPointerException ( this + "" got null from web client for primary domain "" + primaryDomainName ) ; SmartSubdomain smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; if ( smartSubdomain == null ) { primaryDomain . createSmartSubdomain ( smartSubdomainName ) ; smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; } if ( smartSubdomain != null ) { log . debug ( ""GeoScaling {} being reconfigured to use {}"" , this , targetHosts ) ; String script = GeoscalingScriptGenerator . generateScriptString ( targetHosts ) ; smartSubdomain . configure ( PROVIDE_CITY_INFO , script ) ; if ( targetHosts . isEmpty ( ) ) { setServiceState ( Lifecycle . CREATED ) ; sensors ( ) . set ( ROOT_URL , null ) ; sensors ( ) . set ( MAIN_URI , null ) ; } else { setServiceState ( Lifecycle . RUNNING ) ; String domain = getAttribute ( MANAGED_DOMAIN ) ; if ( ! Strings . isEmpty ( domain ) ) { sensors ( ) . set ( ROOT_URL , ""http://"" + domain + ""/"" ) ; sensors ( ) . set ( MAIN_URI , URI . create ( ""http://"" + domain + ""/"" ) ) ; } } } else { log . warn ( ""Failed to retrieve or create GeoScaling smart subdomain '"" + smartSubdomainName + ""."" + primaryDomainName + ""', aborting attempt to configure service"" ) ; setServiceState ( Lifecycle . ON_FIRE ) ; } webClient . logout ( ) ; } +","protected void reconfigureService ( Collection < HostGeoInfo > targetHosts ) { if ( ! isConfigured ) { this . rememberedTargetHosts = MutableSet . copyOf ( targetHosts ) ; return ; } webClient . login ( username , password ) ; Domain primaryDomain = webClient . getPrimaryDomain ( primaryDomainName ) ; if ( primaryDomain == null ) throw new NullPointerException ( this + "" got null from web client for primary domain "" + primaryDomainName ) ; SmartSubdomain smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; if ( smartSubdomain == null ) { log . info ( ""GeoScaling {} smart subdomain '{}.{}' does not exist, creating it now"" , new Object [ ] { this , smartSubdomainName , primaryDomainName } ) ; primaryDomain . createSmartSubdomain ( smartSubdomainName ) ; smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; } if ( smartSubdomain != null ) { log . debug ( ""GeoScaling {} being reconfigured to use {}"" , this , targetHosts ) ; String script = GeoscalingScriptGenerator . generateScriptString ( targetHosts ) ; smartSubdomain . configure ( PROVIDE_CITY_INFO , script ) ; if ( targetHosts . isEmpty ( ) ) { setServiceState ( Lifecycle . CREATED ) ; sensors ( ) . set ( ROOT_URL , null ) ; sensors ( ) . set ( MAIN_URI , null ) ; } else { setServiceState ( Lifecycle . RUNNING ) ; String domain = getAttribute ( MANAGED_DOMAIN ) ; if ( ! Strings . isEmpty ( domain ) ) { sensors ( ) . set ( ROOT_URL , ""http://"" + domain + ""/"" ) ; sensors ( ) . set ( MAIN_URI , URI . create ( ""http://"" + domain + ""/"" ) ) ; } } } else { log . warn ( ""Failed to retrieve or create GeoScaling smart subdomain '"" + smartSubdomainName + ""."" + primaryDomainName + ""', aborting attempt to configure service"" ) ; setServiceState ( Lifecycle . ON_FIRE ) ; } webClient . logout ( ) ; } +" +34,"protected void reconfigureService ( Collection < HostGeoInfo > targetHosts ) { if ( ! isConfigured ) { this . rememberedTargetHosts = MutableSet . copyOf ( targetHosts ) ; return ; } webClient . login ( username , password ) ; Domain primaryDomain = webClient . 
getPrimaryDomain ( primaryDomainName ) ; if ( primaryDomain == null ) throw new NullPointerException ( this + "" got null from web client for primary domain "" + primaryDomainName ) ; SmartSubdomain smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; if ( smartSubdomain == null ) { log . info ( ""GeoScaling {} smart subdomain '{}.{}' does not exist, creating it now"" , new Object [ ] { this , smartSubdomainName , primaryDomainName } ) ; primaryDomain . createSmartSubdomain ( smartSubdomainName ) ; smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; } if ( smartSubdomain != null ) { String script = GeoscalingScriptGenerator . generateScriptString ( targetHosts ) ; smartSubdomain . configure ( PROVIDE_CITY_INFO , script ) ; if ( targetHosts . isEmpty ( ) ) { setServiceState ( Lifecycle . CREATED ) ; sensors ( ) . set ( ROOT_URL , null ) ; sensors ( ) . set ( MAIN_URI , null ) ; } else { setServiceState ( Lifecycle . RUNNING ) ; String domain = getAttribute ( MANAGED_DOMAIN ) ; if ( ! Strings . isEmpty ( domain ) ) { sensors ( ) . set ( ROOT_URL , ""http://"" + domain + ""/"" ) ; sensors ( ) . set ( MAIN_URI , URI . create ( ""http://"" + domain + ""/"" ) ) ; } } } else { log . warn ( ""Failed to retrieve or create GeoScaling smart subdomain '"" + smartSubdomainName + ""."" + primaryDomainName + ""', aborting attempt to configure service"" ) ; setServiceState ( Lifecycle . ON_FIRE ) ; } webClient . logout ( ) ; } +","protected void reconfigureService ( Collection < HostGeoInfo > targetHosts ) { if ( ! isConfigured ) { this . rememberedTargetHosts = MutableSet . copyOf ( targetHosts ) ; return ; } webClient . login ( username , password ) ; Domain primaryDomain = webClient . getPrimaryDomain ( primaryDomainName ) ; if ( primaryDomain == null ) throw new NullPointerException ( this + "" got null from web client for primary domain "" + primaryDomainName ) ; SmartSubdomain smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; if ( smartSubdomain == null ) { log . info ( ""GeoScaling {} smart subdomain '{}.{}' does not exist, creating it now"" , new Object [ ] { this , smartSubdomainName , primaryDomainName } ) ; primaryDomain . createSmartSubdomain ( smartSubdomainName ) ; smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; } if ( smartSubdomain != null ) { log . debug ( ""GeoScaling {} being reconfigured to use {}"" , this , targetHosts ) ; String script = GeoscalingScriptGenerator . generateScriptString ( targetHosts ) ; smartSubdomain . configure ( PROVIDE_CITY_INFO , script ) ; if ( targetHosts . isEmpty ( ) ) { setServiceState ( Lifecycle . CREATED ) ; sensors ( ) . set ( ROOT_URL , null ) ; sensors ( ) . set ( MAIN_URI , null ) ; } else { setServiceState ( Lifecycle . RUNNING ) ; String domain = getAttribute ( MANAGED_DOMAIN ) ; if ( ! Strings . isEmpty ( domain ) ) { sensors ( ) . set ( ROOT_URL , ""http://"" + domain + ""/"" ) ; sensors ( ) . set ( MAIN_URI , URI . create ( ""http://"" + domain + ""/"" ) ) ; } } } else { log . warn ( ""Failed to retrieve or create GeoScaling smart subdomain '"" + smartSubdomainName + ""."" + primaryDomainName + ""', aborting attempt to configure service"" ) ; setServiceState ( Lifecycle . ON_FIRE ) ; } webClient . logout ( ) ; } +" +35,"protected void reconfigureService ( Collection < HostGeoInfo > targetHosts ) { if ( ! isConfigured ) { this . rememberedTargetHosts = MutableSet . copyOf ( targetHosts ) ; return ; } webClient . 
login ( username , password ) ; Domain primaryDomain = webClient . getPrimaryDomain ( primaryDomainName ) ; if ( primaryDomain == null ) throw new NullPointerException ( this + "" got null from web client for primary domain "" + primaryDomainName ) ; SmartSubdomain smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; if ( smartSubdomain == null ) { log . info ( ""GeoScaling {} smart subdomain '{}.{}' does not exist, creating it now"" , new Object [ ] { this , smartSubdomainName , primaryDomainName } ) ; primaryDomain . createSmartSubdomain ( smartSubdomainName ) ; smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; } if ( smartSubdomain != null ) { log . debug ( ""GeoScaling {} being reconfigured to use {}"" , this , targetHosts ) ; String script = GeoscalingScriptGenerator . generateScriptString ( targetHosts ) ; smartSubdomain . configure ( PROVIDE_CITY_INFO , script ) ; if ( targetHosts . isEmpty ( ) ) { setServiceState ( Lifecycle . CREATED ) ; sensors ( ) . set ( ROOT_URL , null ) ; sensors ( ) . set ( MAIN_URI , null ) ; } else { setServiceState ( Lifecycle . RUNNING ) ; String domain = getAttribute ( MANAGED_DOMAIN ) ; if ( ! Strings . isEmpty ( domain ) ) { sensors ( ) . set ( ROOT_URL , ""http://"" + domain + ""/"" ) ; sensors ( ) . set ( MAIN_URI , URI . create ( ""http://"" + domain + ""/"" ) ) ; } } } else { setServiceState ( Lifecycle . ON_FIRE ) ; } webClient . logout ( ) ; } +","protected void reconfigureService ( Collection < HostGeoInfo > targetHosts ) { if ( ! isConfigured ) { this . rememberedTargetHosts = MutableSet . copyOf ( targetHosts ) ; return ; } webClient . login ( username , password ) ; Domain primaryDomain = webClient . getPrimaryDomain ( primaryDomainName ) ; if ( primaryDomain == null ) throw new NullPointerException ( this + "" got null from web client for primary domain "" + primaryDomainName ) ; SmartSubdomain smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; if ( smartSubdomain == null ) { log . info ( ""GeoScaling {} smart subdomain '{}.{}' does not exist, creating it now"" , new Object [ ] { this , smartSubdomainName , primaryDomainName } ) ; primaryDomain . createSmartSubdomain ( smartSubdomainName ) ; smartSubdomain = primaryDomain . getSmartSubdomain ( smartSubdomainName ) ; } if ( smartSubdomain != null ) { log . debug ( ""GeoScaling {} being reconfigured to use {}"" , this , targetHosts ) ; String script = GeoscalingScriptGenerator . generateScriptString ( targetHosts ) ; smartSubdomain . configure ( PROVIDE_CITY_INFO , script ) ; if ( targetHosts . isEmpty ( ) ) { setServiceState ( Lifecycle . CREATED ) ; sensors ( ) . set ( ROOT_URL , null ) ; sensors ( ) . set ( MAIN_URI , null ) ; } else { setServiceState ( Lifecycle . RUNNING ) ; String domain = getAttribute ( MANAGED_DOMAIN ) ; if ( ! Strings . isEmpty ( domain ) ) { sensors ( ) . set ( ROOT_URL , ""http://"" + domain + ""/"" ) ; sensors ( ) . set ( MAIN_URI , URI . create ( ""http://"" + domain + ""/"" ) ) ; } } } else { log . warn ( ""Failed to retrieve or create GeoScaling smart subdomain '"" + smartSubdomainName + ""."" + primaryDomainName + ""', aborting attempt to configure service"" ) ; setServiceState ( Lifecycle . ON_FIRE ) ; } webClient . logout ( ) ; } +" +36,"private void endOfTestHook ( ITestResult result , RunResult outcome ) { if ( outcome != RunResult . SUCCESS ) { try { takeScreenShot ( result ) ; } catch ( Exception e ) { } } try { dumpFalconStore ( result ) ; } catch ( Exception e ) { LOGGER . 
info ( ""Dumping of falcon store failed: "" + e ) ; } LOGGER . info ( String . format ( ""Testing going to end for: %s.%s(%s) ----- Status: %s"" , result . getTestClass ( ) . getName ( ) , result . getName ( ) , Arrays . toString ( result . getParameters ( ) ) , outcome ) ) ; NDC . pop ( ) ; LOGGER . info ( hr ) ; } +","private void endOfTestHook ( ITestResult result , RunResult outcome ) { if ( outcome != RunResult . SUCCESS ) { try { takeScreenShot ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Saving screenshot FAILED: "" + e . getCause ( ) ) ; } } try { dumpFalconStore ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Dumping of falcon store failed: "" + e ) ; } LOGGER . info ( String . format ( ""Testing going to end for: %s.%s(%s) ----- Status: %s"" , result . getTestClass ( ) . getName ( ) , result . getName ( ) , Arrays . toString ( result . getParameters ( ) ) , outcome ) ) ; NDC . pop ( ) ; LOGGER . info ( hr ) ; } +" +37,"private void endOfTestHook ( ITestResult result , RunResult outcome ) { if ( outcome != RunResult . SUCCESS ) { try { takeScreenShot ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Saving screenshot FAILED: "" + e . getCause ( ) ) ; } } try { dumpFalconStore ( result ) ; } catch ( Exception e ) { } LOGGER . info ( String . format ( ""Testing going to end for: %s.%s(%s) ----- Status: %s"" , result . getTestClass ( ) . getName ( ) , result . getName ( ) , Arrays . toString ( result . getParameters ( ) ) , outcome ) ) ; NDC . pop ( ) ; LOGGER . info ( hr ) ; } +","private void endOfTestHook ( ITestResult result , RunResult outcome ) { if ( outcome != RunResult . SUCCESS ) { try { takeScreenShot ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Saving screenshot FAILED: "" + e . getCause ( ) ) ; } } try { dumpFalconStore ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Dumping of falcon store failed: "" + e ) ; } LOGGER . info ( String . format ( ""Testing going to end for: %s.%s(%s) ----- Status: %s"" , result . getTestClass ( ) . getName ( ) , result . getName ( ) , Arrays . toString ( result . getParameters ( ) ) , outcome ) ) ; NDC . pop ( ) ; LOGGER . info ( hr ) ; } +" +38,"private void endOfTestHook ( ITestResult result , RunResult outcome ) { if ( outcome != RunResult . SUCCESS ) { try { takeScreenShot ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Saving screenshot FAILED: "" + e . getCause ( ) ) ; } } try { dumpFalconStore ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Dumping of falcon store failed: "" + e ) ; } NDC . pop ( ) ; LOGGER . info ( hr ) ; } +","private void endOfTestHook ( ITestResult result , RunResult outcome ) { if ( outcome != RunResult . SUCCESS ) { try { takeScreenShot ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Saving screenshot FAILED: "" + e . getCause ( ) ) ; } } try { dumpFalconStore ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Dumping of falcon store failed: "" + e ) ; } LOGGER . info ( String . format ( ""Testing going to end for: %s.%s(%s) ----- Status: %s"" , result . getTestClass ( ) . getName ( ) , result . getName ( ) , Arrays . toString ( result . getParameters ( ) ) , outcome ) ) ; NDC . pop ( ) ; LOGGER . info ( hr ) ; } +" +39,"private void endOfTestHook ( ITestResult result , RunResult outcome ) { if ( outcome != RunResult . SUCCESS ) { try { takeScreenShot ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Saving screenshot FAILED: "" + e . 
getCause ( ) ) ; } } try { dumpFalconStore ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Dumping of falcon store failed: "" + e ) ; } LOGGER . info ( String . format ( ""Testing going to end for: %s.%s(%s) ----- Status: %s"" , result . getTestClass ( ) . getName ( ) , result . getName ( ) , Arrays . toString ( result . getParameters ( ) ) , outcome ) ) ; NDC . pop ( ) ; } +","private void endOfTestHook ( ITestResult result , RunResult outcome ) { if ( outcome != RunResult . SUCCESS ) { try { takeScreenShot ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Saving screenshot FAILED: "" + e . getCause ( ) ) ; } } try { dumpFalconStore ( result ) ; } catch ( Exception e ) { LOGGER . info ( ""Dumping of falcon store failed: "" + e ) ; } LOGGER . info ( String . format ( ""Testing going to end for: %s.%s(%s) ----- Status: %s"" , result . getTestClass ( ) . getName ( ) , result . getName ( ) , Arrays . toString ( result . getParameters ( ) ) , outcome ) ) ; NDC . pop ( ) ; LOGGER . info ( hr ) ; } +" +40,"public void run ( ) { KafkaCollector collector = null ; try { collector = new KafkaCollector ( props , name , topic ) ; LOG . info ( ""Running "" + name + "" for input "" + props . getProperty ( INPUT_PATHS ) ) ; collector . run ( ) ; } catch ( Throwable e ) { LOG . error ( ""Failed for "" + name + "" ,job: "" + collector == null ? null : collector . getJob ( ) + "" failed for "" + props . getProperty ( INPUT_PATHS ) + "" Exception:"" + e . getLocalizedMessage ( ) ) ; errorQueue . add ( new SweeperError ( name , props . get ( INPUT_PATHS ) . toString ( ) , e ) ) ; } } +","public void run ( ) { KafkaCollector collector = null ; try { LOG . info ( ""Starting runner for "" + this . props . getProperty ( TOPIC_AND_HOUR ) ) ; collector = new KafkaCollector ( props , name , topic ) ; LOG . info ( ""Running "" + name + "" for input "" + props . getProperty ( INPUT_PATHS ) ) ; collector . run ( ) ; } catch ( Throwable e ) { LOG . error ( ""Failed for "" + name + "" ,job: "" + collector == null ? null : collector . getJob ( ) + "" failed for "" + props . getProperty ( INPUT_PATHS ) + "" Exception:"" + e . getLocalizedMessage ( ) ) ; errorQueue . add ( new SweeperError ( name , props . get ( INPUT_PATHS ) . toString ( ) , e ) ) ; } } +" +41,"public void run ( ) { KafkaCollector collector = null ; try { LOG . info ( ""Starting runner for "" + this . props . getProperty ( TOPIC_AND_HOUR ) ) ; collector = new KafkaCollector ( props , name , topic ) ; collector . run ( ) ; } catch ( Throwable e ) { LOG . error ( ""Failed for "" + name + "" ,job: "" + collector == null ? null : collector . getJob ( ) + "" failed for "" + props . getProperty ( INPUT_PATHS ) + "" Exception:"" + e . getLocalizedMessage ( ) ) ; errorQueue . add ( new SweeperError ( name , props . get ( INPUT_PATHS ) . toString ( ) , e ) ) ; } } +","public void run ( ) { KafkaCollector collector = null ; try { LOG . info ( ""Starting runner for "" + this . props . getProperty ( TOPIC_AND_HOUR ) ) ; collector = new KafkaCollector ( props , name , topic ) ; LOG . info ( ""Running "" + name + "" for input "" + props . getProperty ( INPUT_PATHS ) ) ; collector . run ( ) ; } catch ( Throwable e ) { LOG . error ( ""Failed for "" + name + "" ,job: "" + collector == null ? null : collector . getJob ( ) + "" failed for "" + props . getProperty ( INPUT_PATHS ) + "" Exception:"" + e . getLocalizedMessage ( ) ) ; errorQueue . add ( new SweeperError ( name , props . get ( INPUT_PATHS ) . 
toString ( ) , e ) ) ; } } +" +42,"public void run ( ) { KafkaCollector collector = null ; try { LOG . info ( ""Starting runner for "" + this . props . getProperty ( TOPIC_AND_HOUR ) ) ; collector = new KafkaCollector ( props , name , topic ) ; LOG . info ( ""Running "" + name + "" for input "" + props . getProperty ( INPUT_PATHS ) ) ; collector . run ( ) ; } catch ( Throwable e ) { errorQueue . add ( new SweeperError ( name , props . get ( INPUT_PATHS ) . toString ( ) , e ) ) ; } } +","public void run ( ) { KafkaCollector collector = null ; try { LOG . info ( ""Starting runner for "" + this . props . getProperty ( TOPIC_AND_HOUR ) ) ; collector = new KafkaCollector ( props , name , topic ) ; LOG . info ( ""Running "" + name + "" for input "" + props . getProperty ( INPUT_PATHS ) ) ; collector . run ( ) ; } catch ( Throwable e ) { LOG . error ( ""Failed for "" + name + "" ,job: "" + collector == null ? null : collector . getJob ( ) + "" failed for "" + props . getProperty ( INPUT_PATHS ) + "" Exception:"" + e . getLocalizedMessage ( ) ) ; errorQueue . add ( new SweeperError ( name , props . get ( INPUT_PATHS ) . toString ( ) , e ) ) ; } } +" +43,"protected Response JSONRepresentation ( Object entity ) { try { String jsonStr = toJson ( entity ) ; return OK ( jsonStr ) ; } catch ( IOException e ) { return serverError ( ) ; } } +","protected Response JSONRepresentation ( Object entity ) { try { String jsonStr = toJson ( entity ) ; return OK ( jsonStr ) ; } catch ( IOException e ) { _logger . error ( ""Failed to convert "" + entity + "" to JSON response"" , e ) ; return serverError ( ) ; } } +" +44,"private static LoggingConfigurationType applyProfilingConfiguration ( PrismObject < SystemConfigurationType > systemConfigurationPrism , ProfilingConfigurationType profilingConfig , boolean subsystemProfiling ) { SystemConfigurationType systemConfig = systemConfigurationPrism . asObjectable ( ) ; LoggingConfigurationType loggingConfig = systemConfig . getLogging ( ) ; if ( loggingConfig != null ) { if ( checkXsdBooleanValue ( profilingConfig . isRequestFilter ( ) ) ) { ClassLoggerConfigurationType requestFilterLogger = new ClassLoggerConfigurationType ( ) ; requestFilterLogger . setPackage ( REQUEST_FILTER_LOGGER_CLASS_NAME ) ; requestFilterLogger . setLevel ( LoggingLevelType . TRACE ) ; requestFilterLogger . getAppender ( ) . clear ( ) ; requestFilterLogger . getAppender ( ) . add ( APPENDER_IDM_PROFILE ) ; loggingConfig . getClassLogger ( ) . add ( requestFilterLogger ) ; } if ( subsystemProfiling ) { ClassLoggerConfigurationType subsystemLogger = new ClassLoggerConfigurationType ( ) ; subsystemLogger . setPackage ( SUBSYSTEM_PROFILING_LOGGER ) ; subsystemLogger . setLevel ( LoggingLevelType . DEBUG ) ; subsystemLogger . getAppender ( ) . clear ( ) ; subsystemLogger . getAppender ( ) . add ( APPENDER_IDM_PROFILE ) ; loggingConfig . getClassLogger ( ) . add ( subsystemLogger ) ; } } return loggingConfig ; } +","private static LoggingConfigurationType applyProfilingConfiguration ( PrismObject < SystemConfigurationType > systemConfigurationPrism , ProfilingConfigurationType profilingConfig , boolean subsystemProfiling ) { SystemConfigurationType systemConfig = systemConfigurationPrism . asObjectable ( ) ; LoggingConfigurationType loggingConfig = systemConfig . getLogging ( ) ; if ( loggingConfig != null ) { if ( checkXsdBooleanValue ( profilingConfig . isRequestFilter ( ) ) ) { ClassLoggerConfigurationType requestFilterLogger = new ClassLoggerConfigurationType ( ) ; requestFilterLogger . 
setPackage ( REQUEST_FILTER_LOGGER_CLASS_NAME ) ; requestFilterLogger . setLevel ( LoggingLevelType . TRACE ) ; requestFilterLogger . getAppender ( ) . clear ( ) ; requestFilterLogger . getAppender ( ) . add ( APPENDER_IDM_PROFILE ) ; loggingConfig . getClassLogger ( ) . add ( requestFilterLogger ) ; } if ( subsystemProfiling ) { ClassLoggerConfigurationType subsystemLogger = new ClassLoggerConfigurationType ( ) ; subsystemLogger . setPackage ( SUBSYSTEM_PROFILING_LOGGER ) ; subsystemLogger . setLevel ( LoggingLevelType . DEBUG ) ; subsystemLogger . getAppender ( ) . clear ( ) ; subsystemLogger . getAppender ( ) . add ( APPENDER_IDM_PROFILE ) ; loggingConfig . getClassLogger ( ) . add ( subsystemLogger ) ; } } LOGGER . info ( ""Applying profiling configuration."" ) ; return loggingConfig ; } +" +45,"public List findAll ( ) { try { String queryString = ""from NZielobjekt"" ; Query queryObject = getSession ( ) . createQuery ( queryString ) ; return queryObject . list ( ) ; } catch ( RuntimeException re ) { log . error ( ""find all failed"" , re ) ; throw re ; } } +","public List findAll ( ) { log . debug ( ""finding all NZielobjekt instances"" ) ; try { String queryString = ""from NZielobjekt"" ; Query queryObject = getSession ( ) . createQuery ( queryString ) ; return queryObject . list ( ) ; } catch ( RuntimeException re ) { log . error ( ""find all failed"" , re ) ; throw re ; } } +" +46,"public List findAll ( ) { log . debug ( ""finding all NZielobjekt instances"" ) ; try { String queryString = ""from NZielobjekt"" ; Query queryObject = getSession ( ) . createQuery ( queryString ) ; return queryObject . list ( ) ; } catch ( RuntimeException re ) { throw re ; } } +","public List findAll ( ) { log . debug ( ""finding all NZielobjekt instances"" ) ; try { String queryString = ""from NZielobjekt"" ; Query queryObject = getSession ( ) . createQuery ( queryString ) ; return queryObject . list ( ) ; } catch ( RuntimeException re ) { log . error ( ""find all failed"" , re ) ; throw re ; } } +" +47,"private double computeLoadFactor ( ) { ensureInEventLoop ( ) ; long pendingRequestCountMin = Long . MAX_VALUE ; long pendingRequestCountTotal = 0L ; long channelCount = 0 ; for ( Channel channel : this . availableChannels ) { final RntbdRequestManager manager = channel . pipeline ( ) . get ( RntbdRequestManager . class ) ; if ( manager == null ) { continue ; } final long pendingRequestCount = manager . pendingRequestCount ( ) ; if ( pendingRequestCount < pendingRequestCountMin ) { pendingRequestCountMin = pendingRequestCount ; } pendingRequestCountTotal += pendingRequestCount ; channelCount ++ ; } for ( Channel channel : this . acquiredChannels . values ( ) ) { final RntbdRequestManager manager = channel . pipeline ( ) . get ( RntbdRequestManager . class ) ; if ( manager != null ) { final long pendingRequestCount = manager . pendingRequestCount ( ) ; if ( pendingRequestCount < pendingRequestCountMin ) { pendingRequestCountMin = pendingRequestCount ; } pendingRequestCountTotal += pendingRequestCount ; } channelCount ++ ; } return channelCount > 0 ? ( double ) pendingRequestCountTotal / ( channelCount * this . maxRequestsPerChannel ) : 1D ; } +","private double computeLoadFactor ( ) { ensureInEventLoop ( ) ; long pendingRequestCountMin = Long . MAX_VALUE ; long pendingRequestCountTotal = 0L ; long channelCount = 0 ; for ( Channel channel : this . availableChannels ) { final RntbdRequestManager manager = channel . pipeline ( ) . get ( RntbdRequestManager . class ) ; if ( manager == null ) { logger . 
debug ( ""Channel({}) connection lost"" , channel ) ; continue ; } final long pendingRequestCount = manager . pendingRequestCount ( ) ; if ( pendingRequestCount < pendingRequestCountMin ) { pendingRequestCountMin = pendingRequestCount ; } pendingRequestCountTotal += pendingRequestCount ; channelCount ++ ; } for ( Channel channel : this . acquiredChannels . values ( ) ) { final RntbdRequestManager manager = channel . pipeline ( ) . get ( RntbdRequestManager . class ) ; if ( manager != null ) { final long pendingRequestCount = manager . pendingRequestCount ( ) ; if ( pendingRequestCount < pendingRequestCountMin ) { pendingRequestCountMin = pendingRequestCount ; } pendingRequestCountTotal += pendingRequestCount ; } channelCount ++ ; } return channelCount > 0 ? ( double ) pendingRequestCountTotal / ( channelCount * this . maxRequestsPerChannel ) : 1D ; } +" +48,"@ SuppressWarnings ( ""unchecked"" ) static void findByQuery ( final EntityManager em , final String query ) { Query q = em . createNamedQuery ( query ) ; List < User > users = q . getResultList ( ) ; if ( users == null || users . isEmpty ( ) ) { logger . info ( ""0 Users Returned"" ) ; return ; } System . out . println ( ""#######################START##########################################"" ) ; logger . info ( ""\t\t Total number of users:"" + users . size ( ) ) ; logger . info ( ""\t\t User's total tweets:"" + users . get ( 0 ) . getTweets ( ) . size ( ) ) ; printTweets ( users ) ; logger . info ( ""\n"" ) ; System . out . println ( ""#######################END############################################"" ) ; logger . info ( ""\n"" ) ; } +","@ SuppressWarnings ( ""unchecked"" ) static void findByQuery ( final EntityManager em , final String query ) { Query q = em . createNamedQuery ( query ) ; logger . info ( ""[On Find All by Query]"" ) ; List < User > users = q . getResultList ( ) ; if ( users == null || users . isEmpty ( ) ) { logger . info ( ""0 Users Returned"" ) ; return ; } System . out . println ( ""#######################START##########################################"" ) ; logger . info ( ""\t\t Total number of users:"" + users . size ( ) ) ; logger . info ( ""\t\t User's total tweets:"" + users . get ( 0 ) . getTweets ( ) . size ( ) ) ; printTweets ( users ) ; logger . info ( ""\n"" ) ; System . out . println ( ""#######################END############################################"" ) ; logger . info ( ""\n"" ) ; } +" +49,"@ SuppressWarnings ( ""unchecked"" ) static void findByQuery ( final EntityManager em , final String query ) { Query q = em . createNamedQuery ( query ) ; logger . info ( ""[On Find All by Query]"" ) ; List < User > users = q . getResultList ( ) ; if ( users == null || users . isEmpty ( ) ) { return ; } System . out . println ( ""#######################START##########################################"" ) ; logger . info ( ""\t\t Total number of users:"" + users . size ( ) ) ; logger . info ( ""\t\t User's total tweets:"" + users . get ( 0 ) . getTweets ( ) . size ( ) ) ; printTweets ( users ) ; logger . info ( ""\n"" ) ; System . out . println ( ""#######################END############################################"" ) ; logger . info ( ""\n"" ) ; } +","@ SuppressWarnings ( ""unchecked"" ) static void findByQuery ( final EntityManager em , final String query ) { Query q = em . createNamedQuery ( query ) ; logger . info ( ""[On Find All by Query]"" ) ; List < User > users = q . getResultList ( ) ; if ( users == null || users . isEmpty ( ) ) { logger . 
info ( ""0 Users Returned"" ) ; return ; } System . out . println ( ""#######################START##########################################"" ) ; logger . info ( ""\t\t Total number of users:"" + users . size ( ) ) ; logger . info ( ""\t\t User's total tweets:"" + users . get ( 0 ) . getTweets ( ) . size ( ) ) ; printTweets ( users ) ; logger . info ( ""\n"" ) ; System . out . println ( ""#######################END############################################"" ) ; logger . info ( ""\n"" ) ; } +" +50,"@ SuppressWarnings ( ""unchecked"" ) static void findByQuery ( final EntityManager em , final String query ) { Query q = em . createNamedQuery ( query ) ; logger . info ( ""[On Find All by Query]"" ) ; List < User > users = q . getResultList ( ) ; if ( users == null || users . isEmpty ( ) ) { logger . info ( ""0 Users Returned"" ) ; return ; } System . out . println ( ""#######################START##########################################"" ) ; logger . info ( ""\t\t User's total tweets:"" + users . get ( 0 ) . getTweets ( ) . size ( ) ) ; printTweets ( users ) ; logger . info ( ""\n"" ) ; System . out . println ( ""#######################END############################################"" ) ; logger . info ( ""\n"" ) ; } +","@ SuppressWarnings ( ""unchecked"" ) static void findByQuery ( final EntityManager em , final String query ) { Query q = em . createNamedQuery ( query ) ; logger . info ( ""[On Find All by Query]"" ) ; List < User > users = q . getResultList ( ) ; if ( users == null || users . isEmpty ( ) ) { logger . info ( ""0 Users Returned"" ) ; return ; } System . out . println ( ""#######################START##########################################"" ) ; logger . info ( ""\t\t Total number of users:"" + users . size ( ) ) ; logger . info ( ""\t\t User's total tweets:"" + users . get ( 0 ) . getTweets ( ) . size ( ) ) ; printTweets ( users ) ; logger . info ( ""\n"" ) ; System . out . println ( ""#######################END############################################"" ) ; logger . info ( ""\n"" ) ; } +" +51,"@ SuppressWarnings ( ""unchecked"" ) static void findByQuery ( final EntityManager em , final String query ) { Query q = em . createNamedQuery ( query ) ; logger . info ( ""[On Find All by Query]"" ) ; List < User > users = q . getResultList ( ) ; if ( users == null || users . isEmpty ( ) ) { logger . info ( ""0 Users Returned"" ) ; return ; } System . out . println ( ""#######################START##########################################"" ) ; logger . info ( ""\t\t Total number of users:"" + users . size ( ) ) ; printTweets ( users ) ; logger . info ( ""\n"" ) ; System . out . println ( ""#######################END############################################"" ) ; logger . info ( ""\n"" ) ; } +","@ SuppressWarnings ( ""unchecked"" ) static void findByQuery ( final EntityManager em , final String query ) { Query q = em . createNamedQuery ( query ) ; logger . info ( ""[On Find All by Query]"" ) ; List < User > users = q . getResultList ( ) ; if ( users == null || users . isEmpty ( ) ) { logger . info ( ""0 Users Returned"" ) ; return ; } System . out . println ( ""#######################START##########################################"" ) ; logger . info ( ""\t\t Total number of users:"" + users . size ( ) ) ; logger . info ( ""\t\t User's total tweets:"" + users . get ( 0 ) . getTweets ( ) . size ( ) ) ; printTweets ( users ) ; logger . info ( ""\n"" ) ; System . out . println ( ""#######################END############################################"" ) ; logger . 
info ( ""\n"" ) ; } +" +52,"public void start ( ) { if ( container . getState ( ) . isStarted ( ) ) throw new IllegalArgumentException ( ""Container "" + description + "" failed to start because it is currently "" + container . getState ( ) ) ; LocalConfiguration config = getConfiguration ( ) ; int servletPort = portSupplier . getAsInt ( ) ; int containerRmiPort = portSupplier . getAsInt ( ) ; int tomcatAjpPort = portSupplier . getAsInt ( ) ; config . setProperty ( ServletPropertySet . PORT , Integer . toString ( servletPort ) ) ; config . setProperty ( GeneralPropertySet . RMI_PORT , Integer . toString ( containerRmiPort ) ) ; config . setProperty ( TomcatPropertySet . AJP_PORT , Integer . toString ( tomcatAjpPort ) ) ; config . setProperty ( GeneralPropertySet . PORT_OFFSET , ""0"" ) ; int jvmJmxPort = portSupplier . getAsInt ( ) ; String jvmArgs = ""-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address="" + jvmJmxPort ; if ( SystemUtils . isJavaVersionAtLeast ( JavaVersion . JAVA_9 ) ) { jvmArgs += "" --add-opens java.base/java.lang.module=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.module=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.reflect=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.misc=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.ref=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.platform.cgroupv1=ALL-UNNAMED"" + "" --add-opens jdk.management/com.sun.management.internal=ALL-UNNAMED"" ; } config . setProperty ( GeneralPropertySet . START_JVMARGS , jvmArgs ) ; container . setConfiguration ( config ) ; try { writeSettings ( ) ; container . start ( ) ; } catch ( Exception e ) { throw new RuntimeException ( ""Something very bad happened to this container when starting. Check the cargo_logs folder for container logs."" , e ) ; } } +","public void start ( ) { if ( container . getState ( ) . isStarted ( ) ) throw new IllegalArgumentException ( ""Container "" + description + "" failed to start because it is currently "" + container . getState ( ) ) ; LocalConfiguration config = getConfiguration ( ) ; int servletPort = portSupplier . getAsInt ( ) ; int containerRmiPort = portSupplier . getAsInt ( ) ; int tomcatAjpPort = portSupplier . getAsInt ( ) ; config . setProperty ( ServletPropertySet . PORT , Integer . toString ( servletPort ) ) ; config . setProperty ( GeneralPropertySet . RMI_PORT , Integer . toString ( containerRmiPort ) ) ; config . setProperty ( TomcatPropertySet . AJP_PORT , Integer . toString ( tomcatAjpPort ) ) ; config . setProperty ( GeneralPropertySet . PORT_OFFSET , ""0"" ) ; int jvmJmxPort = portSupplier . getAsInt ( ) ; String jvmArgs = ""-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address="" + jvmJmxPort ; if ( SystemUtils . isJavaVersionAtLeast ( JavaVersion . JAVA_9 ) ) { jvmArgs += "" --add-opens java.base/java.lang.module=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.module=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.reflect=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.misc=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.ref=ALL-UNNAMED"" + "" --add-opens java.base/jdk.internal.platform.cgroupv1=ALL-UNNAMED"" + "" --add-opens jdk.management/com.sun.management.internal=ALL-UNNAMED"" ; } config . setProperty ( GeneralPropertySet . START_JVMARGS , jvmArgs ) ; container . setConfiguration ( config ) ; try { logger . info ( ""Starting container {} RMI Port: {}"" , description , jvmJmxPort ) ; writeSettings ( ) ; container . 
start ( ) ; } catch ( Exception e ) { throw new RuntimeException ( ""Something very bad happened to this container when starting. Check the cargo_logs folder for container logs."" , e ) ; } } +" 53,"protected void execute ( final Object event ) throws Exception { if ( this . countEvents ) { this . eventCount ++ ; } @ SuppressWarnings ( ""unchecked"" ) final OutputPort < Object > selectedOutputPort = ( OutputPort < Object > ) this . selectOutputPort ( this . rootEventMatcher , event ) ; if ( selectedOutputPort != null ) { selectedOutputPort . send ( event ) ; } else { if ( this . reportUnknown ) { final String className = event . getClass ( ) . getCanonicalName ( ) ; Integer hits = this . unknownRecords . get ( className ) ; if ( hits == null ) { this . unknownRecords . put ( className , Integer . valueOf ( 1 ) ) ; } else { hits ++ ; this . unknownRecords . put ( className , hits ) ; if ( ( hits % DynamicEventDispatcher . LOOP_COUNT ) == 0 ) { DynamicEventDispatcher . LOGGER . warn ( ""Event occurrences {} of unknown event type {}."" , hits , className ) ; } } } } } +","protected void execute ( final Object event ) throws Exception { if ( this . countEvents ) { this . eventCount ++ ; } @ SuppressWarnings ( ""unchecked"" ) final OutputPort < Object > selectedOutputPort = ( OutputPort < Object > ) this . selectOutputPort ( this . rootEventMatcher , event ) ; if ( selectedOutputPort != null ) { selectedOutputPort . send ( event ) ; } else { if ( this . reportUnknown ) { final String className = event . getClass ( ) . getCanonicalName ( ) ; Integer hits = this . unknownRecords . get ( className ) ; if ( hits == null ) { DynamicEventDispatcher . LOGGER . warn ( ""Configuration error: New unknown event type {}."" , className ) ; this . unknownRecords . put ( className , Integer . valueOf ( 1 ) ) ; } else { hits ++ ; this . unknownRecords . put ( className , hits ) ; if ( ( hits % DynamicEventDispatcher . LOOP_COUNT ) == 0 ) { DynamicEventDispatcher . LOGGER . warn ( ""Event occurrences {} of unknown event type {}."" , hits , className ) ; } } } } } +" 54,"protected void execute ( final Object event ) throws Exception { if ( this . countEvents ) { this . eventCount ++ ; } @ SuppressWarnings ( ""unchecked"" ) final OutputPort < Object > selectedOutputPort = ( OutputPort < Object > ) this . selectOutputPort ( this . rootEventMatcher , event ) ; if ( selectedOutputPort != null ) { selectedOutputPort . send ( event ) ; } else { if ( this . reportUnknown ) { final String className = event . getClass ( ) . getCanonicalName ( ) ; Integer hits = this . unknownRecords . get ( className ) ; if ( hits == null ) { DynamicEventDispatcher . LOGGER . warn ( ""Configuration error: New unknown event type {}."" , className ) ; this . unknownRecords . put ( className , Integer . valueOf ( 1 ) ) ; } else { hits ++ ; this . unknownRecords . put ( className , hits ) ; if ( ( hits % DynamicEventDispatcher . LOOP_COUNT ) == 0 ) { } } } } } +","protected void execute ( final Object event ) throws Exception { if ( this . countEvents ) { this . eventCount ++ ; } @ SuppressWarnings ( ""unchecked"" ) final OutputPort < Object > selectedOutputPort = ( OutputPort < Object > ) this . selectOutputPort ( this . rootEventMatcher , event ) ; if ( selectedOutputPort != null ) { selectedOutputPort . send ( event ) ; } else { if ( this . reportUnknown ) { final String className = event . getClass ( ) . getCanonicalName ( ) ; Integer hits = this . unknownRecords . get ( className ) ; if ( hits == null ) { DynamicEventDispatcher . LOGGER .
warn ( ""Configuration error: New unknown event type {}."" , className ) ; this . unknownRecords . put ( className , Integer . valueOf ( 1 ) ) ; } else { hits ++ ; this . unknownRecords . put ( className , hits ) ; if ( ( hits % DynamicEventDispatcher . LOOP_COUNT ) == 0 ) { DynamicEventDispatcher . LOGGER . warn ( ""Event occurrences {} of unknown event type {}."" , hits , className ) ; } } } } } +" 55,"protected void setRecordCipher ( ) { KeySet keySet = getKeySet ( tlsContext ) ; RecordCipher recordCipher = RecordCipherFactory . getRecordCipher ( tlsContext , keySet ) ; tlsContext . getRecordLayer ( ) . setRecordCipher ( recordCipher ) ; } +","protected void setRecordCipher ( ) { KeySet keySet = getKeySet ( tlsContext ) ; LOGGER . debug ( ""Setting new Cipher in RecordLayer"" ) ; RecordCipher recordCipher = RecordCipherFactory . getRecordCipher ( tlsContext , keySet ) ; tlsContext . getRecordLayer ( ) . setRecordCipher ( recordCipher ) ; } +" 56,"private boolean isMetadataRequest ( ContainerRequestContext context , FedizContext fedConfig ) { String requestPath = context . getUriInfo ( ) . getPath ( ) ; if ( requestPath . indexOf ( FederationConstants . METADATA_PATH_URI ) != - 1 || requestPath . indexOf ( getMetadataURI ( fedConfig ) ) != - 1 ) { if ( LOG . isInfoEnabled ( ) ) { } FedizProcessor wfProc = FedizProcessorFactory . newFedizProcessor ( fedConfig . getProtocol ( ) ) ; try { HttpServletRequest request = messageContext . getHttpServletRequest ( ) ; Document metadata = wfProc . getMetaData ( request , fedConfig ) ; String metadataStr = DOM2Writer . nodeToString ( metadata ) ; ResponseBuilder response = Response . ok ( metadataStr , ""text/xml"" ) ; context . abortWith ( response . build ( ) ) ; return true ; } catch ( Exception ex ) { LOG . error ( ""Failed to get metadata document: "" + ex . getMessage ( ) ) ; throw ExceptionUtils . toInternalServerErrorException ( ex , null ) ; } } return false ; } +","private boolean isMetadataRequest ( ContainerRequestContext context , FedizContext fedConfig ) { String requestPath = context . getUriInfo ( ) . getPath ( ) ; if ( requestPath . indexOf ( FederationConstants . METADATA_PATH_URI ) != - 1 || requestPath . indexOf ( getMetadataURI ( fedConfig ) ) != - 1 ) { if ( LOG . isInfoEnabled ( ) ) { LOG . info ( ""Metadata document requested"" ) ; } FedizProcessor wfProc = FedizProcessorFactory . newFedizProcessor ( fedConfig . getProtocol ( ) ) ; try { HttpServletRequest request = messageContext . getHttpServletRequest ( ) ; Document metadata = wfProc . getMetaData ( request , fedConfig ) ; String metadataStr = DOM2Writer . nodeToString ( metadata ) ; ResponseBuilder response = Response . ok ( metadataStr , ""text/xml"" ) ; context . abortWith ( response . build ( ) ) ; return true ; } catch ( Exception ex ) { LOG . error ( ""Failed to get metadata document: "" + ex . getMessage ( ) ) ; throw ExceptionUtils . toInternalServerErrorException ( ex , null ) ; } } return false ; } +" 57,"private boolean isMetadataRequest ( ContainerRequestContext context , FedizContext fedConfig ) { String requestPath = context . getUriInfo ( ) . getPath ( ) ; if ( requestPath . indexOf ( FederationConstants . METADATA_PATH_URI ) != - 1 || requestPath . indexOf ( getMetadataURI ( fedConfig ) ) != - 1 ) { if ( LOG . isInfoEnabled ( ) ) { LOG . info ( ""Metadata document requested"" ) ; } FedizProcessor wfProc = FedizProcessorFactory . newFedizProcessor ( fedConfig . getProtocol ( ) ) ; try { HttpServletRequest request = messageContext .
getHttpServletRequest ( ) ; Document metadata = wfProc . getMetaData ( request , fedConfig ) ; String metadataStr = DOM2Writer . nodeToString ( metadata ) ; ResponseBuilder response = Response . ok ( metadataStr , ""text/xml"" ) ; context . abortWith ( response . build ( ) ) ; return true ; } catch ( Exception ex ) { throw ExceptionUtils . toInternalServerErrorException ( ex , null ) ; } } return false ; } +","private boolean isMetadataRequest ( ContainerRequestContext context , FedizContext fedConfig ) { String requestPath = context . getUriInfo ( ) . getPath ( ) ; if ( requestPath . indexOf ( FederationConstants . METADATA_PATH_URI ) != - 1 || requestPath . indexOf ( getMetadataURI ( fedConfig ) ) != - 1 ) { if ( LOG . isInfoEnabled ( ) ) { LOG . info ( ""Metadata document requested"" ) ; } FedizProcessor wfProc = FedizProcessorFactory . newFedizProcessor ( fedConfig . getProtocol ( ) ) ; try { HttpServletRequest request = messageContext . getHttpServletRequest ( ) ; Document metadata = wfProc . getMetaData ( request , fedConfig ) ; String metadataStr = DOM2Writer . nodeToString ( metadata ) ; ResponseBuilder response = Response . ok ( metadataStr , ""text/xml"" ) ; context . abortWith ( response . build ( ) ) ; return true ; } catch ( Exception ex ) { LOG . error ( ""Failed to get metadata document: "" + ex . getMessage ( ) ) ; throw ExceptionUtils . toInternalServerErrorException ( ex , null ) ; } } return false ; } +" +58,"private void handleConnectException ( RuntimeException ce ) { reconnectTracker . attemptFailed ( ) ; if ( reconnectTracker . shouldTryAgain ( ) ) { long waitTimeMs = reconnectTracker . getNextWaitTimeMs ( ) ; } else { throw shutDownAndThrow ( new JetException ( ""Failed to connect to database"" + getCause ( ce ) ) ) ; } } +","private void handleConnectException ( RuntimeException ce ) { reconnectTracker . attemptFailed ( ) ; if ( reconnectTracker . shouldTryAgain ( ) ) { long waitTimeMs = reconnectTracker . getNextWaitTimeMs ( ) ; logger . warning ( ""Failed to initialize the connector task, retrying in "" + waitTimeMs + ""ms"" + getCause ( ce ) ) ; } else { throw shutDownAndThrow ( new JetException ( ""Failed to connect to database"" + getCause ( ce ) ) ) ; } } +" +59,"public < T extends ComputeMetadata > T findObjectOfTypeForServerOrNull ( Set < ? extends T > supply , String type , final String objectId , final RegionAndId serverInRegion ) { try { return find ( supply , new Predicate < T > ( ) { @ Override public boolean apply ( T input ) { return input . getId ( ) . equals ( RegionAndId . fromRegionAndId ( serverInRegion . getRegion ( ) , objectId ) . slashEncode ( ) ) ; } } ) ; } catch ( NoSuchElementException e ) { } return null ; } +","public < T extends ComputeMetadata > T findObjectOfTypeForServerOrNull ( Set < ? extends T > supply , String type , final String objectId , final RegionAndId serverInRegion ) { try { return find ( supply , new Predicate < T > ( ) { @ Override public boolean apply ( T input ) { return input . getId ( ) . equals ( RegionAndId . fromRegionAndId ( serverInRegion . getRegion ( ) , objectId ) . slashEncode ( ) ) ; } } ) ; } catch ( NoSuchElementException e ) { logger . trace ( ""could not find %s with id(%s) for server(%s)"" , type , objectId , serverInRegion ) ; } return null ; } +" +60,"private void createACL ( UnManagedCifsShareACL origACL , List < CifsShareACL > shareACLList , FileShare fileshare ) { CifsShareACL shareACL = null ; shareACL = new CifsShareACL ( ) ; shareACL . setId ( URIUtil . createId ( CifsShareACL . 
class ) ) ; String user = origACL . getUser ( ) ; if ( user != null ) { shareACL . setUser ( user ) ; } else { shareACL . setGroup ( origACL . getGroup ( ) ) ; } String permissionText = null ; switch ( origACL . getPermission ( ) . toLowerCase ( ) ) { case ""read"" : permissionText = FileControllerConstants . CIFS_SHARE_PERMISSION_READ ; break ; case ""change"" : permissionText = FileControllerConstants . CIFS_SHARE_PERMISSION_CHANGE ; break ; case ""full"" : case ""fullcontrol"" : permissionText = FileControllerConstants . CIFS_SHARE_PERMISSION_FULLCONTROL ; break ; } shareACL . setPermission ( permissionText ) ; shareACL . setShareName ( origACL . getShareName ( ) ) ; shareACL . setFileSystemId ( fileshare . getId ( ) ) ; shareACLList . add ( shareACL ) ; } +","private void createACL ( UnManagedCifsShareACL origACL , List < CifsShareACL > shareACLList , FileShare fileshare ) { CifsShareACL shareACL = null ; shareACL = new CifsShareACL ( ) ; shareACL . setId ( URIUtil . createId ( CifsShareACL . class ) ) ; String user = origACL . getUser ( ) ; if ( user != null ) { shareACL . setUser ( user ) ; } else { shareACL . setGroup ( origACL . getGroup ( ) ) ; } String permissionText = null ; switch ( origACL . getPermission ( ) . toLowerCase ( ) ) { case ""read"" : permissionText = FileControllerConstants . CIFS_SHARE_PERMISSION_READ ; break ; case ""change"" : permissionText = FileControllerConstants . CIFS_SHARE_PERMISSION_CHANGE ; break ; case ""full"" : case ""fullcontrol"" : permissionText = FileControllerConstants . CIFS_SHARE_PERMISSION_FULLCONTROL ; break ; } shareACL . setPermission ( permissionText ) ; shareACL . setShareName ( origACL . getShareName ( ) ) ; shareACL . setFileSystemId ( fileshare . getId ( ) ) ; shareACLList . add ( shareACL ) ; _logger . info ( ""share ACLs details {}"" , shareACL . toString ( ) ) ; } +" +61,"public BigDecimal apply ( final BigDecimal ... args ) { if ( args . length != 2 ) return null ; try { final BigDecimal result = args [ 0 ] . pow ( args [ 1 ] . intValueExact ( ) , MathContext . DECIMAL128 ) ; return result ; } catch ( final ArithmeticException e ) { return null ; } } +","public BigDecimal apply ( final BigDecimal ... args ) { if ( args . length != 2 ) return null ; try { final BigDecimal result = args [ 0 ] . pow ( args [ 1 ] . intValueExact ( ) , MathContext . DECIMAL128 ) ; return result ; } catch ( final ArithmeticException e ) { _logger . log ( Level . FINE , """" , e ) ; return null ; } } +" +62,"public static int getCategoriesCount ( long groupId , long [ ] parentCategoryIds ) throws RemoteException { try { int returnValue = MBCategoryServiceUtil . getCategoriesCount ( groupId , parentCategoryIds ) ; return returnValue ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } } +","public static int getCategoriesCount ( long groupId , long [ ] parentCategoryIds ) throws RemoteException { try { int returnValue = MBCategoryServiceUtil . getCategoriesCount ( groupId , parentCategoryIds ) ; return returnValue ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } } +" +63,"public void handle ( final HttpServerRequest request ) { Logger log = RequestLoggerFactory . getLogger ( CORSHandler . class , request ) ; addCORSHeaders ( request , log ) ; if ( isOptionsRequest ( request ) ) { request . response ( ) . setStatusCode ( StatusCode . OK . getStatusCode ( ) ) ; request . response ( ) . 
end ( ) ; } } +","public void handle ( final HttpServerRequest request ) { Logger log = RequestLoggerFactory . getLogger ( CORSHandler . class , request ) ; addCORSHeaders ( request , log ) ; if ( isOptionsRequest ( request ) ) { log . info ( ""Got OPTIONS request. Respond with statusCode 200"" ) ; request . response ( ) . setStatusCode ( StatusCode . OK . getStatusCode ( ) ) ; request . response ( ) . end ( ) ; } } +" +64,"static DirectoryStream < Path > getInstance ( MCRDirectory dir , MCRPath path ) throws IOException { DirectoryStream . Filter < Path > filter = ( dir instanceof MCRFileCollection ) ? MCRFileCollectionFilter . FILTER : AcceptAllFilter . FILTER ; DirectoryStream < Path > baseDirectoryStream = Files . newDirectoryStream ( dir . getLocalPath ( ) , filter ) ; LOGGER . debug ( ""baseStream {}"" , baseDirectoryStream . getClass ( ) ) ; if ( baseDirectoryStream instanceof java . nio . file . SecureDirectoryStream ) { LOGGER . debug ( ""Returning SecureDirectoryStream"" ) ; return new SecureDirectoryStream ( dir , path , ( java . nio . file . SecureDirectoryStream < Path > ) baseDirectoryStream ) ; } return new SimpleDirectoryStream ( path , baseDirectoryStream ) ; } +","static DirectoryStream < Path > getInstance ( MCRDirectory dir , MCRPath path ) throws IOException { DirectoryStream . Filter < Path > filter = ( dir instanceof MCRFileCollection ) ? MCRFileCollectionFilter . FILTER : AcceptAllFilter . FILTER ; LOGGER . debug ( ""Dir {}, class {}, filter {}"" , path , dir . getClass ( ) , filter . getClass ( ) ) ; DirectoryStream < Path > baseDirectoryStream = Files . newDirectoryStream ( dir . getLocalPath ( ) , filter ) ; LOGGER . debug ( ""baseStream {}"" , baseDirectoryStream . getClass ( ) ) ; if ( baseDirectoryStream instanceof java . nio . file . SecureDirectoryStream ) { LOGGER . debug ( ""Returning SecureDirectoryStream"" ) ; return new SecureDirectoryStream ( dir , path , ( java . nio . file . SecureDirectoryStream < Path > ) baseDirectoryStream ) ; } return new SimpleDirectoryStream ( path , baseDirectoryStream ) ; } +" +65,"static DirectoryStream < Path > getInstance ( MCRDirectory dir , MCRPath path ) throws IOException { DirectoryStream . Filter < Path > filter = ( dir instanceof MCRFileCollection ) ? MCRFileCollectionFilter . FILTER : AcceptAllFilter . FILTER ; LOGGER . debug ( ""Dir {}, class {}, filter {}"" , path , dir . getClass ( ) , filter . getClass ( ) ) ; DirectoryStream < Path > baseDirectoryStream = Files . newDirectoryStream ( dir . getLocalPath ( ) , filter ) ; if ( baseDirectoryStream instanceof java . nio . file . SecureDirectoryStream ) { LOGGER . debug ( ""Returning SecureDirectoryStream"" ) ; return new SecureDirectoryStream ( dir , path , ( java . nio . file . SecureDirectoryStream < Path > ) baseDirectoryStream ) ; } return new SimpleDirectoryStream ( path , baseDirectoryStream ) ; } +","static DirectoryStream < Path > getInstance ( MCRDirectory dir , MCRPath path ) throws IOException { DirectoryStream . Filter < Path > filter = ( dir instanceof MCRFileCollection ) ? MCRFileCollectionFilter . FILTER : AcceptAllFilter . FILTER ; LOGGER . debug ( ""Dir {}, class {}, filter {}"" , path , dir . getClass ( ) , filter . getClass ( ) ) ; DirectoryStream < Path > baseDirectoryStream = Files . newDirectoryStream ( dir . getLocalPath ( ) , filter ) ; LOGGER . debug ( ""baseStream {}"" , baseDirectoryStream . getClass ( ) ) ; if ( baseDirectoryStream instanceof java . nio . file . SecureDirectoryStream ) { LOGGER . 
debug ( ""Returning SecureDirectoryStream"" ) ; return new SecureDirectoryStream ( dir , path , ( java . nio . file . SecureDirectoryStream < Path > ) baseDirectoryStream ) ; } return new SimpleDirectoryStream ( path , baseDirectoryStream ) ; } +" +66,"static DirectoryStream < Path > getInstance ( MCRDirectory dir , MCRPath path ) throws IOException { DirectoryStream . Filter < Path > filter = ( dir instanceof MCRFileCollection ) ? MCRFileCollectionFilter . FILTER : AcceptAllFilter . FILTER ; LOGGER . debug ( ""Dir {}, class {}, filter {}"" , path , dir . getClass ( ) , filter . getClass ( ) ) ; DirectoryStream < Path > baseDirectoryStream = Files . newDirectoryStream ( dir . getLocalPath ( ) , filter ) ; LOGGER . debug ( ""baseStream {}"" , baseDirectoryStream . getClass ( ) ) ; if ( baseDirectoryStream instanceof java . nio . file . SecureDirectoryStream ) { return new SecureDirectoryStream ( dir , path , ( java . nio . file . SecureDirectoryStream < Path > ) baseDirectoryStream ) ; } return new SimpleDirectoryStream ( path , baseDirectoryStream ) ; } +","static DirectoryStream < Path > getInstance ( MCRDirectory dir , MCRPath path ) throws IOException { DirectoryStream . Filter < Path > filter = ( dir instanceof MCRFileCollection ) ? MCRFileCollectionFilter . FILTER : AcceptAllFilter . FILTER ; LOGGER . debug ( ""Dir {}, class {}, filter {}"" , path , dir . getClass ( ) , filter . getClass ( ) ) ; DirectoryStream < Path > baseDirectoryStream = Files . newDirectoryStream ( dir . getLocalPath ( ) , filter ) ; LOGGER . debug ( ""baseStream {}"" , baseDirectoryStream . getClass ( ) ) ; if ( baseDirectoryStream instanceof java . nio . file . SecureDirectoryStream ) { LOGGER . debug ( ""Returning SecureDirectoryStream"" ) ; return new SecureDirectoryStream ( dir , path , ( java . nio . file . SecureDirectoryStream < Path > ) baseDirectoryStream ) ; } return new SimpleDirectoryStream ( path , baseDirectoryStream ) ; } +" +67,"@ Log Object onDelete ( ) { String fullName = user . getFullName ( ) ; userManager . removeUser ( user . getId ( ) . toString ( ) ) ; alertManager . alert ( Duration . TRANSIENT , Severity . SUCCESS , messages . format ( ""user.deleted"" , fullName ) ) ; return UserList . class ; } +","@ Log Object onDelete ( ) { String fullName = user . getFullName ( ) ; userManager . removeUser ( user . getId ( ) . toString ( ) ) ; alertManager . alert ( Duration . TRANSIENT , Severity . SUCCESS , messages . format ( ""user.deleted"" , fullName ) ) ; logger . debug ( ""After deletion.. ready to return userList object"" ) ; return UserList . class ; } +" +68,"@ Test public void withFile ( ) throws Exception { File temp = new File ( tmpDir ( ) ) ; File [ ] logFiles = temp . listFiles ( SPRING_LOG_FILTER ) ; for ( File file : logFiles ) { file . delete ( ) ; } this . loggingSystem . beforeInitialize ( ) ; this . loggingSystem . initialize ( null , null , getLogFile ( null , tmpDir ( ) ) ) ; this . logger . info ( ""Hello world"" ) ; String output = this . output . toString ( ) . trim ( ) ; assertThat ( output ) . contains ( ""Hello world"" ) . doesNotContain ( ""Hidden"" ) ; assertThat ( temp . listFiles ( SPRING_LOG_FILTER ) . length ) . isGreaterThan ( 0 ) ; } +","@ Test public void withFile ( ) throws Exception { File temp = new File ( tmpDir ( ) ) ; File [ ] logFiles = temp . listFiles ( SPRING_LOG_FILTER ) ; for ( File file : logFiles ) { file . delete ( ) ; } this . loggingSystem . beforeInitialize ( ) ; this . logger . info ( ""Hidden"" ) ; this . loggingSystem . 
initialize ( null , null , getLogFile ( null , tmpDir ( ) ) ) ; this . logger . info ( ""Hello world"" ) ; String output = this . output . toString ( ) . trim ( ) ; assertThat ( output ) . contains ( ""Hello world"" ) . doesNotContain ( ""Hidden"" ) ; assertThat ( temp . listFiles ( SPRING_LOG_FILTER ) . length ) . isGreaterThan ( 0 ) ; } +" +69,"@ Test public void withFile ( ) throws Exception { File temp = new File ( tmpDir ( ) ) ; File [ ] logFiles = temp . listFiles ( SPRING_LOG_FILTER ) ; for ( File file : logFiles ) { file . delete ( ) ; } this . loggingSystem . beforeInitialize ( ) ; this . logger . info ( ""Hidden"" ) ; this . loggingSystem . initialize ( null , null , getLogFile ( null , tmpDir ( ) ) ) ; String output = this . output . toString ( ) . trim ( ) ; assertThat ( output ) . contains ( ""Hello world"" ) . doesNotContain ( ""Hidden"" ) ; assertThat ( temp . listFiles ( SPRING_LOG_FILTER ) . length ) . isGreaterThan ( 0 ) ; } +","@ Test public void withFile ( ) throws Exception { File temp = new File ( tmpDir ( ) ) ; File [ ] logFiles = temp . listFiles ( SPRING_LOG_FILTER ) ; for ( File file : logFiles ) { file . delete ( ) ; } this . loggingSystem . beforeInitialize ( ) ; this . logger . info ( ""Hidden"" ) ; this . loggingSystem . initialize ( null , null , getLogFile ( null , tmpDir ( ) ) ) ; this . logger . info ( ""Hello world"" ) ; String output = this . output . toString ( ) . trim ( ) ; assertThat ( output ) . contains ( ""Hello world"" ) . doesNotContain ( ""Hidden"" ) ; assertThat ( temp . listFiles ( SPRING_LOG_FILTER ) . length ) . isGreaterThan ( 0 ) ; } +" +70,"public void publishEvent ( final EventPublishRequestDTO request , final Set < Subscription > involvedSubscriptions ) { checkPublishRequestDTO ( request ) ; checkInvolvedSubscriptions ( involvedSubscriptions ) ; final EventPublishStartDTO eventPublishStartDTO = new EventPublishStartDTO ( request , involvedSubscriptions ) ; try { publishingQueue . put ( eventPublishStartDTO ) ; } catch ( final Exception ex ) { logger . debug ( ""publishEvent finished with exception : "" + ex ) ; } } +","public void publishEvent ( final EventPublishRequestDTO request , final Set < Subscription > involvedSubscriptions ) { logger . debug ( ""publishEvent started..."" ) ; checkPublishRequestDTO ( request ) ; checkInvolvedSubscriptions ( involvedSubscriptions ) ; final EventPublishStartDTO eventPublishStartDTO = new EventPublishStartDTO ( request , involvedSubscriptions ) ; try { publishingQueue . put ( eventPublishStartDTO ) ; } catch ( final Exception ex ) { logger . debug ( ""publishEvent finished with exception : "" + ex ) ; } } +" +71,"public void publishEvent ( final EventPublishRequestDTO request , final Set < Subscription > involvedSubscriptions ) { logger . debug ( ""publishEvent started..."" ) ; checkPublishRequestDTO ( request ) ; checkInvolvedSubscriptions ( involvedSubscriptions ) ; final EventPublishStartDTO eventPublishStartDTO = new EventPublishStartDTO ( request , involvedSubscriptions ) ; try { publishingQueue . put ( eventPublishStartDTO ) ; } catch ( final Exception ex ) { } } +","public void publishEvent ( final EventPublishRequestDTO request , final Set < Subscription > involvedSubscriptions ) { logger . debug ( ""publishEvent started..."" ) ; checkPublishRequestDTO ( request ) ; checkInvolvedSubscriptions ( involvedSubscriptions ) ; final EventPublishStartDTO eventPublishStartDTO = new EventPublishStartDTO ( request , involvedSubscriptions ) ; try { publishingQueue . 
put ( eventPublishStartDTO ) ; } catch ( final Exception ex ) { logger . debug ( ""publishEvent finished with exception : "" + ex ) ; } } +" 72,"public static String getStandardFileNameForResponses ( String activityId , String studyId , String activityRunId , String participantId , String version ) { LOGGER . entry ( ""begin getStandardFileNameForResponses()"" ) ; String fileName = null ; try { fileName = new StringBuilder ( ) . append ( ""FDAHPHCI_"" ) . append ( new SimpleDateFormat ( ""MMddyyyyHHmmss"" ) . format ( new Date ( ) ) ) . append ( ""_"" ) . append ( studyId ) . append ( ""_"" ) . append ( activityId ) . append ( ""_"" ) . append ( activityRunId ) . append ( ""_"" ) . append ( participantId ) . append ( ""_"" ) . append ( version ) . append ( "".json"" ) . toString ( ) ; } catch ( Exception e ) { } LOGGER . exit ( ""getStandardFileNameForResponses() :: ends"" ) ; return fileName ; } +","public static String getStandardFileNameForResponses ( String activityId , String studyId , String activityRunId , String participantId , String version ) { LOGGER . entry ( ""begin getStandardFileNameForResponses()"" ) ; String fileName = null ; try { fileName = new StringBuilder ( ) . append ( ""FDAHPHCI_"" ) . append ( new SimpleDateFormat ( ""MMddyyyyHHmmss"" ) . format ( new Date ( ) ) ) . append ( ""_"" ) . append ( studyId ) . append ( ""_"" ) . append ( activityId ) . append ( ""_"" ) . append ( activityRunId ) . append ( ""_"" ) . append ( participantId ) . append ( ""_"" ) . append ( version ) . append ( "".json"" ) . toString ( ) ; } catch ( Exception e ) { LOGGER . error ( ""ERROR: StudyMetaDataUtil - getStandardFileNameForResponses()"" , e ) ; } LOGGER . exit ( ""getStandardFileNameForResponses() :: ends"" ) ; return fileName ; } +" 73,"@ GET @ Path ( ""/callback"" ) public Response callback ( @ QueryParam ( ""state"" ) UUID loginSession , @ QueryParam ( ""code"" ) String code ) { if ( ! loginSessionRedirects . containsKey ( loginSession ) ) { return Response . status ( 417 ) . entity ( ""Login session unknown"" ) . build ( ) ; } try { final Optional < Tokens > userTokens = openIdClient . getUserTokens ( code ) ; final String value = userTokens . isPresent ( ) ? userTokens . get ( ) . getBearerAccessToken ( ) . getValue ( ) : ""no-token"" ; final URI userUri = UriBuilder . fromUri ( loginSessionRedirects . get ( loginSession ) ) . queryParam ( ""sessionToken"" , value ) . build ( ) ; return Response . temporaryRedirect ( userUri ) . build ( ) ; } catch ( IOException | ParseException e ) { return Response . serverError ( ) . build ( ) ; } } +","@ GET @ Path ( ""/callback"" ) public Response callback ( @ QueryParam ( ""state"" ) UUID loginSession , @ QueryParam ( ""code"" ) String code ) { if ( ! loginSessionRedirects . containsKey ( loginSession ) ) { return Response . status ( 417 ) . entity ( ""Login session unknown"" ) . build ( ) ; } try { final Optional < Tokens > userTokens = openIdClient . getUserTokens ( code ) ; final String value = userTokens . isPresent ( ) ? userTokens . get ( ) . getBearerAccessToken ( ) . getValue ( ) : ""no-token"" ; final URI userUri = UriBuilder . fromUri ( loginSessionRedirects . get ( loginSession ) ) . queryParam ( ""sessionToken"" , value ) . build ( ) ; return Response . temporaryRedirect ( userUri ) . build ( ) ; } catch ( IOException | ParseException e ) { LOG . error ( ""Retrieval of userTokens failed"" , e ) ; return Response . serverError ( ) . build ( ) ; } } +" 74,"public void adjustClientCipherAfterEarly ( ) { try { context .
setActiveClientKeySetType ( Tls13KeySetType . HANDSHAKE_TRAFFIC_SECRETS ) ; KeySet clientKeySet = KeySetGenerator . generateKeySet ( context , context . getChooser ( ) . getSelectedProtocolVersion ( ) , context . getActiveClientKeySetType ( ) ) ; RecordCipher recordCipherClient = RecordCipherFactory . getRecordCipher ( context , clientKeySet , context . getChooser ( ) . getSelectedCipherSuite ( ) ) ; context . getRecordLayer ( ) . setRecordCipher ( recordCipherClient ) ; context . getRecordLayer ( ) . updateDecryptionCipher ( ) ; context . setReadSequenceNumber ( 0 ) ; } catch ( CryptoException | NoSuchAlgorithmException ex ) { LOGGER . error ( ""Generating KeySet failed"" , ex ) ; throw new WorkflowExecutionException ( ex . toString ( ) ) ; } } +","public void adjustClientCipherAfterEarly ( ) { try { context . setActiveClientKeySetType ( Tls13KeySetType . HANDSHAKE_TRAFFIC_SECRETS ) ; LOGGER . debug ( ""Setting cipher for client to use handshake secrets"" ) ; KeySet clientKeySet = KeySetGenerator . generateKeySet ( context , context . getChooser ( ) . getSelectedProtocolVersion ( ) , context . getActiveClientKeySetType ( ) ) ; RecordCipher recordCipherClient = RecordCipherFactory . getRecordCipher ( context , clientKeySet , context . getChooser ( ) . getSelectedCipherSuite ( ) ) ; context . getRecordLayer ( ) . setRecordCipher ( recordCipherClient ) ; context . getRecordLayer ( ) . updateDecryptionCipher ( ) ; context . setReadSequenceNumber ( 0 ) ; } catch ( CryptoException | NoSuchAlgorithmException ex ) { LOGGER . error ( ""Generating KeySet failed"" , ex ) ; throw new WorkflowExecutionException ( ex . toString ( ) ) ; } } +" +75,"public void adjustClientCipherAfterEarly ( ) { try { context . setActiveClientKeySetType ( Tls13KeySetType . HANDSHAKE_TRAFFIC_SECRETS ) ; LOGGER . debug ( ""Setting cipher for client to use handshake secrets"" ) ; KeySet clientKeySet = KeySetGenerator . generateKeySet ( context , context . getChooser ( ) . getSelectedProtocolVersion ( ) , context . getActiveClientKeySetType ( ) ) ; RecordCipher recordCipherClient = RecordCipherFactory . getRecordCipher ( context , clientKeySet , context . getChooser ( ) . getSelectedCipherSuite ( ) ) ; context . getRecordLayer ( ) . setRecordCipher ( recordCipherClient ) ; context . getRecordLayer ( ) . updateDecryptionCipher ( ) ; context . setReadSequenceNumber ( 0 ) ; } catch ( CryptoException | NoSuchAlgorithmException ex ) { throw new WorkflowExecutionException ( ex . toString ( ) ) ; } } +","public void adjustClientCipherAfterEarly ( ) { try { context . setActiveClientKeySetType ( Tls13KeySetType . HANDSHAKE_TRAFFIC_SECRETS ) ; LOGGER . debug ( ""Setting cipher for client to use handshake secrets"" ) ; KeySet clientKeySet = KeySetGenerator . generateKeySet ( context , context . getChooser ( ) . getSelectedProtocolVersion ( ) , context . getActiveClientKeySetType ( ) ) ; RecordCipher recordCipherClient = RecordCipherFactory . getRecordCipher ( context , clientKeySet , context . getChooser ( ) . getSelectedCipherSuite ( ) ) ; context . getRecordLayer ( ) . setRecordCipher ( recordCipherClient ) ; context . getRecordLayer ( ) . updateDecryptionCipher ( ) ; context . setReadSequenceNumber ( 0 ) ; } catch ( CryptoException | NoSuchAlgorithmException ex ) { LOGGER . error ( ""Generating KeySet failed"" , ex ) ; throw new WorkflowExecutionException ( ex . toString ( ) ) ; } } +" +76,"@ OnWebSocketClose public void onClose ( int statusCode , String reason ) { if ( statusCode != StatusCode . 
NORMAL ) { } if ( session != null ) { if ( ! session . isOpen ( ) ) { if ( session != null ) { session . close ( ) ; } } session = null ; } if ( websocketHandler != null ) { websocketHandler . onClose ( ) ; } } +","@ OnWebSocketClose public void onClose ( int statusCode , String reason ) { if ( statusCode != StatusCode . NORMAL ) { logger . debug ( ""WebSocket Connection closed: {} - {}"" , statusCode , reason ) ; } if ( session != null ) { if ( ! session . isOpen ( ) ) { if ( session != null ) { session . close ( ) ; } } session = null ; } if ( websocketHandler != null ) { websocketHandler . onClose ( ) ; } } +" +77,"private List < RemoteInfo > loadRemoteInfos ( RepositoryInfo repo ) { String repoId = repo . getId ( ) ; if ( null == repoId ) { return new ArrayList < > ( ) ; } ArrayList < RemoteInfo > list = new ArrayList < > ( ) ; Repository geogig ; try { geogig = RepositoryManager . get ( ) . getRepository ( repoId ) ; if ( geogig != null ) { ImmutableList < Remote > geogigRemotes = geogig . command ( RemoteListOp . class ) . call ( ) ; list = RemoteInfo . fromList ( geogigRemotes ) ; } } catch ( Exception e ) { } return list ; } +","private List < RemoteInfo > loadRemoteInfos ( RepositoryInfo repo ) { String repoId = repo . getId ( ) ; if ( null == repoId ) { return new ArrayList < > ( ) ; } ArrayList < RemoteInfo > list = new ArrayList < > ( ) ; Repository geogig ; try { geogig = RepositoryManager . get ( ) . getRepository ( repoId ) ; if ( geogig != null ) { ImmutableList < Remote > geogigRemotes = geogig . command ( RemoteListOp . class ) . call ( ) ; list = RemoteInfo . fromList ( geogigRemotes ) ; } } catch ( Exception e ) { LOGGER . warn ( ""Failed to load Remotes for repository"" , e ) ; } return list ; } +" +78,"public BusinessObjectEntry getBusinessObjectEntryForConcreteClass ( DataDictionaryIndex ddIndex , String className ) { if ( StringUtils . isBlank ( className ) ) { throw new IllegalArgumentException ( ""invalid (blank) className"" ) ; } if ( LOG . isDebugEnabled ( ) ) { } int index = className . indexOf ( ""$$"" ) ; if ( index >= 0 ) { className = className . substring ( 0 , index ) ; } return ddIndex . getBusinessObjectEntries ( ) . get ( className ) ; } +","public BusinessObjectEntry getBusinessObjectEntryForConcreteClass ( DataDictionaryIndex ddIndex , String className ) { if ( StringUtils . isBlank ( className ) ) { throw new IllegalArgumentException ( ""invalid (blank) className"" ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( ""calling getBusinessObjectEntry '"" + className + ""'"" ) ; } int index = className . indexOf ( ""$$"" ) ; if ( index >= 0 ) { className = className . substring ( 0 , index ) ; } return ddIndex . getBusinessObjectEntries ( ) . get ( className ) ; } +" +79,"public synchronized void disconnect ( boolean askForReconnect ) { if ( isConnected ( ) ) { sessionState = SessionState . DISCONNECTING ; } if ( connection == null || sessionState == SessionState . DISCONNECTED ) { return ; } try { if ( callbackClient . isConnected ( ) ) { callbackClient . removeListener ( callbackHandler ) ; callbackClient . disconnect ( ) ; } TransporterClient . destroyTransporterClient ( server ) ; } catch ( Throwable ex ) { logger . fatal ( ""Disconnecting FAIL"" , ex ) ; } if ( sessionState == SessionState . DISCONNECTING || sessionState == SessionState . CONNECTING ) { sessionState = SessionState . DISCONNECTED ; logger . info ( ""Disconnecting DONE"" ) ; if ( askForReconnect ) { client . showError ( ""Network error. 
You have been disconnected from "" + connection . getHost ( ) ) ; } client . disconnected ( askForReconnect ) ; pingTime . clear ( ) ; } } +","public synchronized void disconnect ( boolean askForReconnect ) { if ( isConnected ( ) ) { logger . info ( ""Disconnecting..."" ) ; sessionState = SessionState . DISCONNECTING ; } if ( connection == null || sessionState == SessionState . DISCONNECTED ) { return ; } try { if ( callbackClient . isConnected ( ) ) { callbackClient . removeListener ( callbackHandler ) ; callbackClient . disconnect ( ) ; } TransporterClient . destroyTransporterClient ( server ) ; } catch ( Throwable ex ) { logger . fatal ( ""Disconnecting FAIL"" , ex ) ; } if ( sessionState == SessionState . DISCONNECTING || sessionState == SessionState . CONNECTING ) { sessionState = SessionState . DISCONNECTED ; logger . info ( ""Disconnecting DONE"" ) ; if ( askForReconnect ) { client . showError ( ""Network error. You have been disconnected from "" + connection . getHost ( ) ) ; } client . disconnected ( askForReconnect ) ; pingTime . clear ( ) ; } } +" +80,"public synchronized void disconnect ( boolean askForReconnect ) { if ( isConnected ( ) ) { logger . info ( ""Disconnecting..."" ) ; sessionState = SessionState . DISCONNECTING ; } if ( connection == null || sessionState == SessionState . DISCONNECTED ) { return ; } try { if ( callbackClient . isConnected ( ) ) { callbackClient . removeListener ( callbackHandler ) ; callbackClient . disconnect ( ) ; } TransporterClient . destroyTransporterClient ( server ) ; } catch ( Throwable ex ) { } if ( sessionState == SessionState . DISCONNECTING || sessionState == SessionState . CONNECTING ) { sessionState = SessionState . DISCONNECTED ; logger . info ( ""Disconnecting DONE"" ) ; if ( askForReconnect ) { client . showError ( ""Network error. You have been disconnected from "" + connection . getHost ( ) ) ; } client . disconnected ( askForReconnect ) ; pingTime . clear ( ) ; } } +","public synchronized void disconnect ( boolean askForReconnect ) { if ( isConnected ( ) ) { logger . info ( ""Disconnecting..."" ) ; sessionState = SessionState . DISCONNECTING ; } if ( connection == null || sessionState == SessionState . DISCONNECTED ) { return ; } try { if ( callbackClient . isConnected ( ) ) { callbackClient . removeListener ( callbackHandler ) ; callbackClient . disconnect ( ) ; } TransporterClient . destroyTransporterClient ( server ) ; } catch ( Throwable ex ) { logger . fatal ( ""Disconnecting FAIL"" , ex ) ; } if ( sessionState == SessionState . DISCONNECTING || sessionState == SessionState . CONNECTING ) { sessionState = SessionState . DISCONNECTED ; logger . info ( ""Disconnecting DONE"" ) ; if ( askForReconnect ) { client . showError ( ""Network error. You have been disconnected from "" + connection . getHost ( ) ) ; } client . disconnected ( askForReconnect ) ; pingTime . clear ( ) ; } } +" +81,"public synchronized void disconnect ( boolean askForReconnect ) { if ( isConnected ( ) ) { logger . info ( ""Disconnecting..."" ) ; sessionState = SessionState . DISCONNECTING ; } if ( connection == null || sessionState == SessionState . DISCONNECTED ) { return ; } try { if ( callbackClient . isConnected ( ) ) { callbackClient . removeListener ( callbackHandler ) ; callbackClient . disconnect ( ) ; } TransporterClient . destroyTransporterClient ( server ) ; } catch ( Throwable ex ) { logger . fatal ( ""Disconnecting FAIL"" , ex ) ; } if ( sessionState == SessionState . DISCONNECTING || sessionState == SessionState . 
CONNECTING ) { sessionState = SessionState . DISCONNECTED ; if ( askForReconnect ) { client . showError ( ""Network error. You have been disconnected from "" + connection . getHost ( ) ) ; } client . disconnected ( askForReconnect ) ; pingTime . clear ( ) ; } } +","public synchronized void disconnect ( boolean askForReconnect ) { if ( isConnected ( ) ) { logger . info ( ""Disconnecting..."" ) ; sessionState = SessionState . DISCONNECTING ; } if ( connection == null || sessionState == SessionState . DISCONNECTED ) { return ; } try { if ( callbackClient . isConnected ( ) ) { callbackClient . removeListener ( callbackHandler ) ; callbackClient . disconnect ( ) ; } TransporterClient . destroyTransporterClient ( server ) ; } catch ( Throwable ex ) { logger . fatal ( ""Disconnecting FAIL"" , ex ) ; } if ( sessionState == SessionState . DISCONNECTING || sessionState == SessionState . CONNECTING ) { sessionState = SessionState . DISCONNECTED ; logger . info ( ""Disconnecting DONE"" ) ; if ( askForReconnect ) { client . showError ( ""Network error. You have been disconnected from "" + connection . getHost ( ) ) ; } client . disconnected ( askForReconnect ) ; pingTime . clear ( ) ; } } +" +82,"public static com . liferay . commerce . inventory . model . CommerceInventoryWarehouseItem increaseCommerceInventoryWarehouseItemQuantity ( HttpPrincipal httpPrincipal , long commerceInventoryWarehouseItemId , int quantity ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( CommerceInventoryWarehouseItemServiceUtil . class , ""increaseCommerceInventoryWarehouseItemQuantity"" , _increaseCommerceInventoryWarehouseItemQuantityParameterTypes19 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , commerceInventoryWarehouseItemId , quantity ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( com . liferay . commerce . inventory . model . CommerceInventoryWarehouseItem ) returnObj ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { throw systemException ; } } +","public static com . liferay . commerce . inventory . model . CommerceInventoryWarehouseItem increaseCommerceInventoryWarehouseItemQuantity ( HttpPrincipal httpPrincipal , long commerceInventoryWarehouseItemId , int quantity ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( CommerceInventoryWarehouseItemServiceUtil . class , ""increaseCommerceInventoryWarehouseItemQuantity"" , _increaseCommerceInventoryWarehouseItemQuantityParameterTypes19 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , commerceInventoryWarehouseItemId , quantity ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( com . liferay . commerce . inventory . model . 
CommerceInventoryWarehouseItem ) returnObj ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { _log . error ( systemException , systemException ) ; throw systemException ; } } 
+"
+83,"public void run ( ) { try { started = true ; gotLock = service . lock ( ""obj"" , - 1 , - 1 ) ; if ( logger . isDebugEnabled ( ) ) { } } catch ( VirtualMachineError e ) { SystemFailure . initiateFailure ( e ) ; throw e ; } catch ( Throwable ex ) { logger . warn ( ""[testLockIsNotInterruptible] Caught..."" , ex ) ; exception = ex ; } wasFlagSet = Thread . currentThread ( ) . isInterrupted ( ) ; } 
+","public void run ( ) { try { started = true ; gotLock = service . lock ( ""obj"" , - 1 , - 1 ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""[testLockIsNotInterruptible] thread2 finished lock() - got "" + gotLock ) ; } } catch ( VirtualMachineError e ) { SystemFailure . initiateFailure ( e ) ; throw e ; } catch ( Throwable ex ) { logger . warn ( ""[testLockIsNotInterruptible] Caught..."" , ex ) ; exception = ex ; } wasFlagSet = Thread . currentThread ( ) . isInterrupted ( ) ; } 
+"
+84,"public void run ( ) { try { started = true ; gotLock = service . lock ( ""obj"" , - 1 , - 1 ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""[testLockIsNotInterruptible] thread2 finished lock() - got "" + gotLock ) ; } } catch ( VirtualMachineError e ) { SystemFailure . initiateFailure ( e ) ; throw e ; } catch ( Throwable ex ) { exception = ex ; } wasFlagSet = Thread . currentThread ( ) . isInterrupted ( ) ; } 
+","public void run ( ) { try { started = true ; gotLock = service . lock ( ""obj"" , - 1 , - 1 ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""[testLockIsNotInterruptible] thread2 finished lock() - got "" + gotLock ) ; } } catch ( VirtualMachineError e ) { SystemFailure . initiateFailure ( e ) ; throw e ; } catch ( Throwable ex ) { logger . warn ( ""[testLockIsNotInterruptible] Caught..."" , ex ) ; exception = ex ; } wasFlagSet = Thread . currentThread ( ) . isInterrupted ( ) ; } 
+"
+85,"@ PostConstruct public void init ( ) { if ( log . isDebugEnabled ( ) ) { } if ( conf == null ) { throw new IllegalArgumentException ( ""QueryExpirationConfiguration is null"" ) ; } } 
+","@ PostConstruct public void init ( ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""@PostConstruct - init()"" ) ; } if ( conf == null ) { throw new IllegalArgumentException ( ""QueryExpirationConfiguration is null"" ) ; } } 
+"
+86,"public LdapEntry [ ] search ( LdapConnectionHandler originalConnectionHandler , LdapEntry entry ) throws IOException , NamingException { SearchControls searchControls = createSearchControl ( recursive , attributeArray ) ; Set < LdapEntry > foundEntries = new HashSet < LdapEntry > ( ) ; LdapConnectionHandler connectionHandler = originalConnectionHandler ; URI referralAddress = null ; if ( ( referralAddress = entry . getReferralUri ( ) ) != null && preferOriginalConnection == false ) { connectionHandler = connectionHandler . findForReferral ( referralAddress ) ; if ( connectionHandler == null ) { SECURITY_LOGGER . tracef ( ""Unable to obtain connection handler for referral URI %s"" , referralAddress ) ; return foundEntries . toArray ( new LdapEntry [ foundEntries . size ( ) ] ) ; } } else { referralAddress = null ; } Object [ ] searchParameter = getSearchParameter ( entry ) ; boolean trace = SECURITY_LOGGER . isTraceEnabled ( ) ; if ( trace ) { SECURITY_LOGGER . tracef ( ""Performing search baseDn=%s, filterString=%s, searchParameter=%s"" , baseDn , filterString , Arrays . toString ( searchParameter ) ) ; } NamingEnumeration < SearchResult > searchResults = connectionHandler . getConnection ( ) . search ( baseDn , filterString , searchParameter , searchControls ) ; if ( trace && searchResults . hasMore ( ) == false ) { } while ( searchResults . hasMore ( ) ) { SearchResult current = searchResults . next ( ) ; Attributes attributes = current . getAttributes ( ) ; if ( attributes != null ) { LdapEntry newEntry = convertToLdapEntry ( current , attributes , referralAddress ) ; SECURITY_LOGGER . tracef ( ""Adding %s"" , newEntry ) ; foundEntries . add ( newEntry ) ; } else { SECURITY_LOGGER . tracef ( ""No attributes found for %s"" , current ) ; } } return foundEntries . toArray ( new LdapEntry [ foundEntries . size ( ) ] ) ; } 
+","public LdapEntry [ ] search ( LdapConnectionHandler originalConnectionHandler , LdapEntry entry ) throws IOException , NamingException { SearchControls searchControls = createSearchControl ( recursive , attributeArray ) ; Set < LdapEntry > foundEntries = new HashSet < LdapEntry > ( ) ; LdapConnectionHandler connectionHandler = originalConnectionHandler ; URI referralAddress = null ; if ( ( referralAddress = entry . getReferralUri ( ) ) != null && preferOriginalConnection == false ) { connectionHandler = connectionHandler . findForReferral ( referralAddress ) ; if ( connectionHandler == null ) { SECURITY_LOGGER . tracef ( ""Unable to obtain connection handler for referral URI %s"" , referralAddress ) ; return foundEntries . toArray ( new LdapEntry [ foundEntries . size ( ) ] ) ; } } else { referralAddress = null ; } Object [ ] searchParameter = getSearchParameter ( entry ) ; boolean trace = SECURITY_LOGGER . isTraceEnabled ( ) ; if ( trace ) { SECURITY_LOGGER . tracef ( ""Performing search baseDn=%s, filterString=%s, searchParameter=%s"" , baseDn , filterString , Arrays . toString ( searchParameter ) ) ; } NamingEnumeration < SearchResult > searchResults = connectionHandler . getConnection ( ) . search ( baseDn , filterString , searchParameter , searchControls ) ; if ( trace && searchResults . hasMore ( ) == false ) { SECURITY_LOGGER . trace ( ""No search results found."" ) ; } while ( searchResults . hasMore ( ) ) { SearchResult current = searchResults . next ( ) ; Attributes attributes = current . getAttributes ( ) ; if ( attributes != null ) { LdapEntry newEntry = convertToLdapEntry ( current , attributes , referralAddress ) ; SECURITY_LOGGER . tracef ( ""Adding %s"" , newEntry ) ; foundEntries . add ( newEntry ) ; } else { SECURITY_LOGGER . tracef ( ""No attributes found for %s"" , current ) ; } } return foundEntries . toArray ( new LdapEntry [ foundEntries . size ( ) ] ) ; } 
+"
+87,"protected Object executeScript ( ScriptFactory scriptFactory , Map < String , Object > scriptVariables , HttpServletResponse response , String scriptUrl ) { try { return scriptFactory . getScript ( scriptUrl ) . execute ( scriptVariables ) ; } catch ( ScriptNotFoundException e ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; return singletonMap ( errorMessageModelAttributeName , ""REST script not found"" ) ; } catch ( Exception e ) { logger . error ( ""Error executing REST script at "" + scriptUrl , e ) ; Throwable cause = checkHttpStatusCodeAwareException ( e , response ) ; if ( cause == null ) { cause = checkValidationException ( e , response ) ; if ( cause == null ) { response . setStatus ( HttpServletResponse . SC_INTERNAL_SERVER_ERROR ) ; } } return singletonMap ( errorMessageModelAttributeName , cause != null ? cause . getMessage ( ) : e . getMessage ( ) ) ; } } 
+","protected Object executeScript ( ScriptFactory scriptFactory , Map < String , Object > scriptVariables , HttpServletResponse response , String scriptUrl ) { try { return scriptFactory . getScript ( scriptUrl ) . execute ( scriptVariables ) ; } catch ( ScriptNotFoundException e ) { logger . error ( ""Script not found at "" + scriptUrl , e ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; return singletonMap ( errorMessageModelAttributeName , ""REST script not found"" ) ; } catch ( Exception e ) { logger . error ( ""Error executing REST script at "" + scriptUrl , e ) ; Throwable cause = checkHttpStatusCodeAwareException ( e , response ) ; if ( cause == null ) { cause = checkValidationException ( e , response ) ; if ( cause == null ) { response . setStatus ( HttpServletResponse . SC_INTERNAL_SERVER_ERROR ) ; } } return singletonMap ( errorMessageModelAttributeName , cause != null ? cause . getMessage ( ) : e . getMessage ( ) ) ; } } 
+"
+88,"protected Object executeScript ( ScriptFactory scriptFactory , Map < String , Object > scriptVariables , HttpServletResponse response , String scriptUrl ) { try { return scriptFactory . getScript ( scriptUrl ) . execute ( scriptVariables ) ; } catch ( ScriptNotFoundException e ) { logger . error ( ""Script not found at "" + scriptUrl , e ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; return singletonMap ( errorMessageModelAttributeName , ""REST script not found"" ) ; } catch ( Exception e ) { Throwable cause = checkHttpStatusCodeAwareException ( e , response ) ; if ( cause == null ) { cause = checkValidationException ( e , response ) ; if ( cause == null ) { response . setStatus ( HttpServletResponse . SC_INTERNAL_SERVER_ERROR ) ; } } return singletonMap ( errorMessageModelAttributeName , cause != null ? cause . getMessage ( ) : e . getMessage ( ) ) ; } } 
+","protected Object executeScript ( ScriptFactory scriptFactory , Map < String , Object > scriptVariables , HttpServletResponse response , String scriptUrl ) { try { return scriptFactory . getScript ( scriptUrl ) . execute ( scriptVariables ) ; } catch ( ScriptNotFoundException e ) { logger . error ( ""Script not found at "" + scriptUrl , e ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; return singletonMap ( errorMessageModelAttributeName , ""REST script not found"" ) ; } catch ( Exception e ) { logger . error ( ""Error executing REST script at "" + scriptUrl , e ) ; Throwable cause = checkHttpStatusCodeAwareException ( e , response ) ; if ( cause == null ) { cause = checkValidationException ( e , response ) ; if ( cause == null ) { response . setStatus ( HttpServletResponse . SC_INTERNAL_SERVER_ERROR ) ; } } return singletonMap ( errorMessageModelAttributeName , cause != null ? cause . getMessage ( ) : e . getMessage ( ) ) ; } } 
+"
+89,"private static void loadBusinessProperties ( String filename ) { try { prop . load ( new FileInputStream ( filename ) ) ; } catch ( Exception e ) { logger . debug ( e . getMessage ( ) ) ; throw new CloudRuntimeException ( e . getMessage ( ) ) ; } } 
+","private static void loadBusinessProperties ( String filename ) { try { logger . debug ( String . format ( ""start load business properties: %s"" , filename ) ) ; prop . load ( new FileInputStream ( filename ) ) ; } catch ( Exception e ) { logger . debug ( e . getMessage ( ) ) ; throw new CloudRuntimeException ( e . getMessage ( ) ) ; } } 
+"
+90,"private static void loadBusinessProperties ( String filename ) { try { logger . debug ( String . format ( ""start load business properties: %s"" , filename ) ) ; prop . load ( new FileInputStream ( filename ) ) ; } catch ( Exception e ) { throw new CloudRuntimeException ( e . getMessage ( ) ) ; } } 
+","private static void loadBusinessProperties ( String filename ) { try { logger . debug ( String . format ( ""start load business properties: %s"" , filename ) ) ; prop . load ( new FileInputStream ( filename ) ) ; } catch ( Exception e ) { logger . debug ( e . getMessage ( ) ) ; throw new CloudRuntimeException ( e . getMessage ( ) ) ; } } 
+"
+91,"public int truncate ( String path , long size ) { return - ErrorCodes . EOPNOTSUPP ( ) ; } 
+","public int truncate ( String path , long size ) { LOG . error ( ""Truncate is not supported {}"" , path ) ; return - ErrorCodes . EOPNOTSUPP ( ) ; } 
+"
+92,"private void doHousekeeping ( ) { try { doClaimCleanup ( ) ; updateStallStop ( ) ; doRecovery ( ) ; } catch ( Throwable t ) { } } 
+","private void doHousekeeping ( ) { try { doClaimCleanup ( ) ; updateStallStop ( ) ; doRecovery ( ) ; } catch ( Throwable t ) { logger . error ( ""Encountered problem during housekeeping"" , t ) ; } } 
+"
+93,"public static com . liferay . commerce . product . model . CPOption addCPOption ( HttpPrincipal httpPrincipal , java . util . Map < java . util . Locale , String > nameMap , java . util . Map < java . util . Locale , String > descriptionMap , String ddmFormFieldTypeName , boolean facetable , boolean required , boolean skuContributor , String key , com . liferay . portal . kernel . service . ServiceContext serviceContext ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( CPOptionServiceUtil . class , ""addCPOption"" , _addCPOptionParameterTypes0 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , nameMap , descriptionMap , ddmFormFieldTypeName , facetable , required , skuContributor , key , serviceContext ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( com . liferay . commerce . product . model . CPOption ) returnObj ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { throw systemException ; } } 
+","public static com . liferay . commerce . product . model . CPOption addCPOption ( HttpPrincipal httpPrincipal , java . util . Map < java . util . Locale , String > nameMap , java . util . Map < java . util . Locale , String > descriptionMap , String ddmFormFieldTypeName , boolean facetable , boolean required , boolean skuContributor , String key , com . liferay . portal . kernel . service . ServiceContext serviceContext ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( CPOptionServiceUtil . class , ""addCPOption"" , _addCPOptionParameterTypes0 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , nameMap , descriptionMap , ddmFormFieldTypeName , facetable , required , skuContributor , key , serviceContext ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( com . liferay . commerce . product . model . CPOption ) returnObj ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { _log . error ( systemException , systemException ) ; throw systemException ; } } 
+"
+94,"@ Test public void testJmsProperties ( ) throws Exception { QueueDeployment deployment = new QueueDeployment ( ) ; deployment . setDuplicatesAllowed ( true ) ; deployment . setDurableSend ( false ) ; final String queueName = ""testQueue"" ; deployment . setName ( queueName ) ; manager . getQueueManager ( ) . deploy ( deployment ) ; ClientSession session = manager . getQueueManager ( ) . getSessionFactory ( ) . createSession ( ) ; try { session . createConsumer ( queueName ) . setMessageHandler ( new Listener ( ) ) ; session . start ( ) ; ClientRequest request = new ClientRequest ( generateURL ( Util . getUrlPath ( queueName ) ) ) ; ClientResponse < ? > response = request . head ( ) ; response . releaseConnection ( ) ; Assert . assertEquals ( 200 , response . getStatus ( ) ) ; Link sender = getLinkByTitle ( manager . getQueueManager ( ) . getLinkStrategy ( ) , response , ""create"" ) ; { response = sender . request ( ) . body ( ""text/plain"" , ""val"" ) . header ( ""dummyHeader"" , ""DummyValue"" ) . header ( HttpHeaderProperty . MESSAGE_PROPERTY_DISCRIMINATOR + ""property1"" , ""val"" ) . post ( ) ; response . releaseConnection ( ) ; Assert . assertEquals ( 201 , response . getStatus ( ) ) ; Listener . latch . await ( 2 , TimeUnit . SECONDS ) ; Assert . assertEquals ( 4 , Listener . propertyNames . size ( ) ) ; Assert . assertThat ( Listener . propertyNames , hasItem ( new SimpleString ( ""http_content$type"" ) ) ) ; Assert . assertThat ( Listener . propertyNames , hasItem ( new SimpleString ( ""http_content$length"" ) ) ) ; Assert . assertThat ( Listener . propertyNames , hasItem ( new SimpleString ( POSTED_AS_HTTP_MESSAGE ) ) ) ; Assert . assertThat ( Listener . propertyNames , hasItem ( new SimpleString ( ""property1"" ) ) ) ; Assert . assertThat ( Listener . propertyNames , not ( hasItem ( new SimpleString ( ""dummyHeader"" ) ) ) ) ; } } finally { session . close ( ) ; } } 
+","@ Test public void testJmsProperties ( ) throws Exception { QueueDeployment deployment = new QueueDeployment ( ) ; deployment . setDuplicatesAllowed ( true ) ; deployment . setDurableSend ( false ) ; final String queueName = ""testQueue"" ; deployment . setName ( queueName ) ; manager . getQueueManager ( ) . deploy ( deployment ) ; ClientSession session = manager . getQueueManager ( ) . getSessionFactory ( ) . createSession ( ) ; try { session . createConsumer ( queueName ) . setMessageHandler ( new Listener ( ) ) ; session . start ( ) ; ClientRequest request = new ClientRequest ( generateURL ( Util . getUrlPath ( queueName ) ) ) ; ClientResponse < ? > response = request . head ( ) ; response . releaseConnection ( ) ; Assert . assertEquals ( 200 , response . getStatus ( ) ) ; Link sender = getLinkByTitle ( manager . getQueueManager ( ) . getLinkStrategy ( ) , response , ""create"" ) ; log . debug ( ""create: "" + sender ) ; { response = sender . request ( ) . body ( ""text/plain"" , ""val"" ) . header ( ""dummyHeader"" , ""DummyValue"" ) . header ( HttpHeaderProperty . MESSAGE_PROPERTY_DISCRIMINATOR + ""property1"" , ""val"" ) . post ( ) ; response . releaseConnection ( ) ; Assert . assertEquals ( 201 , response . getStatus ( ) ) ; Listener . latch . await ( 2 , TimeUnit . SECONDS ) ; Assert . assertEquals ( 4 , Listener . propertyNames . size ( ) ) ; Assert . assertThat ( Listener . propertyNames , hasItem ( new SimpleString ( ""http_content$type"" ) ) ) ; Assert . assertThat ( Listener . propertyNames , hasItem ( new SimpleString ( ""http_content$length"" ) ) ) ; Assert . assertThat ( Listener . propertyNames , hasItem ( new SimpleString ( POSTED_AS_HTTP_MESSAGE ) ) ) ; Assert . assertThat ( Listener . propertyNames , hasItem ( new SimpleString ( ""property1"" ) ) ) ; Assert . assertThat ( Listener . propertyNames , not ( hasItem ( new SimpleString ( ""dummyHeader"" ) ) ) ) ; } } finally { session . close ( ) ; } } 
+"
+95,"public static String getParentName ( ComposedSchema composedSchema , OpenAPI openAPI ) { Map < String , Schema > allSchemas = OpenLOpenAPIUtils . getSchemas ( openAPI ) ; List < Schema > interfaces = OpenLOpenAPIUtils . getInterfaces ( composedSchema ) ; List < String > refedWithoutDiscriminator = new ArrayList < > ( ) ; if ( CollectionUtils . isNotEmpty ( interfaces ) ) { for ( Schema < ? > schema : interfaces ) { if ( StringUtils . isNotEmpty ( schema . get$ref ( ) ) ) { String parentName = OpenAPITypeUtils . getSimpleName ( schema . get$ref ( ) ) ; Schema < ? > s = allSchemas . get ( parentName ) ; if ( s == null ) { return ""UNKNOWN_PARENT_NAME"" ; } else if ( hasOrInheritsDiscriminator ( s , allSchemas ) ) { return parentName ; } else { refedWithoutDiscriminator . add ( parentName ) ; } } } } if ( refedWithoutDiscriminator . size ( ) == 1 ) { return refedWithoutDiscriminator . get ( 0 ) ; } return null ; } 
+","public static String getParentName ( ComposedSchema composedSchema , OpenAPI openAPI ) { Map < String , Schema > allSchemas = OpenLOpenAPIUtils . getSchemas ( openAPI ) ; List < Schema > interfaces = OpenLOpenAPIUtils . getInterfaces ( composedSchema ) ; List < String > refedWithoutDiscriminator = new ArrayList < > ( ) ; if ( CollectionUtils . isNotEmpty ( interfaces ) ) { for ( Schema < ? > schema : interfaces ) { if ( StringUtils . isNotEmpty ( schema . get$ref ( ) ) ) { String parentName = OpenAPITypeUtils . getSimpleName ( schema . get$ref ( ) ) ; Schema < ? > s = allSchemas . get ( parentName ) ; if ( s == null ) { LOGGER . error ( ""Failed to obtain schema from {}"" , parentName ) ; return ""UNKNOWN_PARENT_NAME"" ; } else if ( hasOrInheritsDiscriminator ( s , allSchemas ) ) { return parentName ; } else { refedWithoutDiscriminator . add ( parentName ) ; } } } } if ( refedWithoutDiscriminator . size ( ) == 1 ) { return refedWithoutDiscriminator . get ( 0 ) ; } return null ; } 
+"
+96,"public static void close ( Closeable closeable , boolean swallowIOException ) throws IOException { if ( closeable == null ) { return ; } try { closeable . close ( ) ; } catch ( IOException e ) { if ( swallowIOException ) { } else { throw e ; } } } 
+","public static void close ( Closeable closeable , boolean swallowIOException ) throws IOException { if ( closeable == null ) { return ; } try { closeable . close ( ) ; } catch ( IOException e ) { if ( swallowIOException ) { logger . log ( Level . WARNING , ""IOException thrown while closing Closeable."" , e ) ; } else { throw e ; } } } 
+"
+97,"@ SuppressWarnings ( ""unchecked"" ) public void shutdown ( Database database ) { getDatabaseService ( database ) . shutdown ( database ) ; } 
+","@ SuppressWarnings ( ""unchecked"" ) public void shutdown ( Database database ) { log . debug ( ""shutting down "" + database . toString ( ) ) ; getDatabaseService ( database ) . shutdown ( database ) ; } 
+"
+98,"public SendableEvent read ( ) throws EPException { if ( stateManager . getState ( ) == AdapterState . DESTROYED || atEOF ) { return null ; } try { if ( eventsToSend . isEmpty ( ) ) { if ( beanClass != null ) { return new SendableBeanEvent ( newMapEvent ( ) , beanClass , eventTypeName , totalDelay , scheduleSlot ) ; } else { return new SendableMapEvent ( newMapEvent ( ) , eventTypeName , totalDelay , scheduleSlot ) ; } } else { SendableEvent theEvent = eventsToSend . first ( ) ; eventsToSend . remove ( theEvent ) ; return theEvent ; } } catch ( EOFException e ) { if ( ( ExecutionPathDebugLog . isDebugEnabled ) && ( log . isDebugEnabled ( ) ) ) { } atEOF = true ; if ( stateManager . getState ( ) == AdapterState . STARTED ) { stop ( ) ; } else { destroy ( ) ; } return null ; } } 
+","public SendableEvent read ( ) throws EPException { if ( stateManager . getState ( ) == AdapterState . DESTROYED || atEOF ) { return null ; } try { if ( eventsToSend . isEmpty ( ) ) { if ( beanClass != null ) { return new SendableBeanEvent ( newMapEvent ( ) , beanClass , eventTypeName , totalDelay , scheduleSlot ) ; } else { return new SendableMapEvent ( newMapEvent ( ) , eventTypeName , totalDelay , scheduleSlot ) ; } } else { SendableEvent theEvent = eventsToSend . first ( ) ; eventsToSend . remove ( theEvent ) ; return theEvent ; } } catch ( EOFException e ) { if ( ( ExecutionPathDebugLog . isDebugEnabled ) && ( log . isDebugEnabled ( ) ) ) { log . debug ( "".read reached end of CSV file"" ) ; } atEOF = true ; if ( stateManager . getState ( ) == AdapterState . STARTED ) { stop ( ) ; } else { destroy ( ) ; } return null ; } } 
+"
+99,"public void run ( ) { try { long count = 0 ; while ( count < numEvents ) { env . sendEventBean ( new SupportBean ( ) ) ; count ++ ; if ( count % 10000 == 0 ) { log . info ( ""Sending event #"" + count ) ; } } } catch ( RuntimeException ex ) { log . error ( ""Exception encountered: "" + ex . getMessage ( ) , ex ) ; exception = ex ; } log . info ( ""Completed event send"" ) ; } 
+","public void run ( ) { log . info ( ""Started event send"" ) ; try { long count = 0 ; while ( count < numEvents ) { env . sendEventBean ( new SupportBean ( ) ) ; count ++ ; if ( count % 10000 == 0 ) { log . info ( ""Sending event #"" + count ) ; } } } catch ( RuntimeException ex ) { log . error ( ""Exception encountered: "" + ex . getMessage ( ) , ex ) ; exception = ex ; } log . info ( ""Completed event send"" ) ; } 
+"
+100,"public void run ( ) { log . info ( ""Started event send"" ) ; try { long count = 0 ; while ( count < numEvents ) { env . sendEventBean ( new SupportBean ( ) ) ; count ++ ; if ( count % 10000 == 0 ) { } } } catch ( RuntimeException ex ) { log . error ( ""Exception encountered: "" + ex . getMessage ( ) , ex ) ; exception = ex ; } log . info ( ""Completed event send"" ) ; } 
+","public void run ( ) { log . info ( ""Started event send"" ) ; try { long count = 0 ; while ( count < numEvents ) { env . sendEventBean ( new SupportBean ( ) ) ; count ++ ; if ( count % 10000 == 0 ) { log . info ( ""Sending event #"" + count ) ; } } } catch ( RuntimeException ex ) { log . error ( ""Exception encountered: "" + ex . getMessage ( ) , ex ) ; exception = ex ; } log . info ( ""Completed event send"" ) ; } 
+"
+101,"public void run ( ) { log . info ( ""Started event send"" ) ; try { long count = 0 ; while ( count < numEvents ) { env . sendEventBean ( new SupportBean ( ) ) ; count ++ ; if ( count % 10000 == 0 ) { log . info ( ""Sending event #"" + count ) ; } } } catch ( RuntimeException ex ) { exception = ex ; } log . info ( ""Completed event send"" ) ; } 
+","public void run ( ) { log . info ( ""Started event send"" ) ; try { long count = 0 ; while ( count < numEvents ) { env . sendEventBean ( new SupportBean ( ) ) ; count ++ ; if ( count % 10000 == 0 ) { log . info ( ""Sending event #"" + count ) ; } } } catch ( RuntimeException ex ) { log . error ( ""Exception encountered: "" + ex . getMessage ( ) , ex ) ; exception = ex ; } log . info ( ""Completed event send"" ) ; } 
+"
+102,"public void run ( ) { log . info ( ""Started event send"" ) ; try { long count = 0 ; while ( count < numEvents ) { env . sendEventBean ( new SupportBean ( ) ) ; count ++ ; if ( count % 10000 == 0 ) { log . info ( ""Sending event #"" + count ) ; } } } catch ( RuntimeException ex ) { log . error ( ""Exception encountered: "" + ex . getMessage ( ) , ex ) ; exception = ex ; } } 
+","public void run ( ) { log . info ( ""Started event send"" ) ; try { long count = 0 ; while ( count < numEvents ) { env . sendEventBean ( new SupportBean ( ) ) ; count ++ ; if ( count % 10000 == 0 ) { log . info ( ""Sending event #"" + count ) ; } } } catch ( RuntimeException ex ) { log . error ( ""Exception encountered: "" + ex . getMessage ( ) , ex ) ; exception = ex ; } log . info ( ""Completed event send"" ) ; } 
+"
+103,"public static MTree deserializeFrom ( File mtreeSnapshot ) { try ( MLogReader mLogReader = new MLogReader ( mtreeSnapshot ) ) { return deserializeFromReader ( mLogReader ) ; } catch ( IOException e ) { return new MTree ( ) ; } finally { limit = new ThreadLocal < > ( ) ; offset = new ThreadLocal < > ( ) ; count = new ThreadLocal < > ( ) ; curOffset = new ThreadLocal < > ( ) ; } } 
+","public static MTree deserializeFrom ( File mtreeSnapshot ) { try ( MLogReader mLogReader = new MLogReader ( mtreeSnapshot ) ) { return deserializeFromReader ( mLogReader ) ; } catch ( IOException e ) { logger . warn ( ""Failed to deserialize from {}. Use a new MTree."" , mtreeSnapshot . getPath ( ) ) ; return new MTree ( ) ; } finally { limit = new ThreadLocal < > ( ) ; offset = new ThreadLocal < > ( ) ; count = new ThreadLocal < > ( ) ; curOffset = new ThreadLocal < > ( ) ; } } 
+"
+104,"private void createMustUnderstandFault ( SoapMessage soapResponse , List < QName > notUnderstoodHeaderNames , String [ ] actorsOrRoles ) { if ( logger . isWarnEnabled ( ) ) { } SoapBody responseBody = soapResponse . getSoapBody ( ) ; SoapFault fault = responseBody . addMustUnderstandFault ( mustUnderstandFaultString , mustUnderstandFaultStringLocale ) ; if ( ! ObjectUtils . isEmpty ( actorsOrRoles ) ) { fault . setFaultActorOrRole ( actorsOrRoles [ 0 ] ) ; } SoapHeader header = soapResponse . getSoapHeader ( ) ; if ( header instanceof Soap12Header ) { Soap12Header soap12Header = ( Soap12Header ) header ; for ( QName headerName : notUnderstoodHeaderNames ) { soap12Header . addNotUnderstoodHeaderElement ( headerName ) ; } } } 
+","private void createMustUnderstandFault ( SoapMessage soapResponse , List < QName > notUnderstoodHeaderNames , String [ ] actorsOrRoles ) { if ( logger . isWarnEnabled ( ) ) { logger . warn ( ""Could not handle mustUnderstand headers: "" + StringUtils . collectionToCommaDelimitedString ( notUnderstoodHeaderNames ) + "". Returning fault"" ) ; } SoapBody responseBody = soapResponse . getSoapBody ( ) ; SoapFault fault = responseBody . addMustUnderstandFault ( mustUnderstandFaultString , mustUnderstandFaultStringLocale ) ; if ( ! ObjectUtils . isEmpty ( actorsOrRoles ) ) { fault . setFaultActorOrRole ( actorsOrRoles [ 0 ] ) ; } SoapHeader header = soapResponse . getSoapHeader ( ) ; if ( header instanceof Soap12Header ) { Soap12Header soap12Header = ( Soap12Header ) header ; for ( QName headerName : notUnderstoodHeaderNames ) { soap12Header . addNotUnderstoodHeaderElement ( headerName ) ; } } } 
+"
+105,"public void deleteAddress ( AddressInfo addressInfo ) throws Exception { if ( logger . isDebugEnabled ( ) ) { } try { server . removeAddressInfo ( addressInfo . getName ( ) , null , true ) ; } catch ( ActiveMQAddressDoesNotExistException expected ) { logger . debug ( expected . getMessage ( ) , expected ) ; } catch ( Exception e ) { logger . warn ( e . getMessage ( ) , e ) ; } } 
+","public void deleteAddress ( AddressInfo addressInfo ) throws Exception { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""delete address "" + addressInfo ) ; } try { server . removeAddressInfo ( addressInfo . getName ( ) , null , true ) ; } catch ( ActiveMQAddressDoesNotExistException expected ) { logger . debug ( expected . getMessage ( ) , expected ) ; } catch ( Exception e ) { logger . warn ( e . getMessage ( ) , e ) ; } } 
+"
+106,"public void deleteAddress ( AddressInfo addressInfo ) throws Exception { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""delete address "" + addressInfo ) ; } try { server . removeAddressInfo ( addressInfo . getName ( ) , null , true ) ; } catch ( ActiveMQAddressDoesNotExistException expected ) { } catch ( Exception e ) { logger . warn ( e . getMessage ( ) , e ) ; } } 
+","public void deleteAddress ( AddressInfo addressInfo ) throws Exception { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""delete address "" + addressInfo ) ; } try { server . removeAddressInfo ( addressInfo . getName ( ) , null , true ) ; } catch ( ActiveMQAddressDoesNotExistException expected ) { logger . debug ( expected . getMessage ( ) , expected ) ; } catch ( Exception e ) { logger . warn ( e . getMessage ( ) , e ) ; } } 
+"
+107,"public void deleteAddress ( AddressInfo addressInfo ) throws Exception { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""delete address "" + addressInfo ) ; } try { server . removeAddressInfo ( addressInfo . getName ( ) , null , true ) ; } catch ( ActiveMQAddressDoesNotExistException expected ) { logger . debug ( expected . getMessage ( ) , expected ) ; } catch ( Exception e ) { } } 
+","public void deleteAddress ( AddressInfo addressInfo ) throws Exception { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""delete address "" + addressInfo ) ; } try { server . removeAddressInfo ( addressInfo . getName ( ) , null , true ) ; } catch ( ActiveMQAddressDoesNotExistException expected ) { logger . debug ( expected . getMessage ( ) , expected ) ; } catch ( Exception e ) { logger . warn ( e . getMessage ( ) , e ) ; } } 
+"
+108,"public String getContentType ( ) { FileEntry fileEntry = getModel ( ) ; try { FileVersion fileVersion = fileEntry . getLatestFileVersion ( ) ; return fileVersion . getMimeType ( ) ; } catch ( Exception exception ) { if ( _log . isDebugEnabled ( ) ) { } return fileEntry . getMimeType ( ) ; } } 
+","public String getContentType ( ) { FileEntry fileEntry = getModel ( ) ; try { FileVersion fileVersion = fileEntry . getLatestFileVersion ( ) ; return fileVersion . getMimeType ( ) ; } catch ( Exception exception ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( exception , exception ) ; } return fileEntry . getMimeType ( ) ; } } 
+"
+109,"public void save ( String fileName ) { Gpr . toFileSerializeGz ( fileName , this ) ; } 
+","public void save ( String fileName ) { Log . info ( ""Saving to file: "" + fileName ) ; Gpr . toFileSerializeGz ( fileName , this ) ; } 
+"
+110,"@ PUT @ Timed @ Path ( ""{name}/graph_read_mode"" ) @ Consumes ( APPLICATION_JSON ) @ Produces ( APPLICATION_JSON_WITH_CHARSET ) @ RolesAllowed ( ""admin"" ) public Map < String , GraphReadMode > graphReadMode ( @ Context GraphManager manager , @ PathParam ( ""name"" ) String name , GraphReadMode readMode ) { E . checkArgument ( readMode != null , ""Graph read mode can't be null"" ) ; HugeGraph g = graph ( manager , name ) ; g . readMode ( readMode ) ; return ImmutableMap . of ( ""graph_read_mode"" , readMode ) ; } 
+","@ PUT @ Timed @ Path ( ""{name}/graph_read_mode"" ) @ Consumes ( APPLICATION_JSON ) @ Produces ( APPLICATION_JSON_WITH_CHARSET ) @ RolesAllowed ( ""admin"" ) public Map < String , GraphReadMode > graphReadMode ( @ Context GraphManager manager , @ PathParam ( ""name"" ) String name , GraphReadMode readMode ) { LOG . debug ( ""Set graph read mode to: '{}' of graph '{}'"" , readMode , name ) ; E . checkArgument ( readMode != null , ""Graph read mode can't be null"" ) ; HugeGraph g = graph ( manager , name ) ; g . readMode ( readMode ) ; return ImmutableMap . of ( ""graph_read_mode"" , readMode ) ; } 
+"
+111,"public static void sendApplicationInstanceTerminatedEvent ( String appId , String instanceId ) { if ( log . isInfoEnabled ( ) ) { } ApplicationInstanceTerminatedEvent applicationTerminatedEvent = new ApplicationInstanceTerminatedEvent ( appId , instanceId ) ; publishEvent ( applicationTerminatedEvent ) ; } 
+","public static void sendApplicationInstanceTerminatedEvent ( String appId , String instanceId ) { if ( log . isInfoEnabled ( ) ) { log . info ( ""Publishing application instance terminated event: [application] "" + appId + "" [instance] "" + instanceId ) ; } ApplicationInstanceTerminatedEvent applicationTerminatedEvent = new ApplicationInstanceTerminatedEvent ( appId , instanceId ) ; publishEvent ( applicationTerminatedEvent ) ; } 
+"
+112,"public static void applyDefaultConfiguration ( Configuration configuration , @ Nullable ConfigDescription configDescription ) { if ( configDescription != null ) { for ( ConfigDescriptionParameter parameter : configDescription . getParameters ( ) ) { String defaultValue = parameter . getDefault ( ) ; if ( defaultValue != null && configuration . get ( parameter . getName ( ) ) == null ) { if ( parameter . isMultiple ( ) ) { if ( defaultValue . contains ( DEFAULT_LIST_DELIMITER ) ) { List < Object > values = ( List < Object > ) List . of ( defaultValue . split ( DEFAULT_LIST_DELIMITER ) ) . stream ( ) . map ( String :: trim ) . filter ( not ( String :: isEmpty ) ) . map ( value -> ConfigUtil . getDefaultValueAsCorrectType ( parameter . getName ( ) , parameter . getType ( ) , value ) ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; Integer multipleLimit = parameter . getMultipleLimit ( ) ; if ( multipleLimit != null && values . size ( ) > multipleLimit . intValue ( ) ) { } configuration . put ( parameter . getName ( ) , values ) ; } else { Object value = ConfigUtil . getDefaultValueAsCorrectType ( parameter ) ; if ( value != null ) { configuration . put ( parameter . getName ( ) , Arrays . asList ( value ) ) ; } } } else { Object value = ConfigUtil . getDefaultValueAsCorrectType ( parameter ) ; if ( value != null ) { configuration . put ( parameter . getName ( ) , value ) ; } } } } } } 
+","public static void applyDefaultConfiguration ( Configuration configuration , @ Nullable ConfigDescription configDescription ) { if ( configDescription != null ) { for ( ConfigDescriptionParameter parameter : configDescription . getParameters ( ) ) { String defaultValue = parameter . getDefault ( ) ; if ( defaultValue != null && configuration . get ( parameter . getName ( ) ) == null ) { if ( parameter . isMultiple ( ) ) { if ( defaultValue . contains ( DEFAULT_LIST_DELIMITER ) ) { List < Object > values = ( List < Object > ) List . of ( defaultValue . split ( DEFAULT_LIST_DELIMITER ) ) . stream ( ) . map ( String :: trim ) . filter ( not ( String :: isEmpty ) ) . map ( value -> ConfigUtil . getDefaultValueAsCorrectType ( parameter . getName ( ) , parameter . getType ( ) , value ) ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; Integer multipleLimit = parameter . getMultipleLimit ( ) ; if ( multipleLimit != null && values . size ( ) > multipleLimit . intValue ( ) ) { LoggerFactory . getLogger ( ConfigUtil . class ) . warn ( ""Number of default values ({}) for parameter '{}' is greater than multiple limit ({})"" , values . size ( ) , parameter . getName ( ) , multipleLimit ) ; } configuration . put ( parameter . getName ( ) , values ) ; } else { Object value = ConfigUtil . getDefaultValueAsCorrectType ( parameter ) ; if ( value != null ) { configuration . put ( parameter . getName ( ) , Arrays . asList ( value ) ) ; } } } else { Object value = ConfigUtil . getDefaultValueAsCorrectType ( parameter ) ; if ( value != null ) { configuration . put ( parameter . getName ( ) , value ) ; } } } } } } 
+"
+113,"public PredictionOutcome predict ( Task task , Map < String , Object > inputData ) { final String confidenceThreshold = System . getProperty ( ""org.jbpm.task.prediction.service.confidence_threshold"" ) ; if ( confidenceThreshold != null ) { try { this . confidenceThreshold = Double . parseDouble ( confidenceThreshold ) ; } catch ( NumberFormatException e ) { } } Map < String , Object > outcomes = new HashMap < > ( ) ; if ( outcomeSet . size ( ) >= 2 ) { model = new RandomForest ( dataset , this . numberTrees ) ; final double [ ] features = buildFeatures ( inputData ) ; final double [ ] posteriori = new double [ outcomeSet . size ( ) ] ; double prediction = model . predict ( features , posteriori ) ; String predictionStr = dataset . responseAttribute ( ) . toString ( prediction ) ; outcomes . put ( outcomeAttribute . getName ( ) , predictionStr ) ; final double confidence = posteriori [ ( int ) prediction ] ; outcomes . put ( ""confidence"" , confidence ) ; logger . debug ( inputData + "", prediction = "" + predictionStr + "", confidence = "" + confidence ) ; return new PredictionOutcome ( confidence , this . confidenceThreshold , outcomes ) ; } else { return new PredictionOutcome ( 0.0 , this . confidenceThreshold , outcomes ) ; } } 
+","public PredictionOutcome predict ( Task task , Map < String , Object > inputData ) { final String confidenceThreshold = System . getProperty ( ""org.jbpm.task.prediction.service.confidence_threshold"" ) ; if ( confidenceThreshold != null ) { try { this . confidenceThreshold = Double . parseDouble ( confidenceThreshold ) ; } catch ( NumberFormatException e ) { logger . error ( ""Invalid confidence threshold set in org.jbpm.task.prediction.service.confidence_threshold"" ) ; } } Map < String , Object > outcomes = new HashMap < > ( ) ; if ( outcomeSet . size ( ) >= 2 ) { model = new RandomForest ( dataset , this . numberTrees ) ; final double [ ] features = buildFeatures ( inputData ) ; final double [ ] posteriori = new double [ outcomeSet . size ( ) ] ; double prediction = model . predict ( features , posteriori ) ; String predictionStr = dataset . responseAttribute ( ) . toString ( prediction ) ; outcomes . put ( outcomeAttribute . getName ( ) , predictionStr ) ; final double confidence = posteriori [ ( int ) prediction ] ; outcomes . put ( ""confidence"" , confidence ) ; logger . debug ( inputData + "", prediction = "" + predictionStr + "", confidence = "" + confidence ) ; return new PredictionOutcome ( confidence , this . confidenceThreshold , outcomes ) ; } else { return new PredictionOutcome ( 0.0 , this . confidenceThreshold , outcomes ) ; } } 
+"
+114,"public PredictionOutcome predict ( Task task , Map < String , Object > inputData ) { final String confidenceThreshold = System . getProperty ( ""org.jbpm.task.prediction.service.confidence_threshold"" ) ; if ( confidenceThreshold != null ) { try { this . confidenceThreshold = Double . parseDouble ( confidenceThreshold ) ; } catch ( NumberFormatException e ) { logger . error ( ""Invalid confidence threshold set in org.jbpm.task.prediction.service.confidence_threshold"" ) ; } } Map < String , Object > outcomes = new HashMap < > ( ) ; if ( outcomeSet . size ( ) >= 2 ) { model = new RandomForest ( dataset , this . numberTrees ) ; final double [ ] features = buildFeatures ( inputData ) ; final double [ ] posteriori = new double [ outcomeSet . size ( ) ] ; double prediction = model . predict ( features , posteriori ) ; String predictionStr = dataset . responseAttribute ( ) . toString ( prediction ) ; outcomes . put ( outcomeAttribute . getName ( ) , predictionStr ) ; final double confidence = posteriori [ ( int ) prediction ] ; outcomes . put ( ""confidence"" , confidence ) ; return new PredictionOutcome ( confidence , this . confidenceThreshold , outcomes ) ; } else { return new PredictionOutcome ( 0.0 , this . confidenceThreshold , outcomes ) ; } } 
+","public PredictionOutcome predict ( Task task , Map < String , Object > inputData ) { final String confidenceThreshold = System . getProperty ( ""org.jbpm.task.prediction.service.confidence_threshold"" ) ; if ( confidenceThreshold != null ) { try { this . confidenceThreshold = Double . parseDouble ( confidenceThreshold ) ; } catch ( NumberFormatException e ) { logger . error ( ""Invalid confidence threshold set in org.jbpm.task.prediction.service.confidence_threshold"" ) ; } } Map < String , Object > outcomes = new HashMap < > ( ) ; if ( outcomeSet . size ( ) >= 2 ) { model = new RandomForest ( dataset , this . numberTrees ) ; final double [ ] features = buildFeatures ( inputData ) ; final double [ ] posteriori = new double [ outcomeSet . size ( ) ] ; double prediction = model . predict ( features , posteriori ) ; String predictionStr = dataset . responseAttribute ( ) . toString ( prediction ) ; outcomes . put ( outcomeAttribute . getName ( ) , predictionStr ) ; final double confidence = posteriori [ ( int ) prediction ] ; outcomes . put ( ""confidence"" , confidence ) ; logger . debug ( inputData + "", prediction = "" + predictionStr + "", confidence = "" + confidence ) ; return new PredictionOutcome ( confidence , this . confidenceThreshold , outcomes ) ; } else { return new PredictionOutcome ( 0.0 , this . confidenceThreshold , outcomes ) ; } } 
+"
+115,"public void onLoad ( ItemGroup < ? extends Item > parent , String name ) throws IOException { super . onLoad ( parent , name ) ; File buildDir = getBuildDir ( ) ; runIdMigrator = new RunIdMigrator ( ) ; runIdMigrator . migrate ( buildDir , Jenkins . get ( ) . getRootDir ( ) ) ; TextFile f = getNextBuildNumberFile ( ) ; if ( f . exists ( ) ) { try { synchronized ( this ) { this . nextBuildNumber = Integer . parseInt ( f . readTrim ( ) ) ; } } catch ( NumberFormatException e ) { if ( this instanceof LazyBuildMixIn . LazyLoadingJob ) { } else { RunT lB = getLastBuild ( ) ; synchronized ( this ) { this . nextBuildNumber = lB != null ? lB . getNumber ( ) + 1 : 1 ; } saveNextBuildNumber ( ) ; } } } else { saveNextBuildNumber ( ) ; } if ( properties == null ) properties = new CopyOnWriteList < > ( ) ; for ( JobProperty p : properties ) p . setOwner ( this ) ; } 
+","public void onLoad ( ItemGroup < ? extends Item > parent , String name ) throws IOException { super . onLoad ( parent , name ) ; File buildDir = getBuildDir ( ) ; runIdMigrator = new RunIdMigrator ( ) ; runIdMigrator . migrate ( buildDir , Jenkins . get ( ) . getRootDir ( ) ) ; TextFile f = getNextBuildNumberFile ( ) ; if ( f . exists ( ) ) { try { synchronized ( this ) { this . nextBuildNumber = Integer . parseInt ( f . readTrim ( ) ) ; } } catch ( NumberFormatException e ) { LOGGER . log ( Level . WARNING , ""Corruption in {0}: {1}"" , new Object [ ] { f , e } ) ; if ( this instanceof LazyBuildMixIn . LazyLoadingJob ) { } else { RunT lB = getLastBuild ( ) ; synchronized ( this ) { this . nextBuildNumber = lB != null ? lB . getNumber ( ) + 1 : 1 ; } saveNextBuildNumber ( ) ; } } } else { saveNextBuildNumber ( ) ; } if ( properties == null ) properties = new CopyOnWriteList < > ( ) ; for ( JobProperty p : properties ) p . setOwner ( this ) ; } 
+"
+116,"public GenericQueryConfiguration initialize ( AccumuloClient client , Query settings , Set < Authorizations > auths ) throws Exception { this . config = ShardQueryConfiguration . create ( this , settings ) ; if ( log . isTraceEnabled ( ) ) this . config . setExpandFields ( true ) ; this . config . setExpandValues ( true ) ; initialize ( config , client , settings , auths ) ; return config ; } 
+","public GenericQueryConfiguration initialize ( AccumuloClient client , Query settings , Set < Authorizations > auths ) throws Exception { this . config = ShardQueryConfiguration . create ( this , settings ) ; if ( log . isTraceEnabled ( ) ) log . trace ( ""Initializing ShardQueryLogic: "" + System . identityHashCode ( this ) + '(' + ( this . getSettings ( ) == null ? ""empty"" : this . getSettings ( ) . getId ( ) ) + ')' ) ; this . config . setExpandFields ( true ) ; this . config . setExpandValues ( true ) ; initialize ( config , client , settings , auths ) ; return config ; } 
+"
+117,"public final void afterPropertiesSet ( ) throws Exception { Object parentObject = null ; if ( ResourcesUtils . getResourceAsStream ( ""/org/springframework/web/context/WebApplicationContext.class"" ) != null ) { if ( this . applicationContext instanceof WebApplicationContext ) { parentObject = ( ( WebApplicationContext ) this . applicationContext ) . getServletContext ( ) ; } } Set < Class < ? > > needCheckRepeat = new HashSet < > ( ) ; if ( this . loadModules != null ) { for ( String name : this . loadModules ) { needCheckRepeat . add ( this . applicationContext . getType ( name ) ) ; this . buildConfig . loadModules . add ( ( Module ) this . applicationContext . getBean ( name ) ) ; } } if ( this . scanPackages != null && this . scanPackages . length > 0 ) { Predicate < Class < ? > > classPredicate = needCheckRepeat . isEmpty ( ) ? Matchers . anyClass ( ) : Matchers . anyClassExcludes ( needCheckRepeat ) ; AutoScanPackagesModule autoScanModule = new AutoScanPackagesModule ( this . scanPackages , classPredicate ) ; autoScanModule . setApplicationContext ( Objects . requireNonNull ( this . applicationContext ) ) ; this . buildConfig . loadModules . add ( autoScanModule ) ; } this . realAppContext = this . buildConfig . build ( parentObject , this . applicationContext ) . build ( this ) ; } 
+","public final void afterPropertiesSet ( ) throws Exception { Object parentObject = null ; if ( ResourcesUtils . getResourceAsStream ( ""/org/springframework/web/context/WebApplicationContext.class"" ) != null ) { if ( this . applicationContext instanceof WebApplicationContext ) { parentObject = ( ( WebApplicationContext ) this . applicationContext ) . getServletContext ( ) ; } } Set < Class < ? > > needCheckRepeat = new HashSet < > ( ) ; if ( this . loadModules != null ) { for ( String name : this . loadModules ) { needCheckRepeat . add ( this . applicationContext . getType ( name ) ) ; this . buildConfig . loadModules . add ( ( Module ) this . applicationContext . getBean ( name ) ) ; } } if ( this . scanPackages != null && this . scanPackages . length > 0 ) { Predicate < Class < ? > > classPredicate = needCheckRepeat . isEmpty ( ) ? Matchers . anyClass ( ) : Matchers . anyClassExcludes ( needCheckRepeat ) ; AutoScanPackagesModule autoScanModule = new AutoScanPackagesModule ( this . scanPackages , classPredicate ) ; autoScanModule . setApplicationContext ( Objects . requireNonNull ( this . applicationContext ) ) ; this . buildConfig . loadModules . add ( autoScanModule ) ; } this . realAppContext = this . buildConfig . build ( parentObject , this . applicationContext ) . build ( this ) ; logger . info ( ""hasor Spring factory inited."" ) ; } 
+"
+118,"public double get ( ) { try ( BufferedReader fileReaderMemoryMax = new BufferedReader ( new FileReader ( maxMemoryFile ) ) ) { return Long . parseLong ( fileReaderMemoryMax . readLine ( ) ) ; } catch ( Exception e ) { return Double . NaN ; } } 
+","public double get ( ) { try ( BufferedReader fileReaderMemoryMax = new BufferedReader ( new FileReader ( maxMemoryFile ) ) ) { return Long . parseLong ( fileReaderMemoryMax . readLine ( ) ) ; } catch ( Exception e ) { logger . debug ( ""Failed to read "" + maxMemoryFile + "" file"" , e ) ; return Double . NaN ; } } 
+"
+119,"protected RouteBuilder createRouteBuilder ( ) throws Exception { return new RouteBuilder ( ) { @ Override public void configure ( ) throws Exception { getContext ( ) . setTracing ( true ) ; from ( ""direct:c"" ) . transform ( constant ( ""Bye World"" ) ) . to ( ""mock:c"" ) ; } } ; } 
+","protected RouteBuilder createRouteBuilder ( ) throws Exception { return new RouteBuilder ( ) { @ Override public void configure ( ) throws Exception { getContext ( ) . setTracing ( true ) ; from ( ""direct:start"" ) . delay ( 10 ) . to ( ""mock:a"" ) . process ( new Processor ( ) { public void process ( Exchange exchange ) throws Exception { LOG . info ( ""This is the processor being invoked between mock:a and mock:b"" ) ; } } ) . to ( ""mock:b"" ) . toD ( ""direct:c"" ) . to ( ""mock:result"" ) . transform ( simple ( ""${body}${body}"" ) ) ; from ( ""direct:c"" ) . transform ( constant ( ""Bye World"" ) ) . to ( ""mock:c"" ) ; } } ; } 
+"
+120,"protected RouteBuilder createRouteBuilder ( ) throws Exception { return new RouteBuilder ( ) { @ Override public void configure ( ) throws Exception { getContext ( ) . setTracing ( true ) ; from ( ""direct:start"" ) . delay ( 10 ) . to ( ""mock:a"" ) . process ( new Processor ( ) { public void process ( Exchange exchange ) throws Exception { } } ) . to ( ""mock:b"" ) . toD ( ""direct:c"" ) . to ( ""mock:result"" ) . transform ( simple ( ""${body}${body}"" ) ) ; from ( ""direct:c"" ) . transform ( constant ( ""Bye World"" ) ) . to ( ""mock:c"" ) ; } } ; } 
+","protected RouteBuilder createRouteBuilder ( ) throws Exception { return new RouteBuilder ( ) { @ Override public void configure ( ) throws Exception { getContext ( ) . setTracing ( true ) ; from ( ""direct:start"" ) . delay ( 10 ) . to ( ""mock:a"" ) . process ( new Processor ( ) { public void process ( Exchange exchange ) throws Exception { LOG . info ( ""This is the processor being invoked between mock:a and mock:b"" ) ; } } ) . to ( ""mock:b"" ) . toD ( ""direct:c"" ) . to ( ""mock:result"" ) . transform ( simple ( ""${body}${body}"" ) ) ; from ( ""direct:c"" ) . transform ( constant ( ""Bye World"" ) ) . to ( ""mock:c"" ) ; } } ; } 
+"
+121,"public void logSettings ( Direction direction , ChannelHandlerContext ctx , Http2Settings settings ) { if ( isEnabled ( ) ) { } } 
+","public void logSettings ( Direction direction , ChannelHandlerContext ctx , Http2Settings settings ) { if ( isEnabled ( ) ) { logger . log ( level , ""{} {} SETTINGS: ack=false settings={}"" , ctx . channel ( ) , direction . name ( ) , settings ) ; } } 
+"
+122,"public void run ( ) { try { try ( SmbResource f = createTestFile ( ) ) { try { } finally { try { f . delete ( ) ; } catch ( IOException e ) { System . err . println ( f . getLocator ( ) . getUNCPath ( ) ) ; throw e ; } } } this . completed = true ; } catch ( IOException | RuntimeException e ) { } } 
+","public void run ( ) { try { try ( SmbResource f = createTestFile ( ) ) { try { } finally { try { f . delete ( ) ; } catch ( IOException e ) { System . err . println ( f . getLocator ( ) . getUNCPath ( ) ) ; throw e ; } } } this . completed = true ; } catch ( IOException | RuntimeException e ) { log . error ( ""Test case failed"" , e ) ; } } 
+"
+123,"public AdhocQueryResponse respondingGatewayCrossGatewayQuery ( AdhocQueryRequest request , AssertionType assertion ) { AdhocQueryResponse response = null ; try { if ( request != null ) { AdapterComponentDocRegistryProxyObjectFactory objFactory = new AdapterComponentDocRegistryProxyObjectFactory ( ) ; AdapterComponentDocRegistryProxy registryProxy = objFactory . getAdapterComponentDocRegistryProxy ( ) ; AdhocQueryRequest adhocQueryRequest = new AdhocQueryRequest ( ) ; adhocQueryRequest . setAdhocQuery ( request . getAdhocQuery ( ) ) ; adhocQueryRequest . setResponseOption ( request . getResponseOption ( ) ) ; adhocQueryRequest . setComment ( request . getComment ( ) ) ; adhocQueryRequest . setFederated ( request . isFederated ( ) ) ; adhocQueryRequest . setFederation ( request . getFederation ( ) ) ; adhocQueryRequest . setId ( request . getId ( ) ) ; adhocQueryRequest . setMaxResults ( request . getMaxResults ( ) ) ; adhocQueryRequest . setRequestSlotList ( request . getRequestSlotList ( ) ) ; adhocQueryRequest . setStartIndex ( request . getStartIndex ( ) ) ; response = registryProxy . registryStoredQuery ( request , assertion ) ; } else { throw new IllegalArgumentException ( ""Request must be provided."" ) ; } } catch ( Exception e ) { RegistryErrorList errorList = new RegistryErrorList ( ) ; response = new AdhocQueryResponse ( ) ; response . setRegistryObjectList ( new RegistryObjectListType ( ) ) ; response . setStatus ( DocumentConstants . XDS_QUERY_RESPONSE_STATUS_FAILURE ) ; RegistryError err = new RegistryError ( ) ; errorList . getRegistryError ( ) . add ( err ) ; response . setRegistryErrorList ( errorList ) ; err . setValue ( ERROR_VALUE ) ; err . setSeverity ( NhincConstants . XDS_REGISTRY_ERROR_SEVERITY_ERROR ) ; err . setCodeContext ( ERROR_CODE_CONTEXT ) ; err . setErrorCode ( DocumentConstants . XDS_ERRORCODE_REPOSITORY_ERROR ) ; throw new ErrorEventException ( e , response , ""Unable to query document registry."" ) ; } LOG . debug ( ""End AdapterDocQueryOrchImpl.respondingGatewayCrossGatewayQuery()"" ) ; return response ; } 
+","public AdhocQueryResponse respondingGatewayCrossGatewayQuery ( AdhocQueryRequest request , AssertionType assertion ) { LOG . debug ( ""Enter AdapterDocQueryOrchImpl.respondingGatewayCrossGatewayQuery()"" ) ; AdhocQueryResponse response = null ; try { if ( request != null ) { AdapterComponentDocRegistryProxyObjectFactory objFactory = new AdapterComponentDocRegistryProxyObjectFactory ( ) ; AdapterComponentDocRegistryProxy registryProxy = objFactory . getAdapterComponentDocRegistryProxy ( ) ; AdhocQueryRequest adhocQueryRequest = new AdhocQueryRequest ( ) ; adhocQueryRequest . setAdhocQuery ( request . getAdhocQuery ( ) ) ; adhocQueryRequest . setResponseOption ( request . getResponseOption ( ) ) ; adhocQueryRequest . setComment ( request . getComment ( ) ) ; adhocQueryRequest . setFederated ( request . isFederated ( ) ) ; adhocQueryRequest . setFederation ( request . getFederation ( ) ) ; adhocQueryRequest . setId ( request . getId ( ) ) ; adhocQueryRequest . setMaxResults ( request . getMaxResults ( ) ) ; adhocQueryRequest . setRequestSlotList ( request . getRequestSlotList ( ) ) ; adhocQueryRequest . setStartIndex ( request . getStartIndex ( ) ) ; response = registryProxy . registryStoredQuery ( request , assertion ) ; } else { throw new IllegalArgumentException ( ""Request must be provided."" ) ; } } catch ( Exception e ) { RegistryErrorList errorList = new RegistryErrorList ( ) ; response = new AdhocQueryResponse ( ) ; response . setRegistryObjectList ( new RegistryObjectListType ( ) ) ; response . setStatus ( DocumentConstants . XDS_QUERY_RESPONSE_STATUS_FAILURE ) ; RegistryError err = new RegistryError ( ) ; errorList . getRegistryError ( ) . add ( err ) ; response . setRegistryErrorList ( errorList ) ; err . setValue ( ERROR_VALUE ) ; err . setSeverity ( NhincConstants . XDS_REGISTRY_ERROR_SEVERITY_ERROR ) ; err . setCodeContext ( ERROR_CODE_CONTEXT ) ; err . setErrorCode ( DocumentConstants . XDS_ERRORCODE_REPOSITORY_ERROR ) ; throw new ErrorEventException ( e , response , ""Unable to query document registry."" ) ; } LOG . debug ( ""End AdapterDocQueryOrchImpl.respondingGatewayCrossGatewayQuery()"" ) ; return response ; } 
+"
+124,"public AdhocQueryResponse respondingGatewayCrossGatewayQuery ( AdhocQueryRequest request , AssertionType assertion ) { LOG . debug ( ""Enter AdapterDocQueryOrchImpl.respondingGatewayCrossGatewayQuery()"" ) ; AdhocQueryResponse response = null ; try { if ( request != null ) { AdapterComponentDocRegistryProxyObjectFactory objFactory = new AdapterComponentDocRegistryProxyObjectFactory ( ) ; AdapterComponentDocRegistryProxy registryProxy = objFactory . getAdapterComponentDocRegistryProxy ( ) ; AdhocQueryRequest adhocQueryRequest = new AdhocQueryRequest ( ) ; adhocQueryRequest . setAdhocQuery ( request . getAdhocQuery ( ) ) ; adhocQueryRequest . setResponseOption ( request . getResponseOption ( ) ) ; adhocQueryRequest . setComment ( request . getComment ( ) ) ; adhocQueryRequest . setFederated ( request . isFederated ( ) ) ; adhocQueryRequest . setFederation ( request . getFederation ( ) ) ; adhocQueryRequest . setId ( request . getId ( ) ) ; adhocQueryRequest . setMaxResults ( request . getMaxResults ( ) ) ; adhocQueryRequest . setRequestSlotList ( request . getRequestSlotList ( ) ) ; adhocQueryRequest . setStartIndex ( request . getStartIndex ( ) ) ; response = registryProxy . registryStoredQuery ( request , assertion ) ; } else { throw new IllegalArgumentException ( ""Request must be provided."" ) ; } } catch ( Exception e ) { RegistryErrorList errorList = new RegistryErrorList ( ) ; response = new AdhocQueryResponse ( ) ; response . setRegistryObjectList ( new RegistryObjectListType ( ) ) ; response . setStatus ( DocumentConstants . XDS_QUERY_RESPONSE_STATUS_FAILURE ) ; RegistryError err = new RegistryError ( ) ; errorList . getRegistryError ( ) . add ( err ) ; response . setRegistryErrorList ( errorList ) ; err . setValue ( ERROR_VALUE ) ; err . setSeverity ( NhincConstants . XDS_REGISTRY_ERROR_SEVERITY_ERROR ) ; err . setCodeContext ( ERROR_CODE_CONTEXT ) ; err . setErrorCode ( DocumentConstants . XDS_ERRORCODE_REPOSITORY_ERROR ) ; throw new ErrorEventException ( e , response , ""Unable to query document registry."" ) ; } return response ; } 
+","public AdhocQueryResponse respondingGatewayCrossGatewayQuery ( AdhocQueryRequest request , AssertionType assertion ) { LOG . debug ( ""Enter AdapterDocQueryOrchImpl.respondingGatewayCrossGatewayQuery()"" ) ; AdhocQueryResponse response = null ; try { if ( request != null ) { AdapterComponentDocRegistryProxyObjectFactory objFactory = new AdapterComponentDocRegistryProxyObjectFactory ( ) ; AdapterComponentDocRegistryProxy registryProxy = objFactory . getAdapterComponentDocRegistryProxy ( ) ; AdhocQueryRequest adhocQueryRequest = new AdhocQueryRequest ( ) ; adhocQueryRequest . setAdhocQuery ( request . getAdhocQuery ( ) ) ; adhocQueryRequest . setResponseOption ( request . getResponseOption ( ) ) ; adhocQueryRequest . setComment ( request . getComment ( ) ) ; adhocQueryRequest . setFederated ( request . isFederated ( ) ) ; adhocQueryRequest . setFederation ( request . getFederation ( ) ) ; adhocQueryRequest . setId ( request . getId ( ) ) ; adhocQueryRequest . setMaxResults ( request . getMaxResults ( ) ) ; adhocQueryRequest . setRequestSlotList ( request . getRequestSlotList ( ) ) ; adhocQueryRequest . setStartIndex ( request . getStartIndex ( ) ) ; response = registryProxy . registryStoredQuery ( request , assertion ) ; } else { throw new IllegalArgumentException ( ""Request must be provided."" ) ; } } catch ( Exception e ) { RegistryErrorList errorList = new RegistryErrorList ( ) ; response = new AdhocQueryResponse ( ) ; response . setRegistryObjectList ( new RegistryObjectListType ( ) ) ; response . setStatus ( DocumentConstants . XDS_QUERY_RESPONSE_STATUS_FAILURE ) ; RegistryError err = new RegistryError ( ) ; errorList . getRegistryError ( ) . add ( err ) ; response . setRegistryErrorList ( errorList ) ; err . setValue ( ERROR_VALUE ) ; err . setSeverity ( NhincConstants . XDS_REGISTRY_ERROR_SEVERITY_ERROR ) ; err . setCodeContext ( ERROR_CODE_CONTEXT ) ; err . setErrorCode ( DocumentConstants . XDS_ERRORCODE_REPOSITORY_ERROR ) ; throw new ErrorEventException ( e , response , ""Unable to query document registry."" ) ; } LOG . debug ( ""End AdapterDocQueryOrchImpl.respondingGatewayCrossGatewayQuery()"" ) ; return response ; } 
+"
+125,"public MigrateState updateSecondaryIndex ( CommandParamInfo paramInfo ) { checkRequiredParameters ( paramInfo ) ; long startAllTables = System . currentTimeMillis ( ) ; String currentTable = null ; TransactionalDataSource sourceDataSource = databaseManager . getDataSource ( ) ; ShardRecovery recovery = shardRecoveryDaoJdbc . getLatestShardRecovery ( sourceDataSource ) ; if ( recovery != null && recovery . getState ( ) != null && recovery . getState ( ) . getValue ( ) > SECONDARY_INDEX_FINISHED . getValue ( ) ) { return state = SECONDARY_INDEX_FINISHED ; } else { recovery = new ShardRecovery ( SECONDARY_INDEX_STARTED ) ; } durableTaskUpdateByState ( state , 13.0 , ""Secondary indexes creation..."" ) ; try ( Connection sourceConnect = beginOrOpenConnection ( sourceDataSource ) ) { for ( TableInfo tableInfo : paramInfo . getTableInfoList ( ) ) { long start = System . currentTimeMillis ( ) ; currentTable = tableInfo . getName ( ) ; BatchedPaginationOperation paginationOperationHelper = helperFactory . createSelectInsertHelper ( currentTable ) ; processOneTableByHelper ( paramInfo , sourceConnect , currentTable , start , paginationOperationHelper ) ; recovery = updateShardRecoveryProcessedTableList ( sourceConnect , currentTable , SECONDARY_INDEX_STARTED ) ; incrementDurableTaskUpdateByPercent ( 1.8 ) ; } state = SECONDARY_INDEX_FINISHED ; updateToFinalStepState ( sourceConnect , recovery , state ) ; sourceConnect . commit ( ) ; durableTaskUpdateByState ( state , 17.0 , ""Secondary indexes created..."" ) ; } catch ( Exception e ) { log . error ( ""Error UPDATE S/Index processing table = '"" + currentTable + ""'"" , e ) ; sourceDataSource . rollback ( false ) ; state = MigrateState . FAILED ; durableTaskUpdateByState ( state , null , null ) ; return state ; } finally { if ( sourceDataSource != null ) { sourceDataSource . commit ( ) ; } } log . debug ( ""UPDATE Processed table(s)=[{}] in {} sec"" , paramInfo . getTableInfoList ( ) . size ( ) , ( System . currentTimeMillis ( ) - startAllTables ) / 1000 ) ; return state ; } 
+","public MigrateState updateSecondaryIndex ( CommandParamInfo paramInfo ) { checkRequiredParameters ( paramInfo ) ; long startAllTables = System . currentTimeMillis ( ) ; log . debug ( ""Starting SECONDARY INDEX data update from [{}] tables..."" , paramInfo . getTableInfoList ( ) . size ( ) ) ; String currentTable = null ; TransactionalDataSource sourceDataSource = databaseManager . getDataSource ( ) ; ShardRecovery recovery = shardRecoveryDaoJdbc . getLatestShardRecovery ( sourceDataSource ) ; if ( recovery != null && recovery . getState ( ) != null && recovery . getState ( ) . getValue ( ) > SECONDARY_INDEX_FINISHED . getValue ( ) ) { return state = SECONDARY_INDEX_FINISHED ; } else { recovery = new ShardRecovery ( SECONDARY_INDEX_STARTED ) ; } durableTaskUpdateByState ( state , 13.0 , ""Secondary indexes creation..."" ) ; try ( Connection sourceConnect = beginOrOpenConnection ( sourceDataSource ) ) { for ( TableInfo tableInfo : paramInfo . getTableInfoList ( ) ) { long start = System . currentTimeMillis ( ) ; currentTable = tableInfo . getName ( ) ; BatchedPaginationOperation paginationOperationHelper = helperFactory . createSelectInsertHelper ( currentTable ) ; processOneTableByHelper ( paramInfo , sourceConnect , currentTable , start , paginationOperationHelper ) ; recovery = updateShardRecoveryProcessedTableList ( sourceConnect , currentTable , SECONDARY_INDEX_STARTED ) ; incrementDurableTaskUpdateByPercent ( 1.8 ) ; } state = SECONDARY_INDEX_FINISHED ; updateToFinalStepState ( sourceConnect , recovery , state ) ; sourceConnect . commit ( ) ; durableTaskUpdateByState ( state , 17.0 , ""Secondary indexes created..."" ) ; } catch ( Exception e ) { log . error ( ""Error UPDATE S/Index processing table = '"" + currentTable + ""'"" , e ) ; sourceDataSource . rollback ( false ) ; state = MigrateState . FAILED ; durableTaskUpdateByState ( state , null , null ) ; return state ; } finally { if ( sourceDataSource != null ) { sourceDataSource . commit ( ) ; } } log . debug ( ""UPDATE Processed table(s)=[{}] in {} sec"" , paramInfo . getTableInfoList ( ) . size ( ) , ( System . currentTimeMillis ( ) - startAllTables ) / 1000 ) ; return state ; } 
+"
+126,"public MigrateState updateSecondaryIndex ( CommandParamInfo paramInfo ) { checkRequiredParameters ( paramInfo ) ; long startAllTables = System . currentTimeMillis ( ) ; log . debug ( ""Starting SECONDARY INDEX data update from [{}] tables..."" , paramInfo . getTableInfoList ( ) . size ( ) ) ; String currentTable = null ; TransactionalDataSource sourceDataSource = databaseManager . getDataSource ( ) ; ShardRecovery recovery = shardRecoveryDaoJdbc . getLatestShardRecovery ( sourceDataSource ) ; if ( recovery != null && recovery . getState ( ) != null && recovery . getState ( ) . getValue ( ) > SECONDARY_INDEX_FINISHED . getValue ( ) ) { return state = SECONDARY_INDEX_FINISHED ; } else { recovery = new ShardRecovery ( SECONDARY_INDEX_STARTED ) ; } durableTaskUpdateByState ( state , 13.0 , ""Secondary indexes creation..."" ) ; try ( Connection sourceConnect = beginOrOpenConnection ( sourceDataSource ) ) { for ( TableInfo tableInfo : paramInfo . getTableInfoList ( ) ) { long start = System . currentTimeMillis ( ) ; currentTable = tableInfo . getName ( ) ; BatchedPaginationOperation paginationOperationHelper = helperFactory . createSelectInsertHelper ( currentTable ) ; processOneTableByHelper ( paramInfo , sourceConnect , currentTable , start , paginationOperationHelper ) ; recovery = updateShardRecoveryProcessedTableList ( sourceConnect , currentTable , SECONDARY_INDEX_STARTED ) ; incrementDurableTaskUpdateByPercent ( 1.8 ) ; } state = SECONDARY_INDEX_FINISHED ; updateToFinalStepState ( sourceConnect , recovery , state ) ; sourceConnect . commit ( ) ; durableTaskUpdateByState ( state , 17.0 , ""Secondary indexes created..."" ) ; } catch ( Exception e ) { sourceDataSource . rollback ( false ) ; state = MigrateState . FAILED ; durableTaskUpdateByState ( state , null , null ) ; return state ; } finally { if ( sourceDataSource != null ) { sourceDataSource . commit ( ) ; } } log . debug ( ""UPDATE Processed table(s)=[{}] in {} sec"" , paramInfo . getTableInfoList ( ) . size ( ) , ( System . currentTimeMillis ( ) - startAllTables ) / 1000 ) ; return state ; } 
+","public MigrateState updateSecondaryIndex ( CommandParamInfo paramInfo ) { checkRequiredParameters ( paramInfo ) ; long startAllTables = System . currentTimeMillis ( ) ; log . debug ( ""Starting SECONDARY INDEX data update from [{}] tables..."" , paramInfo . getTableInfoList ( ) . size ( ) ) ; String currentTable = null ; TransactionalDataSource sourceDataSource = databaseManager . getDataSource ( ) ; ShardRecovery recovery = shardRecoveryDaoJdbc . getLatestShardRecovery ( sourceDataSource ) ; if ( recovery != null && recovery . getState ( ) != null && recovery . getState ( ) . getValue ( ) > SECONDARY_INDEX_FINISHED . getValue ( ) ) { return state = SECONDARY_INDEX_FINISHED ; } else { recovery = new ShardRecovery ( SECONDARY_INDEX_STARTED ) ; } durableTaskUpdateByState ( state , 13.0 , ""Secondary indexes creation..."" ) ; try ( Connection sourceConnect = beginOrOpenConnection ( sourceDataSource ) ) { for ( TableInfo tableInfo : paramInfo . getTableInfoList ( ) ) { long start = System . currentTimeMillis ( ) ; currentTable = tableInfo . getName ( ) ; BatchedPaginationOperation paginationOperationHelper = helperFactory . createSelectInsertHelper ( currentTable ) ; processOneTableByHelper ( paramInfo , sourceConnect , currentTable , start , paginationOperationHelper ) ; recovery = updateShardRecoveryProcessedTableList ( sourceConnect , currentTable , SECONDARY_INDEX_STARTED ) ; incrementDurableTaskUpdateByPercent ( 1.8 ) ; } state = SECONDARY_INDEX_FINISHED ; updateToFinalStepState ( sourceConnect , recovery , state ) ; sourceConnect . commit ( ) ; durableTaskUpdateByState ( state , 17.0 , ""Secondary indexes created..."" ) ; } catch ( Exception e ) { log . error ( ""Error UPDATE S/Index processing table = '"" + currentTable + ""'"" , e ) ; sourceDataSource . rollback ( false ) ; state = MigrateState . FAILED ; durableTaskUpdateByState ( state , null , null ) ; return state ; } finally { if ( sourceDataSource != null ) { sourceDataSource . commit ( ) ; } } log . debug ( ""UPDATE Processed table(s)=[{}] in {} sec"" , paramInfo . getTableInfoList ( ) . size ( ) , ( System . currentTimeMillis ( ) - startAllTables ) / 1000 ) ; return state ; } 
+"
+127,"public MigrateState updateSecondaryIndex ( CommandParamInfo paramInfo ) { checkRequiredParameters ( paramInfo ) ; long startAllTables = System . currentTimeMillis ( ) ; log . debug ( ""Starting SECONDARY INDEX data update from [{}] tables..."" , paramInfo . getTableInfoList ( ) . size ( ) ) ; String currentTable = null ; TransactionalDataSource sourceDataSource = databaseManager . getDataSource ( ) ; ShardRecovery recovery = shardRecoveryDaoJdbc . getLatestShardRecovery ( sourceDataSource ) ; if ( recovery != null && recovery . getState ( ) != null && recovery . getState ( ) . getValue ( ) > SECONDARY_INDEX_FINISHED . getValue ( ) ) { return state = SECONDARY_INDEX_FINISHED ; } else { recovery = new ShardRecovery ( SECONDARY_INDEX_STARTED ) ; } durableTaskUpdateByState ( state , 13.0 , ""Secondary indexes creation..."" ) ; try ( Connection sourceConnect = beginOrOpenConnection ( sourceDataSource ) ) { for ( TableInfo tableInfo : paramInfo . getTableInfoList ( ) ) { long start = System . currentTimeMillis ( ) ; currentTable = tableInfo . getName ( ) ; BatchedPaginationOperation paginationOperationHelper = helperFactory . createSelectInsertHelper ( currentTable ) ; processOneTableByHelper ( paramInfo , sourceConnect , currentTable , start , paginationOperationHelper ) ; recovery = updateShardRecoveryProcessedTableList ( sourceConnect , currentTable , SECONDARY_INDEX_STARTED ) ; incrementDurableTaskUpdateByPercent ( 1.8 ) ; } state = SECONDARY_INDEX_FINISHED ; updateToFinalStepState ( sourceConnect , recovery , state ) ; sourceConnect . commit ( ) ; durableTaskUpdateByState ( state , 17.0 , ""Secondary indexes created..."" ) ; } catch ( Exception e ) { log . error ( ""Error UPDATE S/Index processing table = '"" + currentTable + ""'"" , e ) ; sourceDataSource . rollback ( false ) ; state = MigrateState . FAILED ; durableTaskUpdateByState ( state , null , null ) ; return state ; } finally { if ( sourceDataSource != null ) { sourceDataSource . commit ( ) ; } } return state ; } 
+","public MigrateState updateSecondaryIndex ( CommandParamInfo paramInfo ) { checkRequiredParameters ( paramInfo ) ; long startAllTables = System . currentTimeMillis ( ) ; log . debug ( ""Starting SECONDARY INDEX data update from [{}] tables..."" , paramInfo . getTableInfoList ( ) . size ( ) ) ; String currentTable = null ; TransactionalDataSource sourceDataSource = databaseManager . getDataSource ( ) ; ShardRecovery recovery = shardRecoveryDaoJdbc . getLatestShardRecovery ( sourceDataSource ) ; if ( recovery != null && recovery . getState ( ) != null && recovery . getState ( ) . getValue ( ) > SECONDARY_INDEX_FINISHED . getValue ( ) ) { return state = SECONDARY_INDEX_FINISHED ; } else { recovery = new ShardRecovery ( SECONDARY_INDEX_STARTED ) ; } durableTaskUpdateByState ( state , 13.0 , ""Secondary indexes creation..."" ) ; try ( Connection sourceConnect = beginOrOpenConnection ( sourceDataSource ) ) { for ( TableInfo tableInfo : paramInfo . getTableInfoList ( ) ) { long start = System . currentTimeMillis ( ) ; currentTable = tableInfo . getName ( ) ; BatchedPaginationOperation paginationOperationHelper = helperFactory . createSelectInsertHelper ( currentTable ) ; processOneTableByHelper ( paramInfo , sourceConnect , currentTable , start , paginationOperationHelper ) ; recovery = updateShardRecoveryProcessedTableList ( sourceConnect , currentTable , SECONDARY_INDEX_STARTED ) ; incrementDurableTaskUpdateByPercent ( 1.8 ) ; } state = SECONDARY_INDEX_FINISHED ; updateToFinalStepState ( sourceConnect , recovery , state ) ; sourceConnect . commit ( ) ; durableTaskUpdateByState ( state , 17.0 , ""Secondary indexes created..."" ) ; } catch ( Exception e ) { log . error ( ""Error UPDATE S/Index processing table = '"" + currentTable + ""'"" , e ) ; sourceDataSource . rollback ( false ) ; state = MigrateState . FAILED ; durableTaskUpdateByState ( state , null , null ) ; return state ; } finally { if ( sourceDataSource != null ) { sourceDataSource . commit ( ) ; } } log . debug ( ""UPDATE Processed table(s)=[{}] in {} sec"" , paramInfo . getTableInfoList ( ) . size ( ) , ( System . currentTimeMillis ( ) - startAllTables ) / 1000 ) ; return state ; } 
+"
+128,"public static int getGroupThreadsCount ( long groupId , long userId , int status , boolean subscribed , boolean includeAnonymous ) throws RemoteException { try { int returnValue = MBThreadServiceUtil .
getGroupThreadsCount ( groupId , userId , status , subscribed , includeAnonymous ) ; return returnValue ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } } +","public static int getGroupThreadsCount ( long groupId , long userId , int status , boolean subscribed , boolean includeAnonymous ) throws RemoteException { try { int returnValue = MBThreadServiceUtil . getGroupThreadsCount ( groupId , userId , status , subscribed , includeAnonymous ) ; return returnValue ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } } +" +129,"public List findByExample ( SysNotizen instance ) { try { List results = sessionFactory . getCurrentSession ( ) . createCriteria ( ""sernet.gs.reveng.SysNotizen"" ) . add ( Example . create ( instance ) ) . list ( ) ; log . debug ( ""find by example successful, result size: "" + results . size ( ) ) ; return results ; } catch ( RuntimeException re ) { log . error ( ""find by example failed"" , re ) ; throw re ; } } +","public List findByExample ( SysNotizen instance ) { log . debug ( ""finding SysNotizen instance by example"" ) ; try { List results = sessionFactory . getCurrentSession ( ) . createCriteria ( ""sernet.gs.reveng.SysNotizen"" ) . add ( Example . create ( instance ) ) . list ( ) ; log . debug ( ""find by example successful, result size: "" + results . size ( ) ) ; return results ; } catch ( RuntimeException re ) { log . error ( ""find by example failed"" , re ) ; throw re ; } } +" +130,"public List findByExample ( SysNotizen instance ) { log . debug ( ""finding SysNotizen instance by example"" ) ; try { List results = sessionFactory . getCurrentSession ( ) . createCriteria ( ""sernet.gs.reveng.SysNotizen"" ) . add ( Example . create ( instance ) ) . list ( ) ; return results ; } catch ( RuntimeException re ) { log . error ( ""find by example failed"" , re ) ; throw re ; } } +","public List findByExample ( SysNotizen instance ) { log . debug ( ""finding SysNotizen instance by example"" ) ; try { List results = sessionFactory . getCurrentSession ( ) . createCriteria ( ""sernet.gs.reveng.SysNotizen"" ) . add ( Example . create ( instance ) ) . list ( ) ; log . debug ( ""find by example successful, result size: "" + results . size ( ) ) ; return results ; } catch ( RuntimeException re ) { log . error ( ""find by example failed"" , re ) ; throw re ; } } +" +131,"public List findByExample ( SysNotizen instance ) { log . debug ( ""finding SysNotizen instance by example"" ) ; try { List results = sessionFactory . getCurrentSession ( ) . createCriteria ( ""sernet.gs.reveng.SysNotizen"" ) . add ( Example . create ( instance ) ) . list ( ) ; log . debug ( ""find by example successful, result size: "" + results . size ( ) ) ; return results ; } catch ( RuntimeException re ) { throw re ; } } +","public List findByExample ( SysNotizen instance ) { log . debug ( ""finding SysNotizen instance by example"" ) ; try { List results = sessionFactory . getCurrentSession ( ) . createCriteria ( ""sernet.gs.reveng.SysNotizen"" ) . add ( Example . create ( instance ) ) . list ( ) ; log . debug ( ""find by example successful, result size: "" + results . size ( ) ) ; return results ; } catch ( RuntimeException re ) { log . 
error ( ""find by example failed"" , re ) ; throw re ; } } +" +132,"public int abortMultipartUploads ( S3FileTransferRequestParamsDto params , Date thresholdDate ) { AmazonS3Client s3Client = getAmazonS3 ( params ) ; int abortedMultipartUploadsCount = 0 ; try { String uploadIdMarker = null ; String keyMarker = null ; boolean truncated ; do { ListMultipartUploadsRequest request = new ListMultipartUploadsRequest ( params . getS3BucketName ( ) ) ; request . setUploadIdMarker ( uploadIdMarker ) ; request . setKeyMarker ( keyMarker ) ; MultipartUploadListing uploadListing = s3Operations . listMultipartUploads ( TransferManager . appendSingleObjectUserAgent ( request ) , s3Client ) ; for ( MultipartUpload upload : uploadListing . getMultipartUploads ( ) ) { if ( upload . getInitiated ( ) . compareTo ( thresholdDate ) < 0 ) { s3Operations . abortMultipartUpload ( TransferManager . appendSingleObjectUserAgent ( new AbortMultipartUploadRequest ( params . getS3BucketName ( ) , upload . getKey ( ) , upload . getUploadId ( ) ) ) , s3Client ) ; abortedMultipartUploadsCount ++ ; } } truncated = uploadListing . isTruncated ( ) ; if ( truncated ) { uploadIdMarker = uploadListing . getNextUploadIdMarker ( ) ; keyMarker = uploadListing . getNextKeyMarker ( ) ; } } while ( truncated ) ; } finally { s3Client . shutdown ( ) ; } return abortedMultipartUploadsCount ; } +","public int abortMultipartUploads ( S3FileTransferRequestParamsDto params , Date thresholdDate ) { AmazonS3Client s3Client = getAmazonS3 ( params ) ; int abortedMultipartUploadsCount = 0 ; try { String uploadIdMarker = null ; String keyMarker = null ; boolean truncated ; do { ListMultipartUploadsRequest request = new ListMultipartUploadsRequest ( params . getS3BucketName ( ) ) ; request . setUploadIdMarker ( uploadIdMarker ) ; request . setKeyMarker ( keyMarker ) ; MultipartUploadListing uploadListing = s3Operations . listMultipartUploads ( TransferManager . appendSingleObjectUserAgent ( request ) , s3Client ) ; for ( MultipartUpload upload : uploadListing . getMultipartUploads ( ) ) { if ( upload . getInitiated ( ) . compareTo ( thresholdDate ) < 0 ) { s3Operations . abortMultipartUpload ( TransferManager . appendSingleObjectUserAgent ( new AbortMultipartUploadRequest ( params . getS3BucketName ( ) , upload . getKey ( ) , upload . getUploadId ( ) ) ) , s3Client ) ; LOGGER . info ( ""Aborted S3 multipart upload. s3Key=\""{}\"" s3BucketName=\""{}\"" s3MultipartUploadInitiatedDate=\""{}\"""" , upload . getKey ( ) , params . getS3BucketName ( ) , upload . getInitiated ( ) ) ; abortedMultipartUploadsCount ++ ; } } truncated = uploadListing . isTruncated ( ) ; if ( truncated ) { uploadIdMarker = uploadListing . getNextUploadIdMarker ( ) ; keyMarker = uploadListing . getNextKeyMarker ( ) ; } } while ( truncated ) ; } finally { s3Client . shutdown ( ) ; } return abortedMultipartUploadsCount ; } +" +133,"public static long [ ] getCPDefinitionIds ( long commercePricingClassId ) throws RemoteException { try { long [ ] returnValue = CommercePricingClassCPDefinitionRelServiceUtil . getCPDefinitionIds ( commercePricingClassId ) ; return returnValue ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } } +","public static long [ ] getCPDefinitionIds ( long commercePricingClassId ) throws RemoteException { try { long [ ] returnValue = CommercePricingClassCPDefinitionRelServiceUtil . getCPDefinitionIds ( commercePricingClassId ) ; return returnValue ; } catch ( Exception exception ) { _log . 
error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } } +" +134,"public void sessionIdle ( NextFilter nextFilter , IoSession session , IdleStatus status ) throws Exception { if ( status == IdleStatus . BOTH_IDLE && SESSION_IDLE_TIMEOUT_KEY . get ( session ) != null ) { if ( logger . isDebugEnabled ( ) ) { } session . close ( false ) ; } super . sessionIdle ( nextFilter , session , status ) ; } +","public void sessionIdle ( NextFilter nextFilter , IoSession session , IdleStatus status ) throws Exception { if ( status == IdleStatus . BOTH_IDLE && SESSION_IDLE_TIMEOUT_KEY . get ( session ) != null ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( String . format ( ""Closing HTTP parent session %s because http.keepalive.timeout of %d secs is exceeded"" , session , SESSION_IDLE_TIMEOUT_KEY . get ( session ) ) ) ; } session . close ( false ) ; } super . sessionIdle ( nextFilter , session , status ) ; } +" +135,"private InternalFuture < Void > createRoleBindingsForExperimentRun ( final ExperimentRun experimentRun ) { ModelDBResourceEnum . ModelDBServiceResourceTypes modelDBServiceResourceType = ModelDBResourceEnum . ModelDBServiceResourceTypes . EXPERIMENT_RUN ; String roleName = ModelDBConstants . ROLE_EXPERIMENT_RUN_OWNER ; return FutureGrpc . ClientRequest ( uac . getRoleService ( ) . setRoleBinding ( SetRoleBinding . newBuilder ( ) . setRoleBinding ( RoleBinding . newBuilder ( ) . setName ( buildRoleBindingName ( roleName , experimentRun . getId ( ) , experimentRun . getOwner ( ) , modelDBServiceResourceType . name ( ) ) ) . setScope ( RoleScope . newBuilder ( ) . build ( ) ) . setRoleName ( roleName ) . addEntities ( Entities . newBuilder ( ) . addUserIds ( experimentRun . getOwner ( ) ) . build ( ) ) . addResources ( Resources . newBuilder ( ) . setService ( ServiceEnum . Service . MODELDB_SERVICE ) . setResourceType ( ResourceType . newBuilder ( ) . setModeldbServiceResourceType ( modelDBServiceResourceType ) ) . addResourceIds ( experimentRun . getId ( ) ) . build ( ) ) . build ( ) ) . build ( ) ) , executor ) . thenAccept ( response -> { } , executor ) ; } +","private InternalFuture < Void > createRoleBindingsForExperimentRun ( final ExperimentRun experimentRun ) { ModelDBResourceEnum . ModelDBServiceResourceTypes modelDBServiceResourceType = ModelDBResourceEnum . ModelDBServiceResourceTypes . EXPERIMENT_RUN ; String roleName = ModelDBConstants . ROLE_EXPERIMENT_RUN_OWNER ; return FutureGrpc . ClientRequest ( uac . getRoleService ( ) . setRoleBinding ( SetRoleBinding . newBuilder ( ) . setRoleBinding ( RoleBinding . newBuilder ( ) . setName ( buildRoleBindingName ( roleName , experimentRun . getId ( ) , experimentRun . getOwner ( ) , modelDBServiceResourceType . name ( ) ) ) . setScope ( RoleScope . newBuilder ( ) . build ( ) ) . setRoleName ( roleName ) . addEntities ( Entities . newBuilder ( ) . addUserIds ( experimentRun . getOwner ( ) ) . build ( ) ) . addResources ( Resources . newBuilder ( ) . setService ( ServiceEnum . Service . MODELDB_SERVICE ) . setResourceType ( ResourceType . newBuilder ( ) . setModeldbServiceResourceType ( modelDBServiceResourceType ) ) . addResourceIds ( experimentRun . getId ( ) ) . build ( ) ) . build ( ) ) . build ( ) ) , executor ) . thenAccept ( response -> { LOGGER . trace ( CommonMessages . ROLE_SERVICE_RES_RECEIVED_TRACE_MSG , response ) ; } , executor ) ; } +" +136,"protected void calculateActions ( Game game ) { currentScore = GameStateEvaluator . evaluate ( playerId , game ) ; if ( ! 
getNextAction ( game ) ) { Game sim = createSimulation ( game ) ; SimulationNode . resetCount ( ) ; root = new SimulationNode ( null , sim , playerId ) ; if ( ! isTestMode ) addActionsTimed ( ) ; else addActions ( root , Integer . MIN_VALUE , Integer . MAX_VALUE ) ; logger . info ( name + "" simulated "" + nodeCount + "" nodes in "" + thinkTime / 1000000000.0 + ""s - average "" + nodeCount / ( thinkTime / 1000000000.0 ) + "" nodes/s"" ) ; if ( ! root . children . isEmpty ( ) ) { root = root . children . get ( 0 ) ; actions = new LinkedList < > ( root . abilities ) ; combat = root . combat ; if ( logger . isDebugEnabled ( ) ) logger . debug ( ""adding actions:"" + actions ) ; } else logger . debug ( ""no actions added"" ) ; } } +","protected void calculateActions ( Game game ) { currentScore = GameStateEvaluator . evaluate ( playerId , game ) ; if ( ! getNextAction ( game ) ) { Game sim = createSimulation ( game ) ; SimulationNode . resetCount ( ) ; root = new SimulationNode ( null , sim , playerId ) ; logger . debug ( ""simulating actions"" ) ; if ( ! isTestMode ) addActionsTimed ( ) ; else addActions ( root , Integer . MIN_VALUE , Integer . MAX_VALUE ) ; logger . info ( name + "" simulated "" + nodeCount + "" nodes in "" + thinkTime / 1000000000.0 + ""s - average "" + nodeCount / ( thinkTime / 1000000000.0 ) + "" nodes/s"" ) ; if ( ! root . children . isEmpty ( ) ) { root = root . children . get ( 0 ) ; actions = new LinkedList < > ( root . abilities ) ; combat = root . combat ; if ( logger . isDebugEnabled ( ) ) logger . debug ( ""adding actions:"" + actions ) ; } else logger . debug ( ""no actions added"" ) ; } } +" +137,"protected void calculateActions ( Game game ) { currentScore = GameStateEvaluator . evaluate ( playerId , game ) ; if ( ! getNextAction ( game ) ) { Game sim = createSimulation ( game ) ; SimulationNode . resetCount ( ) ; root = new SimulationNode ( null , sim , playerId ) ; logger . debug ( ""simulating actions"" ) ; if ( ! isTestMode ) addActionsTimed ( ) ; else addActions ( root , Integer . MIN_VALUE , Integer . MAX_VALUE ) ; if ( ! root . children . isEmpty ( ) ) { root = root . children . get ( 0 ) ; actions = new LinkedList < > ( root . abilities ) ; combat = root . combat ; if ( logger . isDebugEnabled ( ) ) logger . debug ( ""adding actions:"" + actions ) ; } else logger . debug ( ""no actions added"" ) ; } } +","protected void calculateActions ( Game game ) { currentScore = GameStateEvaluator . evaluate ( playerId , game ) ; if ( ! getNextAction ( game ) ) { Game sim = createSimulation ( game ) ; SimulationNode . resetCount ( ) ; root = new SimulationNode ( null , sim , playerId ) ; logger . debug ( ""simulating actions"" ) ; if ( ! isTestMode ) addActionsTimed ( ) ; else addActions ( root , Integer . MIN_VALUE , Integer . MAX_VALUE ) ; logger . info ( name + "" simulated "" + nodeCount + "" nodes in "" + thinkTime / 1000000000.0 + ""s - average "" + nodeCount / ( thinkTime / 1000000000.0 ) + "" nodes/s"" ) ; if ( ! root . children . isEmpty ( ) ) { root = root . children . get ( 0 ) ; actions = new LinkedList < > ( root . abilities ) ; combat = root . combat ; if ( logger . isDebugEnabled ( ) ) logger . debug ( ""adding actions:"" + actions ) ; } else logger . debug ( ""no actions added"" ) ; } } +" +138,"protected void calculateActions ( Game game ) { currentScore = GameStateEvaluator . evaluate ( playerId , game ) ; if ( ! getNextAction ( game ) ) { Game sim = createSimulation ( game ) ; SimulationNode . 
resetCount ( ) ; root = new SimulationNode ( null , sim , playerId ) ; logger . debug ( ""simulating actions"" ) ; if ( ! isTestMode ) addActionsTimed ( ) ; else addActions ( root , Integer . MIN_VALUE , Integer . MAX_VALUE ) ; logger . info ( name + "" simulated "" + nodeCount + "" nodes in "" + thinkTime / 1000000000.0 + ""s - average "" + nodeCount / ( thinkTime / 1000000000.0 ) + "" nodes/s"" ) ; if ( ! root . children . isEmpty ( ) ) { root = root . children . get ( 0 ) ; actions = new LinkedList < > ( root . abilities ) ; combat = root . combat ; if ( logger . isDebugEnabled ( ) ) } else logger . debug ( ""no actions added"" ) ; } } +","protected void calculateActions ( Game game ) { currentScore = GameStateEvaluator . evaluate ( playerId , game ) ; if ( ! getNextAction ( game ) ) { Game sim = createSimulation ( game ) ; SimulationNode . resetCount ( ) ; root = new SimulationNode ( null , sim , playerId ) ; logger . debug ( ""simulating actions"" ) ; if ( ! isTestMode ) addActionsTimed ( ) ; else addActions ( root , Integer . MIN_VALUE , Integer . MAX_VALUE ) ; logger . info ( name + "" simulated "" + nodeCount + "" nodes in "" + thinkTime / 1000000000.0 + ""s - average "" + nodeCount / ( thinkTime / 1000000000.0 ) + "" nodes/s"" ) ; if ( ! root . children . isEmpty ( ) ) { root = root . children . get ( 0 ) ; actions = new LinkedList < > ( root . abilities ) ; combat = root . combat ; if ( logger . isDebugEnabled ( ) ) logger . debug ( ""adding actions:"" + actions ) ; } else logger . debug ( ""no actions added"" ) ; } } +" +139,"protected void calculateActions ( Game game ) { currentScore = GameStateEvaluator . evaluate ( playerId , game ) ; if ( ! getNextAction ( game ) ) { Game sim = createSimulation ( game ) ; SimulationNode . resetCount ( ) ; root = new SimulationNode ( null , sim , playerId ) ; logger . debug ( ""simulating actions"" ) ; if ( ! isTestMode ) addActionsTimed ( ) ; else addActions ( root , Integer . MIN_VALUE , Integer . MAX_VALUE ) ; logger . info ( name + "" simulated "" + nodeCount + "" nodes in "" + thinkTime / 1000000000.0 + ""s - average "" + nodeCount / ( thinkTime / 1000000000.0 ) + "" nodes/s"" ) ; if ( ! root . children . isEmpty ( ) ) { root = root . children . get ( 0 ) ; actions = new LinkedList < > ( root . abilities ) ; combat = root . combat ; if ( logger . isDebugEnabled ( ) ) logger . debug ( ""adding actions:"" + actions ) ; } else } } +","protected void calculateActions ( Game game ) { currentScore = GameStateEvaluator . evaluate ( playerId , game ) ; if ( ! getNextAction ( game ) ) { Game sim = createSimulation ( game ) ; SimulationNode . resetCount ( ) ; root = new SimulationNode ( null , sim , playerId ) ; logger . debug ( ""simulating actions"" ) ; if ( ! isTestMode ) addActionsTimed ( ) ; else addActions ( root , Integer . MIN_VALUE , Integer . MAX_VALUE ) ; logger . info ( name + "" simulated "" + nodeCount + "" nodes in "" + thinkTime / 1000000000.0 + ""s - average "" + nodeCount / ( thinkTime / 1000000000.0 ) + "" nodes/s"" ) ; if ( ! root . children . isEmpty ( ) ) { root = root . children . get ( 0 ) ; actions = new LinkedList < > ( root . abilities ) ; combat = root . combat ; if ( logger . isDebugEnabled ( ) ) logger . debug ( ""adding actions:"" + actions ) ; } else logger . debug ( ""no actions added"" ) ; } } +" +140,"public static Error from ( final GafferRuntimeException gex ) { return new ErrorBuilder ( ) . status ( gex . getStatus ( ) ) . simpleMessage ( gex . getMessage ( ) ) . detailMessage ( ExceptionUtils . 
getStackTrace ( gex ) ) . build ( ) ; } +","public static Error from ( final GafferRuntimeException gex ) { LOGGER . error ( ""Error: {}"" , gex . getMessage ( ) , gex ) ; return new ErrorBuilder ( ) . status ( gex . getStatus ( ) ) . simpleMessage ( gex . getMessage ( ) ) . detailMessage ( ExceptionUtils . getStackTrace ( gex ) ) . build ( ) ; } +" +141,"protected void updatePreferences ( ActionRequest actionRequest , PortletPreferences preferences ) throws Exception { String domainName = ParamUtil . getString ( actionRequest , ""domainName"" ) ; String rules = ParamUtil . getString ( actionRequest , ""rules"" ) ; long [ ] classNameIds = StringUtil . split ( ParamUtil . getString ( actionRequest , ""classNameIds"" ) , 0L ) ; if ( Validator . isNull ( domainName ) ) { SessionErrors . add ( actionRequest , ""domainName"" ) ; } else if ( Validator . isNull ( rules ) ) { SessionErrors . add ( actionRequest , ""rules"" ) ; } else if ( classNameIds . length == 0 ) { SessionErrors . add ( actionRequest , ""classNameIds"" ) ; } else { RulesResourceRetriever rulesResourceRetriever = new RulesResourceRetriever ( new StringResourceRetriever ( rules ) , String . valueOf ( RulesLanguage . DROOLS_RULE_LANGUAGE ) ) ; try { _rulesEngine . update ( domainName , rulesResourceRetriever ) ; } catch ( RulesEngineException rulesEngineException ) { SessionErrors . add ( actionRequest , ""rulesEngineException"" ) ; } } if ( SessionErrors . isEmpty ( actionRequest ) ) { preferences . setValue ( ""rules"" , rules ) ; preferences . setValue ( ""domain-name"" , domainName ) ; String userCustomAttributeNames = ParamUtil . getString ( actionRequest , ""userCustomAttributeNames"" ) ; preferences . setValue ( ""user-custom-attribute-names"" , userCustomAttributeNames ) ; preferences . setValues ( ""class-name-ids"" , ArrayUtil . toStringArray ( classNameIds ) ) ; } } +","protected void updatePreferences ( ActionRequest actionRequest , PortletPreferences preferences ) throws Exception { String domainName = ParamUtil . getString ( actionRequest , ""domainName"" ) ; String rules = ParamUtil . getString ( actionRequest , ""rules"" ) ; long [ ] classNameIds = StringUtil . split ( ParamUtil . getString ( actionRequest , ""classNameIds"" ) , 0L ) ; if ( Validator . isNull ( domainName ) ) { SessionErrors . add ( actionRequest , ""domainName"" ) ; } else if ( Validator . isNull ( rules ) ) { SessionErrors . add ( actionRequest , ""rules"" ) ; } else if ( classNameIds . length == 0 ) { SessionErrors . add ( actionRequest , ""classNameIds"" ) ; } else { RulesResourceRetriever rulesResourceRetriever = new RulesResourceRetriever ( new StringResourceRetriever ( rules ) , String . valueOf ( RulesLanguage . DROOLS_RULE_LANGUAGE ) ) ; try { _rulesEngine . update ( domainName , rulesResourceRetriever ) ; } catch ( RulesEngineException rulesEngineException ) { _log . error ( rulesEngineException , rulesEngineException ) ; SessionErrors . add ( actionRequest , ""rulesEngineException"" ) ; } } if ( SessionErrors . isEmpty ( actionRequest ) ) { preferences . setValue ( ""rules"" , rules ) ; preferences . setValue ( ""domain-name"" , domainName ) ; String userCustomAttributeNames = ParamUtil . getString ( actionRequest , ""userCustomAttributeNames"" ) ; preferences . setValue ( ""user-custom-attribute-names"" , userCustomAttributeNames ) ; preferences . setValues ( ""class-name-ids"" , ArrayUtil . toStringArray ( classNameIds ) ) ; } } +" +142,"public void streamClosed ( Stream s ) { } +","public void streamClosed ( Stream s ) { log . 
debug ( ""Stream {} closed"" , s . getName ( ) ) ; } +" +143,"@ Test public void testDescribeFeatureTypeManyTypes ( ) { Document doc = getAsDOM ( ""wfs?request=DescribeFeatureType&version=1.1.0&typeName=gsml:MappedFeature,gsml:GeologicUnit,ex:FirstParentFeature,ex:SecondParentFeature"" ) ; checkGsmlExDescribeFeatureType ( doc ) ; } +","@ Test public void testDescribeFeatureTypeManyTypes ( ) { Document doc = getAsDOM ( ""wfs?request=DescribeFeatureType&version=1.1.0&typeName=gsml:MappedFeature,gsml:GeologicUnit,ex:FirstParentFeature,ex:SecondParentFeature"" ) ; LOGGER . info ( ""WFS DescribeFeatureType, typename=gsml:MappedFeature,gsml:GeologicUnit,ex:FirstParentFeature,ex:SecondParentFeature response:\n"" + prettyString ( doc ) ) ; checkGsmlExDescribeFeatureType ( doc ) ; } +" +144,"private void initializeSinkStub ( ) { if ( getChannelState ( ) . equals ( ConnectivityState . READY ) ) { sinkStream = asyncStub . sinkStreaming ( new EmptyMessageReceiver ( ) ) ; } else { LOG . warn ( ""gRPC IPC server is not in ready state"" ) ; } } +","private void initializeSinkStub ( ) { if ( getChannelState ( ) . equals ( ConnectivityState . READY ) ) { sinkStream = asyncStub . sinkStreaming ( new EmptyMessageReceiver ( ) ) ; LOG . info ( ""Initialized Sink stream"" ) ; } else { LOG . warn ( ""gRPC IPC server is not in ready state"" ) ; } } +" +145,"private void initializeSinkStub ( ) { if ( getChannelState ( ) . equals ( ConnectivityState . READY ) ) { sinkStream = asyncStub . sinkStreaming ( new EmptyMessageReceiver ( ) ) ; LOG . info ( ""Initialized Sink stream"" ) ; } else { } } +","private void initializeSinkStub ( ) { if ( getChannelState ( ) . equals ( ConnectivityState . READY ) ) { sinkStream = asyncStub . sinkStreaming ( new EmptyMessageReceiver ( ) ) ; LOG . info ( ""Initialized Sink stream"" ) ; } else { LOG . warn ( ""gRPC IPC server is not in ready state"" ) ; } } +" +146,"public Collection < AugmentedEvent > apply ( Collection < AugmentedEvent > events ) { if ( this . sought . get ( ) ) { return events ; } else { Collection < AugmentedEvent > soughtEvents = new ArrayList < > ( ) ; for ( AugmentedEvent event : events ) { int partition = this . partitioner . apply ( event , this . totalPartitions ) ; if ( this . partitionSought . get ( partition ) ) { soughtEvents . add ( event ) ; } else if ( this . partitionCheckpoint [ partition ] == null || this . partitionCheckpoint [ partition ] . compareTo ( event . getHeader ( ) . getCheckpoint ( ) ) < 0 ) { this . partitionSought . set ( partition ) ; this . sought . set ( this . partitionSought . cardinality ( ) == this . totalPartitions ) ; soughtEvents . add ( event ) ; } } if ( soughtEvents . size ( ) > 0 ) { return soughtEvents ; } else { return null ; } } } +","public Collection < AugmentedEvent > apply ( Collection < AugmentedEvent > events ) { if ( this . sought . get ( ) ) { return events ; } else { Collection < AugmentedEvent > soughtEvents = new ArrayList < > ( ) ; for ( AugmentedEvent event : events ) { int partition = this . partitioner . apply ( event , this . totalPartitions ) ; if ( this . partitionSought . get ( partition ) ) { soughtEvents . add ( event ) ; } else if ( this . partitionCheckpoint [ partition ] == null || this . partitionCheckpoint [ partition ] . compareTo ( event . getHeader ( ) . getCheckpoint ( ) ) < 0 ) { this . partitionSought . set ( partition ) ; this . sought . set ( this . partitionSought . cardinality ( ) == this . totalPartitions ) ; soughtEvents . add ( event ) ; KafkaSeeker . LOG . info ( String . 
format ( ""sought partition %d"" , partition ) ) ; } } if ( soughtEvents . size ( ) > 0 ) { return soughtEvents ; } else { return null ; } } } +" +147,"private void processAuthenticatedPasswordModify ( LdapSession requestor , PasswordModifyRequest req , Dn userDn ) { byte [ ] oldPassword = req . getOldPassword ( ) ; byte [ ] newPassword = req . getNewPassword ( ) ; Entry modifiedEntry = null ; Dn principalDn = requestor . getCoreSession ( ) . getEffectivePrincipal ( ) . getDn ( ) ; if ( ( userDn != null ) && ( ! userDn . equals ( principalDn ) ) ) { if ( requestor . getCoreSession ( ) . isAdministrator ( ) ) { modifiedEntry = getModifiedEntry ( requestor , req , userDn ) ; if ( modifiedEntry == null ) { return ; } modifyUserPassword ( requestor . getCoreSession ( ) , modifiedEntry , userDn , oldPassword , newPassword , req ) ; } else { writeResult ( requestor , req , ResultCodeEnum . INSUFFICIENT_ACCESS_RIGHTS , ""Non-admin user cannot access another user's password to modify it"" ) ; } } else { modifiedEntry = getModifiedEntry ( requestor , req , principalDn ) ; if ( modifiedEntry == null ) { return ; } modifyUserPassword ( requestor . getCoreSession ( ) , modifiedEntry , principalDn , oldPassword , newPassword , req ) ; } } +","private void processAuthenticatedPasswordModify ( LdapSession requestor , PasswordModifyRequest req , Dn userDn ) { byte [ ] oldPassword = req . getOldPassword ( ) ; byte [ ] newPassword = req . getNewPassword ( ) ; Entry modifiedEntry = null ; Dn principalDn = requestor . getCoreSession ( ) . getEffectivePrincipal ( ) . getDn ( ) ; LOG . debug ( ""User {} trying to modify password of user {}"" , principalDn , userDn ) ; if ( ( userDn != null ) && ( ! userDn . equals ( principalDn ) ) ) { if ( requestor . getCoreSession ( ) . isAdministrator ( ) ) { modifiedEntry = getModifiedEntry ( requestor , req , userDn ) ; if ( modifiedEntry == null ) { return ; } modifyUserPassword ( requestor . getCoreSession ( ) , modifiedEntry , userDn , oldPassword , newPassword , req ) ; } else { writeResult ( requestor , req , ResultCodeEnum . INSUFFICIENT_ACCESS_RIGHTS , ""Non-admin user cannot access another user's password to modify it"" ) ; } } else { modifiedEntry = getModifiedEntry ( requestor , req , principalDn ) ; if ( modifiedEntry == null ) { return ; } modifyUserPassword ( requestor . getCoreSession ( ) , modifiedEntry , principalDn , oldPassword , newPassword , req ) ; } } +" +148,"public Boolean hasUserPermissionToObject ( final User user , final PermissionName permissionName , final SecurableModel securableModel ) { if ( user == null ) { throw new IllegalArgumentException ( ""User can not be null."" ) ; } if ( permissionName == null ) { throw new IllegalArgumentException ( ""Permission Name can not be null."" ) ; } if ( securableModel == null ) { throw new IllegalArgumentException ( ""SecurableModel can not be null."" ) ; } if ( hasUserPermissionToClass ( user , permissionName , securableModel . getClass ( ) . getCanonicalName ( ) ) ) { return true ; } final List < Long > userRoleIds = new ArrayList < > ( ) ; for ( Role role : user . getRoles ( ) ) { userRoleIds . add ( role . getId ( ) ) ; } final List < AclEntry > rolesAclEntries = aclEntryService . findAll ( permissionName , AclSidType . ROLE , userRoleIds , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( rolesAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { } return true ; } if ( securableModel . 
getCreatedBy ( ) != null && securableModel . getCreatedBy ( ) . getId ( ) . equals ( user . getId ( ) ) ) { List < AclEntry > ownerAclEntries = aclEntryService . findAll ( permissionName , AclSidType . OWNER , 0L , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( ownerAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on that he is the owner."" ) ; } return true ; } } if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has not permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""]."" ) ; } return false ; } +","public Boolean hasUserPermissionToObject ( final User user , final PermissionName permissionName , final SecurableModel securableModel ) { if ( user == null ) { throw new IllegalArgumentException ( ""User can not be null."" ) ; } if ( permissionName == null ) { throw new IllegalArgumentException ( ""Permission Name can not be null."" ) ; } if ( securableModel == null ) { throw new IllegalArgumentException ( ""SecurableModel can not be null."" ) ; } if ( hasUserPermissionToClass ( user , permissionName , securableModel . getClass ( ) . getCanonicalName ( ) ) ) { return true ; } final List < Long > userRoleIds = new ArrayList < > ( ) ; for ( Role role : user . getRoles ( ) ) { userRoleIds . add ( role . getId ( ) ) ; } final List < AclEntry > rolesAclEntries = aclEntryService . findAll ( permissionName , AclSidType . ROLE , userRoleIds , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( rolesAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on the ACL security settings."" ) ; } return true ; } if ( securableModel . getCreatedBy ( ) != null && securableModel . getCreatedBy ( ) . getId ( ) . equals ( user . getId ( ) ) ) { List < AclEntry > ownerAclEntries = aclEntryService . findAll ( permissionName , AclSidType . OWNER , 0L , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( ownerAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on that he is the owner."" ) ; } return true ; } } if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has not permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . 
getId ( ) + ""]."" ) ; } return false ; } +" +149,"public Boolean hasUserPermissionToObject ( final User user , final PermissionName permissionName , final SecurableModel securableModel ) { if ( user == null ) { throw new IllegalArgumentException ( ""User can not be null."" ) ; } if ( permissionName == null ) { throw new IllegalArgumentException ( ""Permission Name can not be null."" ) ; } if ( securableModel == null ) { throw new IllegalArgumentException ( ""SecurableModel can not be null."" ) ; } if ( hasUserPermissionToClass ( user , permissionName , securableModel . getClass ( ) . getCanonicalName ( ) ) ) { return true ; } final List < Long > userRoleIds = new ArrayList < > ( ) ; for ( Role role : user . getRoles ( ) ) { userRoleIds . add ( role . getId ( ) ) ; } final List < AclEntry > rolesAclEntries = aclEntryService . findAll ( permissionName , AclSidType . ROLE , userRoleIds , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( rolesAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on the ACL security settings."" ) ; } return true ; } if ( securableModel . getCreatedBy ( ) != null && securableModel . getCreatedBy ( ) . getId ( ) . equals ( user . getId ( ) ) ) { List < AclEntry > ownerAclEntries = aclEntryService . findAll ( permissionName , AclSidType . OWNER , 0L , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( ownerAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { } return true ; } } if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has not permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""]."" ) ; } return false ; } +","public Boolean hasUserPermissionToObject ( final User user , final PermissionName permissionName , final SecurableModel securableModel ) { if ( user == null ) { throw new IllegalArgumentException ( ""User can not be null."" ) ; } if ( permissionName == null ) { throw new IllegalArgumentException ( ""Permission Name can not be null."" ) ; } if ( securableModel == null ) { throw new IllegalArgumentException ( ""SecurableModel can not be null."" ) ; } if ( hasUserPermissionToClass ( user , permissionName , securableModel . getClass ( ) . getCanonicalName ( ) ) ) { return true ; } final List < Long > userRoleIds = new ArrayList < > ( ) ; for ( Role role : user . getRoles ( ) ) { userRoleIds . add ( role . getId ( ) ) ; } final List < AclEntry > rolesAclEntries = aclEntryService . findAll ( permissionName , AclSidType . ROLE , userRoleIds , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( rolesAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on the ACL security settings."" ) ; } return true ; } if ( securableModel . getCreatedBy ( ) != null && securableModel . getCreatedBy ( ) . getId ( ) . equals ( user . 
getId ( ) ) ) { List < AclEntry > ownerAclEntries = aclEntryService . findAll ( permissionName , AclSidType . OWNER , 0L , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( ownerAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on that he is the owner."" ) ; } return true ; } } if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has not permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""]."" ) ; } return false ; } +" +150,"public Boolean hasUserPermissionToObject ( final User user , final PermissionName permissionName , final SecurableModel securableModel ) { if ( user == null ) { throw new IllegalArgumentException ( ""User can not be null."" ) ; } if ( permissionName == null ) { throw new IllegalArgumentException ( ""Permission Name can not be null."" ) ; } if ( securableModel == null ) { throw new IllegalArgumentException ( ""SecurableModel can not be null."" ) ; } if ( hasUserPermissionToClass ( user , permissionName , securableModel . getClass ( ) . getCanonicalName ( ) ) ) { return true ; } final List < Long > userRoleIds = new ArrayList < > ( ) ; for ( Role role : user . getRoles ( ) ) { userRoleIds . add ( role . getId ( ) ) ; } final List < AclEntry > rolesAclEntries = aclEntryService . findAll ( permissionName , AclSidType . ROLE , userRoleIds , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( rolesAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on the ACL security settings."" ) ; } return true ; } if ( securableModel . getCreatedBy ( ) != null && securableModel . getCreatedBy ( ) . getId ( ) . equals ( user . getId ( ) ) ) { List < AclEntry > ownerAclEntries = aclEntryService . findAll ( permissionName , AclSidType . OWNER , 0L , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( ownerAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on that he is the owner."" ) ; } return true ; } } if ( log . isDebugEnabled ( ) ) { } return false ; } +","public Boolean hasUserPermissionToObject ( final User user , final PermissionName permissionName , final SecurableModel securableModel ) { if ( user == null ) { throw new IllegalArgumentException ( ""User can not be null."" ) ; } if ( permissionName == null ) { throw new IllegalArgumentException ( ""Permission Name can not be null."" ) ; } if ( securableModel == null ) { throw new IllegalArgumentException ( ""SecurableModel can not be null."" ) ; } if ( hasUserPermissionToClass ( user , permissionName , securableModel . getClass ( ) . 
getCanonicalName ( ) ) ) { return true ; } final List < Long > userRoleIds = new ArrayList < > ( ) ; for ( Role role : user . getRoles ( ) ) { userRoleIds . add ( role . getId ( ) ) ; } final List < AclEntry > rolesAclEntries = aclEntryService . findAll ( permissionName , AclSidType . ROLE , userRoleIds , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( rolesAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on the ACL security settings."" ) ; } return true ; } if ( securableModel . getCreatedBy ( ) != null && securableModel . getCreatedBy ( ) . getId ( ) . equals ( user . getId ( ) ) ) { List < AclEntry > ownerAclEntries = aclEntryService . findAll ( permissionName , AclSidType . OWNER , 0L , AclClassName . getByName ( securableModel . getClass ( ) . getCanonicalName ( ) ) , securableModel . getId ( ) ) ; if ( ownerAclEntries . size ( ) > 0 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""] based on that he is the owner."" ) ; } return true ; } } if ( log . isDebugEnabled ( ) ) { log . debug ( ""User "" + user . getUsername ( ) + "" has not permission "" + permissionName + "" to object "" + securableModel . getClass ( ) . getCanonicalName ( ) + ""[id="" + securableModel . getId ( ) + ""]."" ) ; } return false ; } +" +151,"private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +","private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . 
addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +" +152,"private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +","private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . 
parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +" +153,"private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +","private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . 
build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +" +154,"private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +","private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . 
toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +" +155,"private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } } } +","private void connectToCloudname ( ) { final ServiceData myServiceData = new ServiceData ( ) ; myServiceData . addEndpoint ( new Endpoint ( ""http"" , ""0.0.0.0"" , httpPort ) ) ; try ( final ServiceHandle handle = service . registerService ( ServiceCoordinate . parse ( myCoordinate ) , myServiceData ) ) { final String [ ] ghostNames = new String [ ] { ""pinky"" , ""blinky"" , ""inky"" , ""clyde"" } ; for ( final String name : ghostNames ) { final ServiceCoordinate ghostCoordinate = new ServiceCoordinate . Builder ( ) . fromCoordinate ( ServiceCoordinate . parse ( myCoordinate ) ) . setService ( name ) . build ( ) ; LOG . info ( ""Listening for "" + ghostCoordinate ) ; service . addServiceListener ( ghostCoordinate , new ServiceListener ( ) { @ Override public void onServiceCreated ( final InstanceCoordinate coordinate , final ServiceData serviceData ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" with serviceData "" + serviceData + "" is created"" ) ; publisher . publish ( getCreateNotification ( coordinate , serviceData ) ) ; } @ Override public void onServiceDataChanged ( final InstanceCoordinate coordinate , final ServiceData data ) { LOG . info ( ""Service data changed for: "" + coordinate . 
toCanonicalString ( ) + "" to: "" + data . toString ( ) ) ; } @ Override public void onServiceRemoved ( final InstanceCoordinate coordinate ) { LOG . info ( ""Service "" + coordinate . toCanonicalString ( ) + "" was removed"" ) ; publisher . publish ( getRemoveNotification ( coordinate ) ) ; } } ) ; } LOG . info ( ""Connected, using coordinate "" + handle . getCoordinate ( ) . toCanonicalString ( ) ) ; } } +" +156,"private boolean processRequest ( final Socket socket , final URI socketURI , final InputStream in , final OutputStream out ) { HttpResponseImpl response = null ; try { response = process ( socket , socketURI , in ) ; return response != null ; } catch ( final Throwable t ) { response = HttpResponseImpl . createError ( t . getMessage ( ) , t ) ; return true ; } finally { try { if ( response != null ) { response . writeMessage ( out , false ) ; if ( print . size ( ) > 0 && print . contains ( Output . RESPONSE ) ) { response . writeMessage ( new LoggerOutputStream ( log , ""debug"" ) , indent ) ; } } } catch ( final Throwable t2 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""Could not write response"" , t2 ) ; } else { if ( ! SocketException . class . isInstance ( t2 ) ) { log . warning ( ""Could not write response:"" + t2 ) ; } } } } } +","private boolean processRequest ( final Socket socket , final URI socketURI , final InputStream in , final OutputStream out ) { HttpResponseImpl response = null ; try { response = process ( socket , socketURI , in ) ; return response != null ; } catch ( final Throwable t ) { log . error ( t . getMessage ( ) , t ) ; response = HttpResponseImpl . createError ( t . getMessage ( ) , t ) ; return true ; } finally { try { if ( response != null ) { response . writeMessage ( out , false ) ; if ( print . size ( ) > 0 && print . contains ( Output . RESPONSE ) ) { response . writeMessage ( new LoggerOutputStream ( log , ""debug"" ) , indent ) ; } } } catch ( final Throwable t2 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""Could not write response"" , t2 ) ; } else { if ( ! SocketException . class . isInstance ( t2 ) ) { log . warning ( ""Could not write response:"" + t2 ) ; } } } } } +" +157,"private boolean processRequest ( final Socket socket , final URI socketURI , final InputStream in , final OutputStream out ) { HttpResponseImpl response = null ; try { response = process ( socket , socketURI , in ) ; return response != null ; } catch ( final Throwable t ) { log . error ( t . getMessage ( ) , t ) ; response = HttpResponseImpl . createError ( t . getMessage ( ) , t ) ; return true ; } finally { try { if ( response != null ) { response . writeMessage ( out , false ) ; if ( print . size ( ) > 0 && print . contains ( Output . RESPONSE ) ) { response . writeMessage ( new LoggerOutputStream ( log , ""debug"" ) , indent ) ; } } } catch ( final Throwable t2 ) { if ( log . isDebugEnabled ( ) ) { } else { if ( ! SocketException . class . isInstance ( t2 ) ) { log . warning ( ""Could not write response:"" + t2 ) ; } } } } } +","private boolean processRequest ( final Socket socket , final URI socketURI , final InputStream in , final OutputStream out ) { HttpResponseImpl response = null ; try { response = process ( socket , socketURI , in ) ; return response != null ; } catch ( final Throwable t ) { log . error ( t . getMessage ( ) , t ) ; response = HttpResponseImpl . createError ( t . getMessage ( ) , t ) ; return true ; } finally { try { if ( response != null ) { response . writeMessage ( out , false ) ; if ( print . size ( ) > 0 && print . 
contains ( Output . RESPONSE ) ) { response . writeMessage ( new LoggerOutputStream ( log , ""debug"" ) , indent ) ; } } } catch ( final Throwable t2 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""Could not write response"" , t2 ) ; } else { if ( ! SocketException . class . isInstance ( t2 ) ) { log . warning ( ""Could not write response:"" + t2 ) ; } } } } } +" +158,"private boolean processRequest ( final Socket socket , final URI socketURI , final InputStream in , final OutputStream out ) { HttpResponseImpl response = null ; try { response = process ( socket , socketURI , in ) ; return response != null ; } catch ( final Throwable t ) { log . error ( t . getMessage ( ) , t ) ; response = HttpResponseImpl . createError ( t . getMessage ( ) , t ) ; return true ; } finally { try { if ( response != null ) { response . writeMessage ( out , false ) ; if ( print . size ( ) > 0 && print . contains ( Output . RESPONSE ) ) { response . writeMessage ( new LoggerOutputStream ( log , ""debug"" ) , indent ) ; } } } catch ( final Throwable t2 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""Could not write response"" , t2 ) ; } else { if ( ! SocketException . class . isInstance ( t2 ) ) { } } } } } +","private boolean processRequest ( final Socket socket , final URI socketURI , final InputStream in , final OutputStream out ) { HttpResponseImpl response = null ; try { response = process ( socket , socketURI , in ) ; return response != null ; } catch ( final Throwable t ) { log . error ( t . getMessage ( ) , t ) ; response = HttpResponseImpl . createError ( t . getMessage ( ) , t ) ; return true ; } finally { try { if ( response != null ) { response . writeMessage ( out , false ) ; if ( print . size ( ) > 0 && print . contains ( Output . RESPONSE ) ) { response . writeMessage ( new LoggerOutputStream ( log , ""debug"" ) , indent ) ; } } } catch ( final Throwable t2 ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""Could not write response"" , t2 ) ; } else { if ( ! SocketException . class . isInstance ( t2 ) ) { log . warning ( ""Could not write response:"" + t2 ) ; } } } } } +" +159,"public CompletableFuture < String > asyncGet ( ) { CompletableFuture < String > future = new CompletableFuture < > ( ) ; get ( new TradfriCoapHandler ( future ) ) ; return future ; } +","public CompletableFuture < String > asyncGet ( ) { logger . debug ( ""CoAP GET request\nuri: {}"" , getURI ( ) ) ; CompletableFuture < String > future = new CompletableFuture < > ( ) ; get ( new TradfriCoapHandler ( future ) ) ; return future ; } +" +160,"public void searchPlaces ( final GeoQuery query ) { } +","public void searchPlaces ( final GeoQuery query ) { getDispatcher ( ) . invokeLater ( new AsyncTask ( SEARCH_PLACES , listeners ) { @ Override public void invoke ( List < TwitterListener > listeners ) throws TwitterException { ResponseList < Place > places = twitter . searchPlaces ( query ) ; for ( TwitterListener listener : listeners ) { try { listener . searchedPlaces ( places ) ; } catch ( Exception e ) { logger . warn ( ""Exception at searchPlaces"" , e ) ; } } } } ) ; } +" +161,"public void searchPlaces ( final GeoQuery query ) { getDispatcher ( ) . invokeLater ( new AsyncTask ( SEARCH_PLACES , listeners ) { @ Override public void invoke ( List < TwitterListener > listeners ) throws TwitterException { ResponseList < Place > places = twitter . searchPlaces ( query ) ; for ( TwitterListener listener : listeners ) { try { listener . 
searchedPlaces ( places ) ; } catch ( Exception e ) { } } } } ) ; } +","public void searchPlaces ( final GeoQuery query ) { getDispatcher ( ) . invokeLater ( new AsyncTask ( SEARCH_PLACES , listeners ) { @ Override public void invoke ( List < TwitterListener > listeners ) throws TwitterException { ResponseList < Place > places = twitter . searchPlaces ( query ) ; for ( TwitterListener listener : listeners ) { try { listener . searchedPlaces ( places ) ; } catch ( Exception e ) { logger . warn ( ""Exception at searchPlaces"" , e ) ; } } } } ) ; } +" +162,"public static int getArticlesCount ( HttpPrincipal httpPrincipal , long groupId , long folderId , int status ) { try { MethodKey methodKey = new MethodKey ( JournalArticleServiceUtil . class , ""getArticlesCount"" , _getArticlesCountParameterTypes31 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , groupId , folderId , status ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( ( Integer ) returnObj ) . intValue ( ) ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { throw systemException ; } } +","public static int getArticlesCount ( HttpPrincipal httpPrincipal , long groupId , long folderId , int status ) { try { MethodKey methodKey = new MethodKey ( JournalArticleServiceUtil . class , ""getArticlesCount"" , _getArticlesCountParameterTypes31 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , groupId , folderId , status ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( ( Integer ) returnObj ) . intValue ( ) ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { _log . error ( systemException , systemException ) ; throw systemException ; } } +" +163,"public void initSchema ( ) { DbType dbType = getDbType ( ) ; String initSqlPath = """" ; if ( dbType != null ) { switch ( dbType ) { case MYSQL : initSqlPath = ""/sql/create/release-1.0.0_schema/mysql/"" ; initSchema ( initSqlPath ) ; break ; case POSTGRESQL : initSqlPath = ""/sql/create/release-1.2.0_schema/postgresql/"" ; initSchema ( initSqlPath ) ; break ; default : throw new IllegalArgumentException ( ""not support sql type,can't upgrade"" ) ; } } } +","public void initSchema ( ) { DbType dbType = getDbType ( ) ; String initSqlPath = """" ; if ( dbType != null ) { switch ( dbType ) { case MYSQL : initSqlPath = ""/sql/create/release-1.0.0_schema/mysql/"" ; initSchema ( initSqlPath ) ; break ; case POSTGRESQL : initSqlPath = ""/sql/create/release-1.2.0_schema/postgresql/"" ; initSchema ( initSqlPath ) ; break ; default : logger . error ( ""not support sql type: {},can't upgrade"" , dbType ) ; throw new IllegalArgumentException ( ""not support sql type,can't upgrade"" ) ; } } } +" +164,"public void export ( Dashboard dashboard , Path path , ExportType type ) { DashboardSerializer serializer = serializerFor ( type ) ; validate ( dashboard ) ; Path temp = createTempDashboardFile ( ) ; try ( FileOutputStream fos = new FileOutputStream ( temp . toFile ( ) ) ) { serializer . serialize ( dashboard , fos ) ; Files . move ( temp , path , StandardCopyOption . 
REPLACE_EXISTING ) ; } catch ( FileNotFoundException e ) { throw new IllegalArgumentException ( ""File not found: "" + path , e ) ; } catch ( IOException e ) { throw new RuntimeException ( ""Error writing to file "" + path , e ) ; } finally { try { Files . deleteIfExists ( temp ) ; } catch ( IOException e ) { } } } +","public void export ( Dashboard dashboard , Path path , ExportType type ) { DashboardSerializer serializer = serializerFor ( type ) ; validate ( dashboard ) ; Path temp = createTempDashboardFile ( ) ; try ( FileOutputStream fos = new FileOutputStream ( temp . toFile ( ) ) ) { serializer . serialize ( dashboard , fos ) ; Files . move ( temp , path , StandardCopyOption . REPLACE_EXISTING ) ; } catch ( FileNotFoundException e ) { throw new IllegalArgumentException ( ""File not found: "" + path , e ) ; } catch ( IOException e ) { throw new RuntimeException ( ""Error writing to file "" + path , e ) ; } finally { try { Files . deleteIfExists ( temp ) ; } catch ( IOException e ) { logger . error ( ""Error deleting temp file"" , e ) ; } } } +" +165,"public void dispose ( ) { super . dispose ( ) ; try { XTextContent xTextContent = UNO . XTextContent ( inputField ) ; xTextContent . getAnchor ( ) . getText ( ) . removeTextContent ( xTextContent ) ; } catch ( NoSuchElementException e ) { } } +","public void dispose ( ) { super . dispose ( ) ; try { XTextContent xTextContent = UNO . XTextContent ( inputField ) ; xTextContent . getAnchor ( ) . getText ( ) . removeTextContent ( xTextContent ) ; } catch ( NoSuchElementException e ) { LOGGER . info ( """" , e ) ; } } +" +166,"public static < T > void attachOutputPortToInputPort ( DefaultOutputPort < T > outputPort , final DefaultInputPort < T > inputPort ) { } +","public static < T > void attachOutputPortToInputPort ( DefaultOutputPort < T > outputPort , final DefaultInputPort < T > inputPort ) { outputPort . setSink ( new Sink < Object > ( ) { @ Override @ SuppressWarnings ( ""unchecked"" ) public void put ( Object tuple ) { LOG . debug ( ""processing tuple"" ) ; inputPort . process ( ( T ) tuple ) ; } @ Override public int getCount ( boolean reset ) { return 0 ; } } ) ; } +" +167,"public static < T > void attachOutputPortToInputPort ( DefaultOutputPort < T > outputPort , final DefaultInputPort < T > inputPort ) { outputPort . setSink ( new Sink < Object > ( ) { @ Override @ SuppressWarnings ( ""unchecked"" ) public void put ( Object tuple ) { inputPort . process ( ( T ) tuple ) ; } @ Override public int getCount ( boolean reset ) { return 0 ; } } ) ; } +","public static < T > void attachOutputPortToInputPort ( DefaultOutputPort < T > outputPort , final DefaultInputPort < T > inputPort ) { outputPort . setSink ( new Sink < Object > ( ) { @ Override @ SuppressWarnings ( ""unchecked"" ) public void put ( Object tuple ) { LOG . debug ( ""processing tuple"" ) ; inputPort . process ( ( T ) tuple ) ; } @ Override public int getCount ( boolean reset ) { return 0 ; } } ) ; } +" +168,"public static void compareFromSameFolder ( final File sourceFolder , final IOFileFilter sourceFileFilter , final Transformer < String > toTargetFileName , final ResourcePreProcessor processor ) { final Collection < File > files = FileUtils . listFiles ( sourceFolder , sourceFileFilter , FalseFileFilter . INSTANCE ) ; int processedNumber = 0 ; for ( final File file : files ) { File targetFile ; try { targetFile = new File ( sourceFolder , toTargetFileName . transform ( file . getName ( ) ) ) ; } catch ( Exception e ) { throw WroRuntimeException . 
wrap ( e ) ; } try ( InputStream sourceFileStream = new FileInputStream ( file ) ; InputStream targetFileStream = new FileInputStream ( targetFile ) ) { LOG . debug ( ""comparing with: {}"" , targetFile . getName ( ) ) ; try { compare ( sourceFileStream , targetFileStream , new ResourcePostProcessor ( ) { public void process ( final Reader reader , final Writer writer ) throws IOException { processor . process ( Resource . create ( ""file:"" + file . getPath ( ) , ResourceType . CSS ) , reader , writer ) ; } } ) ; } catch ( final ComparisonFailure e ) { LOG . error ( ""Failed comparing: {}"" , file . getName ( ) ) ; throw e ; } processedNumber ++ ; } catch ( final IOException e ) { LOG . debug ( ""Skip comparison because couldn't find the TARGET file {}. Original cause: {}"" , targetFile , e . getCause ( ) ) ; } catch ( final Exception e ) { throw WroRuntimeException . wrap ( e ) ; } } logSuccess ( processedNumber ) ; } +","public static void compareFromSameFolder ( final File sourceFolder , final IOFileFilter sourceFileFilter , final Transformer < String > toTargetFileName , final ResourcePreProcessor processor ) { final Collection < File > files = FileUtils . listFiles ( sourceFolder , sourceFileFilter , FalseFileFilter . INSTANCE ) ; int processedNumber = 0 ; for ( final File file : files ) { LOG . debug ( ""processing: {}"" , file . getName ( ) ) ; File targetFile ; try { targetFile = new File ( sourceFolder , toTargetFileName . transform ( file . getName ( ) ) ) ; } catch ( Exception e ) { throw WroRuntimeException . wrap ( e ) ; } try ( InputStream sourceFileStream = new FileInputStream ( file ) ; InputStream targetFileStream = new FileInputStream ( targetFile ) ) { LOG . debug ( ""comparing with: {}"" , targetFile . getName ( ) ) ; try { compare ( sourceFileStream , targetFileStream , new ResourcePostProcessor ( ) { public void process ( final Reader reader , final Writer writer ) throws IOException { processor . process ( Resource . create ( ""file:"" + file . getPath ( ) , ResourceType . CSS ) , reader , writer ) ; } } ) ; } catch ( final ComparisonFailure e ) { LOG . error ( ""Failed comparing: {}"" , file . getName ( ) ) ; throw e ; } processedNumber ++ ; } catch ( final IOException e ) { LOG . debug ( ""Skip comparison because couldn't find the TARGET file {}. Original cause: {}"" , targetFile , e . getCause ( ) ) ; } catch ( final Exception e ) { throw WroRuntimeException . wrap ( e ) ; } } logSuccess ( processedNumber ) ; } +" +169,"public static void compareFromSameFolder ( final File sourceFolder , final IOFileFilter sourceFileFilter , final Transformer < String > toTargetFileName , final ResourcePreProcessor processor ) { final Collection < File > files = FileUtils . listFiles ( sourceFolder , sourceFileFilter , FalseFileFilter . INSTANCE ) ; int processedNumber = 0 ; for ( final File file : files ) { LOG . debug ( ""processing: {}"" , file . getName ( ) ) ; File targetFile ; try { targetFile = new File ( sourceFolder , toTargetFileName . transform ( file . getName ( ) ) ) ; } catch ( Exception e ) { throw WroRuntimeException . wrap ( e ) ; } try ( InputStream sourceFileStream = new FileInputStream ( file ) ; InputStream targetFileStream = new FileInputStream ( targetFile ) ) { try { compare ( sourceFileStream , targetFileStream , new ResourcePostProcessor ( ) { public void process ( final Reader reader , final Writer writer ) throws IOException { processor . process ( Resource . create ( ""file:"" + file . getPath ( ) , ResourceType . 
CSS ) , reader , writer ) ; } } ) ; } catch ( final ComparisonFailure e ) { LOG . error ( ""Failed comparing: {}"" , file . getName ( ) ) ; throw e ; } processedNumber ++ ; } catch ( final IOException e ) { LOG . debug ( ""Skip comparison because couldn't find the TARGET file {}. Original cause: {}"" , targetFile , e . getCause ( ) ) ; } catch ( final Exception e ) { throw WroRuntimeException . wrap ( e ) ; } } logSuccess ( processedNumber ) ; } +","public static void compareFromSameFolder ( final File sourceFolder , final IOFileFilter sourceFileFilter , final Transformer < String > toTargetFileName , final ResourcePreProcessor processor ) { final Collection < File > files = FileUtils . listFiles ( sourceFolder , sourceFileFilter , FalseFileFilter . INSTANCE ) ; int processedNumber = 0 ; for ( final File file : files ) { LOG . debug ( ""processing: {}"" , file . getName ( ) ) ; File targetFile ; try { targetFile = new File ( sourceFolder , toTargetFileName . transform ( file . getName ( ) ) ) ; } catch ( Exception e ) { throw WroRuntimeException . wrap ( e ) ; } try ( InputStream sourceFileStream = new FileInputStream ( file ) ; InputStream targetFileStream = new FileInputStream ( targetFile ) ) { LOG . debug ( ""comparing with: {}"" , targetFile . getName ( ) ) ; try { compare ( sourceFileStream , targetFileStream , new ResourcePostProcessor ( ) { public void process ( final Reader reader , final Writer writer ) throws IOException { processor . process ( Resource . create ( ""file:"" + file . getPath ( ) , ResourceType . CSS ) , reader , writer ) ; } } ) ; } catch ( final ComparisonFailure e ) { LOG . error ( ""Failed comparing: {}"" , file . getName ( ) ) ; throw e ; } processedNumber ++ ; } catch ( final IOException e ) { LOG . debug ( ""Skip comparison because couldn't find the TARGET file {}. Original cause: {}"" , targetFile , e . getCause ( ) ) ; } catch ( final Exception e ) { throw WroRuntimeException . wrap ( e ) ; } } logSuccess ( processedNumber ) ; } +" +170,"public static void compareFromSameFolder ( final File sourceFolder , final IOFileFilter sourceFileFilter , final Transformer < String > toTargetFileName , final ResourcePreProcessor processor ) { final Collection < File > files = FileUtils . listFiles ( sourceFolder , sourceFileFilter , FalseFileFilter . INSTANCE ) ; int processedNumber = 0 ; for ( final File file : files ) { LOG . debug ( ""processing: {}"" , file . getName ( ) ) ; File targetFile ; try { targetFile = new File ( sourceFolder , toTargetFileName . transform ( file . getName ( ) ) ) ; } catch ( Exception e ) { throw WroRuntimeException . wrap ( e ) ; } try ( InputStream sourceFileStream = new FileInputStream ( file ) ; InputStream targetFileStream = new FileInputStream ( targetFile ) ) { LOG . debug ( ""comparing with: {}"" , targetFile . getName ( ) ) ; try { compare ( sourceFileStream , targetFileStream , new ResourcePostProcessor ( ) { public void process ( final Reader reader , final Writer writer ) throws IOException { processor . process ( Resource . create ( ""file:"" + file . getPath ( ) , ResourceType . CSS ) , reader , writer ) ; } } ) ; } catch ( final ComparisonFailure e ) { throw e ; } processedNumber ++ ; } catch ( final IOException e ) { LOG . debug ( ""Skip comparison because couldn't find the TARGET file {}. Original cause: {}"" , targetFile , e . getCause ( ) ) ; } catch ( final Exception e ) { throw WroRuntimeException . 
wrap ( e ) ; } } logSuccess ( processedNumber ) ; } +","public static void compareFromSameFolder ( final File sourceFolder , final IOFileFilter sourceFileFilter , final Transformer < String > toTargetFileName , final ResourcePreProcessor processor ) { final Collection < File > files = FileUtils . listFiles ( sourceFolder , sourceFileFilter , FalseFileFilter . INSTANCE ) ; int processedNumber = 0 ; for ( final File file : files ) { LOG . debug ( ""processing: {}"" , file . getName ( ) ) ; File targetFile ; try { targetFile = new File ( sourceFolder , toTargetFileName . transform ( file . getName ( ) ) ) ; } catch ( Exception e ) { throw WroRuntimeException . wrap ( e ) ; } try ( InputStream sourceFileStream = new FileInputStream ( file ) ; InputStream targetFileStream = new FileInputStream ( targetFile ) ) { LOG . debug ( ""comparing with: {}"" , targetFile . getName ( ) ) ; try { compare ( sourceFileStream , targetFileStream , new ResourcePostProcessor ( ) { public void process ( final Reader reader , final Writer writer ) throws IOException { processor . process ( Resource . create ( ""file:"" + file . getPath ( ) , ResourceType . CSS ) , reader , writer ) ; } } ) ; } catch ( final ComparisonFailure e ) { LOG . error ( ""Failed comparing: {}"" , file . getName ( ) ) ; throw e ; } processedNumber ++ ; } catch ( final IOException e ) { LOG . debug ( ""Skip comparison because couldn't find the TARGET file {}. Original cause: {}"" , targetFile , e . getCause ( ) ) ; } catch ( final Exception e ) { throw WroRuntimeException . wrap ( e ) ; } } logSuccess ( processedNumber ) ; } +" +171,"public static void compareFromSameFolder ( final File sourceFolder , final IOFileFilter sourceFileFilter , final Transformer < String > toTargetFileName , final ResourcePreProcessor processor ) { final Collection < File > files = FileUtils . listFiles ( sourceFolder , sourceFileFilter , FalseFileFilter . INSTANCE ) ; int processedNumber = 0 ; for ( final File file : files ) { LOG . debug ( ""processing: {}"" , file . getName ( ) ) ; File targetFile ; try { targetFile = new File ( sourceFolder , toTargetFileName . transform ( file . getName ( ) ) ) ; } catch ( Exception e ) { throw WroRuntimeException . wrap ( e ) ; } try ( InputStream sourceFileStream = new FileInputStream ( file ) ; InputStream targetFileStream = new FileInputStream ( targetFile ) ) { LOG . debug ( ""comparing with: {}"" , targetFile . getName ( ) ) ; try { compare ( sourceFileStream , targetFileStream , new ResourcePostProcessor ( ) { public void process ( final Reader reader , final Writer writer ) throws IOException { processor . process ( Resource . create ( ""file:"" + file . getPath ( ) , ResourceType . CSS ) , reader , writer ) ; } } ) ; } catch ( final ComparisonFailure e ) { LOG . error ( ""Failed comparing: {}"" , file . getName ( ) ) ; throw e ; } processedNumber ++ ; } catch ( final IOException e ) { } catch ( final Exception e ) { throw WroRuntimeException . wrap ( e ) ; } } logSuccess ( processedNumber ) ; } +","public static void compareFromSameFolder ( final File sourceFolder , final IOFileFilter sourceFileFilter , final Transformer < String > toTargetFileName , final ResourcePreProcessor processor ) { final Collection < File > files = FileUtils . listFiles ( sourceFolder , sourceFileFilter , FalseFileFilter . INSTANCE ) ; int processedNumber = 0 ; for ( final File file : files ) { LOG . debug ( ""processing: {}"" , file . getName ( ) ) ; File targetFile ; try { targetFile = new File ( sourceFolder , toTargetFileName . 
transform ( file . getName ( ) ) ) ; } catch ( Exception e ) { throw WroRuntimeException . wrap ( e ) ; } try ( InputStream sourceFileStream = new FileInputStream ( file ) ; InputStream targetFileStream = new FileInputStream ( targetFile ) ) { LOG . debug ( ""comparing with: {}"" , targetFile . getName ( ) ) ; try { compare ( sourceFileStream , targetFileStream , new ResourcePostProcessor ( ) { public void process ( final Reader reader , final Writer writer ) throws IOException { processor . process ( Resource . create ( ""file:"" + file . getPath ( ) , ResourceType . CSS ) , reader , writer ) ; } } ) ; } catch ( final ComparisonFailure e ) { LOG . error ( ""Failed comparing: {}"" , file . getName ( ) ) ; throw e ; } processedNumber ++ ; } catch ( final IOException e ) { LOG . debug ( ""Skip comparison because couldn't find the TARGET file {}. Original cause: {}"" , targetFile , e . getCause ( ) ) ; } catch ( final Exception e ) { throw WroRuntimeException . wrap ( e ) ; } } logSuccess ( processedNumber ) ; } +" +172,"public JobExecution abandon ( long jobExecutionId ) throws NoSuchJobExecutionException , JobExecutionAlreadyRunningException { JobExecution jobExecution = findExecutionById ( jobExecutionId ) ; if ( jobExecution . getStatus ( ) . isLessThan ( BatchStatus . STOPPING ) ) { throw new JobExecutionAlreadyRunningException ( ""JobExecution is running or complete and therefore cannot be aborted"" ) ; } if ( logger . isInfoEnabled ( ) ) { } jobExecution . upgradeStatus ( BatchStatus . ABANDONED ) ; jobExecution . setEndTime ( new Date ( ) ) ; jobRepository . update ( jobExecution ) ; return jobExecution ; } +","public JobExecution abandon ( long jobExecutionId ) throws NoSuchJobExecutionException , JobExecutionAlreadyRunningException { JobExecution jobExecution = findExecutionById ( jobExecutionId ) ; if ( jobExecution . getStatus ( ) . isLessThan ( BatchStatus . STOPPING ) ) { throw new JobExecutionAlreadyRunningException ( ""JobExecution is running or complete and therefore cannot be aborted"" ) ; } if ( logger . isInfoEnabled ( ) ) { logger . info ( ""Aborting job execution: "" + jobExecution ) ; } jobExecution . upgradeStatus ( BatchStatus . ABANDONED ) ; jobExecution . setEndTime ( new Date ( ) ) ; jobRepository . update ( jobExecution ) ; return jobExecution ; } +" +173,"private void _generateImages ( FileVersion sourceFileVersion , FileVersion destinationFileVersion ) throws Exception { try { if ( sourceFileVersion != null ) { copy ( sourceFileVersion , destinationFileVersion ) ; return ; } if ( ! PropsValues . DL_FILE_ENTRY_THUMBNAIL_ENABLED && ! PropsValues . DL_FILE_ENTRY_PREVIEW_ENABLED ) { return ; } try ( InputStream inputStream = destinationFileVersion . getContentStream ( false ) ) { byte [ ] bytes = FileUtil . getBytes ( inputStream ) ; ImageBag imageBag = ImageToolUtil . read ( bytes ) ; RenderedImage renderedImage = imageBag . getRenderedImage ( ) ; if ( renderedImage == null ) { _fileVersionPreviewEventListener . onFailure ( destinationFileVersion ) ; return ; } ColorModel colorModel = renderedImage . getColorModel ( ) ; if ( colorModel . getNumColorComponents ( ) == 4 ) { Future < RenderedImage > future = ImageToolUtil . convertCMYKtoRGB ( bytes , imageBag . getType ( ) ) ; if ( future == null ) { _fileVersionPreviewEventListener . onFailure ( destinationFileVersion ) ; return ; } String processIdentity = String . valueOf ( destinationFileVersion . getFileVersionId ( ) ) ; futures . put ( processIdentity , future ) ; RenderedImage convertedRenderedImage = future . 
get ( ) ; if ( convertedRenderedImage != null ) { renderedImage = convertedRenderedImage ; } } if ( ! _hasPreview ( destinationFileVersion ) ) { _storePreviewImage ( destinationFileVersion , renderedImage ) ; } if ( ! hasThumbnails ( destinationFileVersion ) ) { storeThumbnailImages ( destinationFileVersion , renderedImage ) ; } _fileVersionPreviewEventListener . onSuccess ( destinationFileVersion ) ; } } catch ( NoSuchFileEntryException noSuchFileEntryException ) { if ( _log . isDebugEnabled ( ) ) { } _fileVersionPreviewEventListener . onFailure ( destinationFileVersion ) ; } finally { _fileVersionIds . remove ( destinationFileVersion . getFileVersionId ( ) ) ; } } +","private void _generateImages ( FileVersion sourceFileVersion , FileVersion destinationFileVersion ) throws Exception { try { if ( sourceFileVersion != null ) { copy ( sourceFileVersion , destinationFileVersion ) ; return ; } if ( ! PropsValues . DL_FILE_ENTRY_THUMBNAIL_ENABLED && ! PropsValues . DL_FILE_ENTRY_PREVIEW_ENABLED ) { return ; } try ( InputStream inputStream = destinationFileVersion . getContentStream ( false ) ) { byte [ ] bytes = FileUtil . getBytes ( inputStream ) ; ImageBag imageBag = ImageToolUtil . read ( bytes ) ; RenderedImage renderedImage = imageBag . getRenderedImage ( ) ; if ( renderedImage == null ) { _fileVersionPreviewEventListener . onFailure ( destinationFileVersion ) ; return ; } ColorModel colorModel = renderedImage . getColorModel ( ) ; if ( colorModel . getNumColorComponents ( ) == 4 ) { Future < RenderedImage > future = ImageToolUtil . convertCMYKtoRGB ( bytes , imageBag . getType ( ) ) ; if ( future == null ) { _fileVersionPreviewEventListener . onFailure ( destinationFileVersion ) ; return ; } String processIdentity = String . valueOf ( destinationFileVersion . getFileVersionId ( ) ) ; futures . put ( processIdentity , future ) ; RenderedImage convertedRenderedImage = future . get ( ) ; if ( convertedRenderedImage != null ) { renderedImage = convertedRenderedImage ; } } if ( ! _hasPreview ( destinationFileVersion ) ) { _storePreviewImage ( destinationFileVersion , renderedImage ) ; } if ( ! hasThumbnails ( destinationFileVersion ) ) { storeThumbnailImages ( destinationFileVersion , renderedImage ) ; } _fileVersionPreviewEventListener . onSuccess ( destinationFileVersion ) ; } } catch ( NoSuchFileEntryException noSuchFileEntryException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( noSuchFileEntryException , noSuchFileEntryException ) ; } _fileVersionPreviewEventListener . onFailure ( destinationFileVersion ) ; } finally { _fileVersionIds . remove ( destinationFileVersion . getFileVersionId ( ) ) ; } } +" +174,"public void createHttpPrincipal ( LocalKadmin kadmin ) throws HasException { String httpPrincipal = getHttpPrincipal ( ) ; IdentityBackend backend = kdcServer . getIdentityService ( ) ; try { if ( backend . getIdentity ( httpPrincipal ) == null ) { kadmin . addPrincipal ( httpPrincipal ) ; } else { } } catch ( KrbException e ) { throw new HasException ( ""Failed to add principal, "" + e . getMessage ( ) ) ; } } +","public void createHttpPrincipal ( LocalKadmin kadmin ) throws HasException { String httpPrincipal = getHttpPrincipal ( ) ; IdentityBackend backend = kdcServer . getIdentityService ( ) ; try { if ( backend . getIdentity ( httpPrincipal ) == null ) { kadmin . addPrincipal ( httpPrincipal ) ; } else { LOG . info ( ""The http principal already exists in backend."" ) ; } } catch ( KrbException e ) { throw new HasException ( ""Failed to add principal, "" + e . 
getMessage ( ) ) ; } } +" +175,"public List < Token > tokenize ( final String pattern ) { if ( pattern == null ) { return Arrays . asList ( NullToken . INSTANCE ) ; } if ( """" . equals ( pattern ) ) { return Arrays . asList ( BlankToken . INSTANCE ) ; } final List < Token > tokens ; if ( _predefinedTokens ) { final List < PredefinedTokenDefinition > predefinedTokens = _configuration . getPredefinedTokens ( ) ; final PredefinedTokenTokenizer tokenizer = new PredefinedTokenTokenizer ( predefinedTokens ) ; tokens = tokenizer . tokenize ( pattern ) ; for ( final ListIterator < Token > it = tokens . listIterator ( ) ; it . hasNext ( ) ; ) { final Token token = it . next ( ) ; final TokenType tokenType = token . getType ( ) ; if ( tokenType == TokenType . UNDEFINED ) { final List < SimpleToken > replacementTokens = tokenizeInternal ( token . getString ( ) ) ; boolean replace = true ; if ( replacementTokens . size ( ) == 1 ) { if ( token . equals ( replacementTokens . get ( 0 ) ) ) { replace = false ; } } if ( replace ) { it . remove ( ) ; for ( final SimpleToken replacementToken : replacementTokens ) { it . add ( replacementToken ) ; } } } } } else { tokens = new ArrayList < > ( ) ; tokens . addAll ( tokenizeInternal ( pattern ) ) ; } return tokens ; } +","public List < Token > tokenize ( final String pattern ) { if ( pattern == null ) { return Arrays . asList ( NullToken . INSTANCE ) ; } if ( """" . equals ( pattern ) ) { return Arrays . asList ( BlankToken . INSTANCE ) ; } final List < Token > tokens ; if ( _predefinedTokens ) { final List < PredefinedTokenDefinition > predefinedTokens = _configuration . getPredefinedTokens ( ) ; final PredefinedTokenTokenizer tokenizer = new PredefinedTokenTokenizer ( predefinedTokens ) ; tokens = tokenizer . tokenize ( pattern ) ; for ( final ListIterator < Token > it = tokens . listIterator ( ) ; it . hasNext ( ) ; ) { final Token token = it . next ( ) ; final TokenType tokenType = token . getType ( ) ; logger . debug ( ""Next token type is: {}"" , tokenType ) ; if ( tokenType == TokenType . UNDEFINED ) { final List < SimpleToken > replacementTokens = tokenizeInternal ( token . getString ( ) ) ; boolean replace = true ; if ( replacementTokens . size ( ) == 1 ) { if ( token . equals ( replacementTokens . get ( 0 ) ) ) { replace = false ; } } if ( replace ) { it . remove ( ) ; for ( final SimpleToken replacementToken : replacementTokens ) { it . add ( replacementToken ) ; } } } } } else { tokens = new ArrayList < > ( ) ; tokens . addAll ( tokenizeInternal ( pattern ) ) ; } return tokens ; } +" +176,"public void execute ( JobExecutionContext context ) throws JobExecutionException { final JobDetail detail = context . getJobDetail ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { } final JobDataMap jdm = detail . getJobDataMap ( ) ; if ( jdm . get ( FileSystemMonitorSPI . TYPE_KEY ) == FileSystemEventType . POLLING_EVENT ) { final GBEventNotifier notifier = ( GBEventNotifier ) jdm . get ( EVENT_NOTIFIER_KEY ) ; notifier . notifyEvent ( new File ( Long . toString ( System . currentTimeMillis ( ) ) ) , FileSystemEventType . POLLING_EVENT ) ; return ; } FileAlterationObserver observer = null ; if ( ( observer = getObserver ( jdm ) ) == null ) { try { long wait = WAITING_LOCK_TIME_DEFAULT ; try { wait = jdm . getLong ( WAITING_LOCK_TIME_KEY ) ; } catch ( ClassCastException cce ) { } lock . tryLock ( wait , TimeUnit . MILLISECONDS ) ; if ( ( observer = getObserver ( jdm ) ) == null ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . 
debug ( ""Building the observer tree..."" ) ; observer = buildObserver ( jdm ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Observer tree complete."" ) ; } } catch ( InterruptedException ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } catch ( Exception ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } finally { lock . unlock ( ) ; } } observer . checkAndNotify ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""job named: "" + detail . getKey ( ) + "" completed"" ) ; } } +","public void execute ( JobExecutionContext context ) throws JobExecutionException { final JobDetail detail = context . getJobDetail ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""Starting FSM job named: "" + detail . getKey ( ) ) ; } final JobDataMap jdm = detail . getJobDataMap ( ) ; if ( jdm . get ( FileSystemMonitorSPI . TYPE_KEY ) == FileSystemEventType . POLLING_EVENT ) { final GBEventNotifier notifier = ( GBEventNotifier ) jdm . get ( EVENT_NOTIFIER_KEY ) ; notifier . notifyEvent ( new File ( Long . toString ( System . currentTimeMillis ( ) ) ) , FileSystemEventType . POLLING_EVENT ) ; return ; } FileAlterationObserver observer = null ; if ( ( observer = getObserver ( jdm ) ) == null ) { try { long wait = WAITING_LOCK_TIME_DEFAULT ; try { wait = jdm . getLong ( WAITING_LOCK_TIME_KEY ) ; } catch ( ClassCastException cce ) { } lock . tryLock ( wait , TimeUnit . MILLISECONDS ) ; if ( ( observer = getObserver ( jdm ) ) == null ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Building the observer tree..."" ) ; observer = buildObserver ( jdm ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Observer tree complete."" ) ; } } catch ( InterruptedException ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } catch ( Exception ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } finally { lock . unlock ( ) ; } } observer . checkAndNotify ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""job named: "" + detail . getKey ( ) + "" completed"" ) ; } } +" +177,"public void execute ( JobExecutionContext context ) throws JobExecutionException { final JobDetail detail = context . getJobDetail ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""Starting FSM job named: "" + detail . getKey ( ) ) ; } final JobDataMap jdm = detail . getJobDataMap ( ) ; if ( jdm . get ( FileSystemMonitorSPI . TYPE_KEY ) == FileSystemEventType . POLLING_EVENT ) { final GBEventNotifier notifier = ( GBEventNotifier ) jdm . get ( EVENT_NOTIFIER_KEY ) ; notifier . notifyEvent ( new File ( Long . toString ( System . currentTimeMillis ( ) ) ) , FileSystemEventType . POLLING_EVENT ) ; return ; } FileAlterationObserver observer = null ; if ( ( observer = getObserver ( jdm ) ) == null ) { try { long wait = WAITING_LOCK_TIME_DEFAULT ; try { wait = jdm . getLong ( WAITING_LOCK_TIME_KEY ) ; } catch ( ClassCastException cce ) { } lock . tryLock ( wait , TimeUnit . MILLISECONDS ) ; if ( ( observer = getObserver ( jdm ) ) == null ) { if ( LOGGER . isDebugEnabled ( ) ) observer = buildObserver ( jdm ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Observer tree complete."" ) ; } } catch ( InterruptedException ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } catch ( Exception ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } finally { lock . unlock ( ) ; } } observer . 
checkAndNotify ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""job named: "" + detail . getKey ( ) + "" completed"" ) ; } } +","public void execute ( JobExecutionContext context ) throws JobExecutionException { final JobDetail detail = context . getJobDetail ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""Starting FSM job named: "" + detail . getKey ( ) ) ; } final JobDataMap jdm = detail . getJobDataMap ( ) ; if ( jdm . get ( FileSystemMonitorSPI . TYPE_KEY ) == FileSystemEventType . POLLING_EVENT ) { final GBEventNotifier notifier = ( GBEventNotifier ) jdm . get ( EVENT_NOTIFIER_KEY ) ; notifier . notifyEvent ( new File ( Long . toString ( System . currentTimeMillis ( ) ) ) , FileSystemEventType . POLLING_EVENT ) ; return ; } FileAlterationObserver observer = null ; if ( ( observer = getObserver ( jdm ) ) == null ) { try { long wait = WAITING_LOCK_TIME_DEFAULT ; try { wait = jdm . getLong ( WAITING_LOCK_TIME_KEY ) ; } catch ( ClassCastException cce ) { } lock . tryLock ( wait , TimeUnit . MILLISECONDS ) ; if ( ( observer = getObserver ( jdm ) ) == null ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Building the observer tree..."" ) ; observer = buildObserver ( jdm ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Observer tree complete."" ) ; } } catch ( InterruptedException ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } catch ( Exception ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } finally { lock . unlock ( ) ; } } observer . checkAndNotify ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""job named: "" + detail . getKey ( ) + "" completed"" ) ; } } +" +178,"public void execute ( JobExecutionContext context ) throws JobExecutionException { final JobDetail detail = context . getJobDetail ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""Starting FSM job named: "" + detail . getKey ( ) ) ; } final JobDataMap jdm = detail . getJobDataMap ( ) ; if ( jdm . get ( FileSystemMonitorSPI . TYPE_KEY ) == FileSystemEventType . POLLING_EVENT ) { final GBEventNotifier notifier = ( GBEventNotifier ) jdm . get ( EVENT_NOTIFIER_KEY ) ; notifier . notifyEvent ( new File ( Long . toString ( System . currentTimeMillis ( ) ) ) , FileSystemEventType . POLLING_EVENT ) ; return ; } FileAlterationObserver observer = null ; if ( ( observer = getObserver ( jdm ) ) == null ) { try { long wait = WAITING_LOCK_TIME_DEFAULT ; try { wait = jdm . getLong ( WAITING_LOCK_TIME_KEY ) ; } catch ( ClassCastException cce ) { } lock . tryLock ( wait , TimeUnit . MILLISECONDS ) ; if ( ( observer = getObserver ( jdm ) ) == null ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Building the observer tree..."" ) ; observer = buildObserver ( jdm ) ; if ( LOGGER . isDebugEnabled ( ) ) } } catch ( InterruptedException ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } catch ( Exception ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } finally { lock . unlock ( ) ; } } observer . checkAndNotify ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""job named: "" + detail . getKey ( ) + "" completed"" ) ; } } +","public void execute ( JobExecutionContext context ) throws JobExecutionException { final JobDetail detail = context . getJobDetail ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""Starting FSM job named: "" + detail . getKey ( ) ) ; } final JobDataMap jdm = detail . 
getJobDataMap ( ) ; if ( jdm . get ( FileSystemMonitorSPI . TYPE_KEY ) == FileSystemEventType . POLLING_EVENT ) { final GBEventNotifier notifier = ( GBEventNotifier ) jdm . get ( EVENT_NOTIFIER_KEY ) ; notifier . notifyEvent ( new File ( Long . toString ( System . currentTimeMillis ( ) ) ) , FileSystemEventType . POLLING_EVENT ) ; return ; } FileAlterationObserver observer = null ; if ( ( observer = getObserver ( jdm ) ) == null ) { try { long wait = WAITING_LOCK_TIME_DEFAULT ; try { wait = jdm . getLong ( WAITING_LOCK_TIME_KEY ) ; } catch ( ClassCastException cce ) { } lock . tryLock ( wait , TimeUnit . MILLISECONDS ) ; if ( ( observer = getObserver ( jdm ) ) == null ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Building the observer tree..."" ) ; observer = buildObserver ( jdm ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Observer tree complete."" ) ; } } catch ( InterruptedException ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } catch ( Exception ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } finally { lock . unlock ( ) ; } } observer . checkAndNotify ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""job named: "" + detail . getKey ( ) + "" completed"" ) ; } } +" +179,"public void execute ( JobExecutionContext context ) throws JobExecutionException { final JobDetail detail = context . getJobDetail ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""Starting FSM job named: "" + detail . getKey ( ) ) ; } final JobDataMap jdm = detail . getJobDataMap ( ) ; if ( jdm . get ( FileSystemMonitorSPI . TYPE_KEY ) == FileSystemEventType . POLLING_EVENT ) { final GBEventNotifier notifier = ( GBEventNotifier ) jdm . get ( EVENT_NOTIFIER_KEY ) ; notifier . notifyEvent ( new File ( Long . toString ( System . currentTimeMillis ( ) ) ) , FileSystemEventType . POLLING_EVENT ) ; return ; } FileAlterationObserver observer = null ; if ( ( observer = getObserver ( jdm ) ) == null ) { try { long wait = WAITING_LOCK_TIME_DEFAULT ; try { wait = jdm . getLong ( WAITING_LOCK_TIME_KEY ) ; } catch ( ClassCastException cce ) { } lock . tryLock ( wait , TimeUnit . MILLISECONDS ) ; if ( ( observer = getObserver ( jdm ) ) == null ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Building the observer tree..."" ) ; observer = buildObserver ( jdm ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Observer tree complete."" ) ; } } catch ( InterruptedException ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } catch ( Exception ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } finally { lock . unlock ( ) ; } } observer . checkAndNotify ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { } } +","public void execute ( JobExecutionContext context ) throws JobExecutionException { final JobDetail detail = context . getJobDetail ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""Starting FSM job named: "" + detail . getKey ( ) ) ; } final JobDataMap jdm = detail . getJobDataMap ( ) ; if ( jdm . get ( FileSystemMonitorSPI . TYPE_KEY ) == FileSystemEventType . POLLING_EVENT ) { final GBEventNotifier notifier = ( GBEventNotifier ) jdm . get ( EVENT_NOTIFIER_KEY ) ; notifier . notifyEvent ( new File ( Long . toString ( System . currentTimeMillis ( ) ) ) , FileSystemEventType . 
POLLING_EVENT ) ; return ; } FileAlterationObserver observer = null ; if ( ( observer = getObserver ( jdm ) ) == null ) { try { long wait = WAITING_LOCK_TIME_DEFAULT ; try { wait = jdm . getLong ( WAITING_LOCK_TIME_KEY ) ; } catch ( ClassCastException cce ) { } lock . tryLock ( wait , TimeUnit . MILLISECONDS ) ; if ( ( observer = getObserver ( jdm ) ) == null ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Building the observer tree..."" ) ; observer = buildObserver ( jdm ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""Observer tree complete."" ) ; } } catch ( InterruptedException ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } catch ( Exception ex ) { LOGGER . error ( ""GBFileSystemMonitorJob interrupted during setup"" , ex ) ; } finally { lock . unlock ( ) ; } } observer . checkAndNotify ( ) ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""job named: "" + detail . getKey ( ) + "" completed"" ) ; } } +" +180,"public void updateNonExistent ( String testName ) throws Exception { setupUpdateNonExistent ( ) ; AcquisitionClient client = new AcquisitionClient ( ) ; PoxPayloadOut multipart = createAcquisitionInstance ( NON_EXISTENT_ID ) ; Response res = client . update ( NON_EXISTENT_ID , multipart ) ; try { int statusCode = res . getStatus ( ) ; if ( logger . isDebugEnabled ( ) ) { } Assert . assertTrue ( testRequestType . isValidStatusCode ( statusCode ) , invalidStatusCodeMessage ( testRequestType , statusCode ) ) ; Assert . assertEquals ( statusCode , testExpectedStatusCode ) ; } finally { res . close ( ) ; } } +","public void updateNonExistent ( String testName ) throws Exception { setupUpdateNonExistent ( ) ; AcquisitionClient client = new AcquisitionClient ( ) ; PoxPayloadOut multipart = createAcquisitionInstance ( NON_EXISTENT_ID ) ; Response res = client . update ( NON_EXISTENT_ID , multipart ) ; try { int statusCode = res . getStatus ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( testName + "": status = "" + statusCode ) ; } Assert . assertTrue ( testRequestType . isValidStatusCode ( statusCode ) , invalidStatusCodeMessage ( testRequestType , statusCode ) ) ; Assert . assertEquals ( statusCode , testExpectedStatusCode ) ; } finally { res . close ( ) ; } } +" +181,"private void reuse ( OutOfOffHeapMemoryListener oooml , OffHeapMemoryStats newStats , long offHeapMemorySize , Slab [ ] slabs ) { if ( isClosed ( ) ) { throw new IllegalStateException ( ""Can not reuse a closed off-heap memory manager."" ) ; } if ( oooml == null ) { throw new IllegalArgumentException ( ""OutOfOffHeapMemoryListener is null"" ) ; } if ( getTotalMemory ( ) != offHeapMemorySize ) { } if ( ! this . freeList . okToReuse ( slabs ) ) { throw new IllegalStateException ( ""attempted to reuse existing off-heap memory even though new off-heap memory was allocated"" ) ; } this . ooohml = oooml ; newStats . initialize ( this . stats ) ; this . stats = newStats ; } +","private void reuse ( OutOfOffHeapMemoryListener oooml , OffHeapMemoryStats newStats , long offHeapMemorySize , Slab [ ] slabs ) { if ( isClosed ( ) ) { throw new IllegalStateException ( ""Can not reuse a closed off-heap memory manager."" ) ; } if ( oooml == null ) { throw new IllegalArgumentException ( ""OutOfOffHeapMemoryListener is null"" ) ; } if ( getTotalMemory ( ) != offHeapMemorySize ) { logger . warn ( ""Using {} bytes of existing off-heap memory instead of the requested {}."" , getTotalMemory ( ) , offHeapMemorySize ) ; } if ( ! this . freeList . 
okToReuse ( slabs ) ) { throw new IllegalStateException ( ""attempted to reuse existing off-heap memory even though new off-heap memory was allocated"" ) ; } this . ooohml = oooml ; newStats . initialize ( this . stats ) ; this . stats = newStats ; } +" +182,"public IIdea getIdea ( String code ) { IIdea idea = null ; try { idea = this . getIdeaManager ( ) . getIdea ( code ) ; if ( null != idea && idea . getStatus ( ) != IIdea . STATUS_APPROVED ) { return null ; } } catch ( Throwable t ) { throw new RuntimeException ( ""Errore in caricamento idea "" + code ) ; } return idea ; } +","public IIdea getIdea ( String code ) { IIdea idea = null ; try { idea = this . getIdeaManager ( ) . getIdea ( code ) ; if ( null != idea && idea . getStatus ( ) != IIdea . STATUS_APPROVED ) { return null ; } } catch ( Throwable t ) { _logger . error ( ""error in getIdea"" , t ) ; throw new RuntimeException ( ""Errore in caricamento idea "" + code ) ; } return idea ; } +" +183,"public Future < SummaryCollection > gather ( ExecutorService es ) { int numFiles = countFiles ( ) ; if ( numFiles == 0 ) { return CompletableFuture . completedFuture ( new SummaryCollection ( ) ) ; } int numRequest = Math . max ( numFiles / 100_000 , 1 ) ; List < CompletableFuture < SummaryCollection > > futures = new ArrayList < > ( ) ; AtomicBoolean cancelFlag = new AtomicBoolean ( false ) ; TInfo tinfo = TraceUtil . traceInfo ( ) ; for ( int i = 0 ; i < numRequest ; i ++ ) { futures . add ( CompletableFuture . supplyAsync ( new GatherRequest ( tinfo , i , numRequest , cancelFlag ) , es ) ) ; } Future < SummaryCollection > future = CompletableFutureUtil . merge ( futures , ( sc1 , sc2 ) -> SummaryCollection . merge ( sc1 , sc2 , factory ) , SummaryCollection :: new ) ; return new CancelFlagFuture < > ( future , cancelFlag ) ; } +","public Future < SummaryCollection > gather ( ExecutorService es ) { int numFiles = countFiles ( ) ; log . debug ( ""Gathering summaries from {} files"" , numFiles ) ; if ( numFiles == 0 ) { return CompletableFuture . completedFuture ( new SummaryCollection ( ) ) ; } int numRequest = Math . max ( numFiles / 100_000 , 1 ) ; List < CompletableFuture < SummaryCollection > > futures = new ArrayList < > ( ) ; AtomicBoolean cancelFlag = new AtomicBoolean ( false ) ; TInfo tinfo = TraceUtil . traceInfo ( ) ; for ( int i = 0 ; i < numRequest ; i ++ ) { futures . add ( CompletableFuture . supplyAsync ( new GatherRequest ( tinfo , i , numRequest , cancelFlag ) , es ) ) ; } Future < SummaryCollection > future = CompletableFutureUtil . merge ( futures , ( sc1 , sc2 ) -> SummaryCollection . merge ( sc1 , sc2 , factory ) , SummaryCollection :: new ) ; return new CancelFlagFuture < > ( future , cancelFlag ) ; } +" +184,"public Promise < MethodWithMultipleByteBufferParametersDeferred > methodWithMultipleByteBufferParameters ( Byte [ ] byteBufferArg1 , Byte [ ] byteBufferArg2 ) { logger . info ( ""***********************************************************"" ) ; logger . info ( ""***********************************************************"" ) ; MethodWithMultipleByteBufferParametersDeferred deferred = new MethodWithMultipleByteBufferParametersDeferred ( ) ; Byte [ ] result = IltUtil . concatenateByteArrays ( byteBufferArg1 , byteBufferArg2 ) ; deferred . 
resolve ( result ) ; return new Promise < MethodWithMultipleByteBufferParametersDeferred > ( deferred ) ; } +","public Promise < MethodWithMultipleByteBufferParametersDeferred > methodWithMultipleByteBufferParameters ( Byte [ ] byteBufferArg1 , Byte [ ] byteBufferArg2 ) { logger . info ( ""***********************************************************"" ) ; logger . info ( ""* IltProvider.methodWithMultipleByteBufferParameters called"" ) ; logger . info ( ""***********************************************************"" ) ; MethodWithMultipleByteBufferParametersDeferred deferred = new MethodWithMultipleByteBufferParametersDeferred ( ) ; Byte [ ] result = IltUtil . concatenateByteArrays ( byteBufferArg1 , byteBufferArg2 ) ; deferred . resolve ( result ) ; return new Promise < MethodWithMultipleByteBufferParametersDeferred > ( deferred ) ; } +" +185,"@ Merged @ ViewChanged public CompletionStage < Void > viewChanged ( ViewChangedEvent event ) { if ( this . cache . getAdvancedCache ( ) . getDistributionManager ( ) != null ) { this . views . put ( event . getViewId ( ) , event . isMergeView ( ) ) ; } else { Membership previousMembership = new CacheMembership ( event . getLocalAddress ( ) , event . getOldMembers ( ) , this ) ; Membership membership = new CacheMembership ( event . getLocalAddress ( ) , event . getNewMembers ( ) , this ) ; for ( Map . Entry < GroupListener , ExecutorService > entry : this . listeners . entrySet ( ) ) { GroupListener listener = entry . getKey ( ) ; ExecutorService executor = entry . getValue ( ) ; Runnable listenerTask = new Runnable ( ) { @ Override public void run ( ) { try { listener . membershipChanged ( previousMembership , membership , event . isMergeView ( ) ) ; } catch ( Throwable e ) { } } } ; try { executor . submit ( listenerTask ) ; } catch ( RejectedExecutionException e ) { } } } return CompletableFutures . completedNull ( ) ; } +","@ Merged @ ViewChanged public CompletionStage < Void > viewChanged ( ViewChangedEvent event ) { if ( this . cache . getAdvancedCache ( ) . getDistributionManager ( ) != null ) { this . views . put ( event . getViewId ( ) , event . isMergeView ( ) ) ; } else { Membership previousMembership = new CacheMembership ( event . getLocalAddress ( ) , event . getOldMembers ( ) , this ) ; Membership membership = new CacheMembership ( event . getLocalAddress ( ) , event . getNewMembers ( ) , this ) ; for ( Map . Entry < GroupListener , ExecutorService > entry : this . listeners . entrySet ( ) ) { GroupListener listener = entry . getKey ( ) ; ExecutorService executor = entry . getValue ( ) ; Runnable listenerTask = new Runnable ( ) { @ Override public void run ( ) { try { listener . membershipChanged ( previousMembership , membership , event . isMergeView ( ) ) ; } catch ( Throwable e ) { ClusteringServerLogger . ROOT_LOGGER . warn ( e . getLocalizedMessage ( ) , e ) ; } } } ; try { executor . submit ( listenerTask ) ; } catch ( RejectedExecutionException e ) { } } } return CompletableFutures . completedNull ( ) ; } +" +186,"@ Test public void testDeleteAllOlderThan ( ) { try { long start = System . currentTimeMillis ( ) ; doDeleteAllOlderThan ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan finished, time taken = ["" + ( System . currentTimeMillis ( ) - start ) + ""]ms"" ) ; } catch ( Exception e ) { LOG . error ( ""error:"" , e ) ; fail ( e . getMessage ( ) ) ; } } +","@ Test public void testDeleteAllOlderThan ( ) { try { long start = System . currentTimeMillis ( ) ; LOG . 
info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan, testDir="" + dataStoreDir ) ; doDeleteAllOlderThan ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan finished, time taken = ["" + ( System . currentTimeMillis ( ) - start ) + ""]ms"" ) ; } catch ( Exception e ) { LOG . error ( ""error:"" , e ) ; fail ( e . getMessage ( ) ) ; } } +" +187,"@ Test public void testDeleteAllOlderThan ( ) { try { long start = System . currentTimeMillis ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan, testDir="" + dataStoreDir ) ; doDeleteAllOlderThan ( ) ; } catch ( Exception e ) { LOG . error ( ""error:"" , e ) ; fail ( e . getMessage ( ) ) ; } } +","@ Test public void testDeleteAllOlderThan ( ) { try { long start = System . currentTimeMillis ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan, testDir="" + dataStoreDir ) ; doDeleteAllOlderThan ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan finished, time taken = ["" + ( System . currentTimeMillis ( ) - start ) + ""]ms"" ) ; } catch ( Exception e ) { LOG . error ( ""error:"" , e ) ; fail ( e . getMessage ( ) ) ; } } +" +188,"@ Test public void testDeleteAllOlderThan ( ) { try { long start = System . currentTimeMillis ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan, testDir="" + dataStoreDir ) ; doDeleteAllOlderThan ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan finished, time taken = ["" + ( System . currentTimeMillis ( ) - start ) + ""]ms"" ) ; } catch ( Exception e ) { fail ( e . getMessage ( ) ) ; } } +","@ Test public void testDeleteAllOlderThan ( ) { try { long start = System . currentTimeMillis ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan, testDir="" + dataStoreDir ) ; doDeleteAllOlderThan ( ) ; LOG . info ( ""Testcase: "" + this . getClass ( ) . getName ( ) + ""#testDeleteAllOlderThan finished, time taken = ["" + ( System . currentTimeMillis ( ) - start ) + ""]ms"" ) ; } catch ( Exception e ) { LOG . error ( ""error:"" , e ) ; fail ( e . getMessage ( ) ) ; } } +" +189,"public LocalParameter createKineticParameter ( String id ) { Reaction lastReaction = getLastElementOf ( listOfReactions ) ; KineticLaw lastKineticLaw = null ; if ( lastReaction == null ) { return null ; } else { lastKineticLaw = lastReaction . getKineticLaw ( ) ; if ( lastKineticLaw == null ) { return null ; } } LocalParameter parameter = new LocalParameter ( id , getLevel ( ) , getVersion ( ) ) ; lastKineticLaw . addLocalParameter ( parameter ) ; return parameter ; } +","public LocalParameter createKineticParameter ( String id ) { Reaction lastReaction = getLastElementOf ( listOfReactions ) ; KineticLaw lastKineticLaw = null ; if ( lastReaction == null ) { logger . warn ( MessageFormat . format ( COULD_NOT_CREATE_ELEMENT_MSG , ""LocalParameter for KineticLaw"" , ""reactions"" ) ) ; return null ; } else { lastKineticLaw = lastReaction . getKineticLaw ( ) ; if ( lastKineticLaw == null ) { return null ; } } LocalParameter parameter = new LocalParameter ( id , getLevel ( ) , getVersion ( ) ) ; lastKineticLaw . addLocalParameter ( parameter ) ; return parameter ; } +" +190,"public void messageReceived ( ChannelHandlerContext ctx , MobileStateHolder state , StringMessage message ) { var splitBody = split2 ( message . 
body ) ; var user = state . user ; var energyAmountToAdd = Integer . parseInt ( splitBody [ 0 ] ) ; ResponseMessage response ; if ( splitBody . length == 2 && isValidTransactionId ( splitBody [ 1 ] ) ) { double price = calcPrice ( energyAmountToAdd ) ; insertPurchase ( user . email , energyAmountToAdd , price , splitBody [ 1 ] ) ; user . addEnergy ( energyAmountToAdd ) ; response = ok ( message . id ) ; } else { if ( ! wasErrorPrinted ) { wasErrorPrinted = true ; } response = notAllowed ( message . id ) ; } ctx . writeAndFlush ( response , ctx . voidPromise ( ) ) ; } +","public void messageReceived ( ChannelHandlerContext ctx , MobileStateHolder state , StringMessage message ) { var splitBody = split2 ( message . body ) ; var user = state . user ; var energyAmountToAdd = Integer . parseInt ( splitBody [ 0 ] ) ; ResponseMessage response ; if ( splitBody . length == 2 && isValidTransactionId ( splitBody [ 1 ] ) ) { double price = calcPrice ( energyAmountToAdd ) ; insertPurchase ( user . email , energyAmountToAdd , price , splitBody [ 1 ] ) ; user . addEnergy ( energyAmountToAdd ) ; response = ok ( message . id ) ; } else { if ( ! wasErrorPrinted ) { log . warn ( ""Purchase {} with invalid transaction id '{}'. {} ({})."" , splitBody [ 0 ] , splitBody [ 1 ] , user . email , state . version ) ; wasErrorPrinted = true ; } response = notAllowed ( message . id ) ; } ctx . writeAndFlush ( response , ctx . voidPromise ( ) ) ; } +" +191,"public void startElement ( String uri , String localName , String qName , Attributes attributes ) { String name = localName ; if ( uri != null && ! """" . equals ( uri ) ) name = uri + "":"" + name ; events . start ( name ) ; for ( int i = 0 ; i < attributes . getLength ( ) ; i ++ ) { String k = attributes . getLocalName ( i ) ; String auri = attributes . getURI ( i ) ; if ( auri != null && ( XMLNS . equals ( auri ) || XML . equals ( auri ) ) ) continue ; if ( auri != null && ! """" . equals ( auri ) ) k = auri + "":"" + k ; events . start ( ""@"" + k ) ; events . text ( attributes . getValue ( i ) ) ; events . end ( ) ; } } +","public void startElement ( String uri , String localName , String qName , Attributes attributes ) { log . trace ( String . format ( ""Start of element <%s>"" , localName ) ) ; String name = localName ; if ( uri != null && ! """" . equals ( uri ) ) name = uri + "":"" + name ; events . start ( name ) ; for ( int i = 0 ; i < attributes . getLength ( ) ; i ++ ) { String k = attributes . getLocalName ( i ) ; String auri = attributes . getURI ( i ) ; if ( auri != null && ( XMLNS . equals ( auri ) || XML . equals ( auri ) ) ) continue ; if ( auri != null && ! """" . equals ( auri ) ) k = auri + "":"" + k ; events . start ( ""@"" + k ) ; events . text ( attributes . getValue ( i ) ) ; events . end ( ) ; } } +" +192,"protected List < String > resolveTableSchema ( @ Nonnull final String schema , @ Nonnull final String table ) { List < String > columnSet = new Vector < > ( ) ; try ( final Statement st = conn . createStatement ( ) ) { st . execute ( ""use default"" ) ; String ddl = ""desc "" + HiveUtils . quoteIdentifier ( schema , table ) ; ResultSet rs = doSelectSQL ( st , ddl ) ; while ( rs . next ( ) ) { if ( StringUtils . isEmpty ( rs . getString ( 1 ) ) ) { break ; } columnSet . add ( rs . 
getString ( 1 ) ) ; } } catch ( SQLException e ) { throw new RuntimeException ( ""Failed to inspect schema"" , e ) ; } return columnSet ; } +","protected List < String > resolveTableSchema ( @ Nonnull final String schema , @ Nonnull final String table ) { List < String > columnSet = new Vector < > ( ) ; try ( final Statement st = conn . createStatement ( ) ) { st . execute ( ""use default"" ) ; String ddl = ""desc "" + HiveUtils . quoteIdentifier ( schema , table ) ; logger . info ( ""Resolving table schema [{}]"" , ddl ) ; ResultSet rs = doSelectSQL ( st , ddl ) ; while ( rs . next ( ) ) { if ( StringUtils . isEmpty ( rs . getString ( 1 ) ) ) { break ; } columnSet . add ( rs . getString ( 1 ) ) ; } } catch ( SQLException e ) { throw new RuntimeException ( ""Failed to inspect schema"" , e ) ; } return columnSet ; } +" +193,"public void run ( ) { ConsumerIterator < byte [ ] , byte [ ] > it = m_stream . iterator ( ) ; try { while ( it . hasNext ( ) ) { String message = new String ( it . next ( ) . message ( ) ) ; logger . warn ( ""adding message: "" + message ) ; messages . add ( message ) ; } } catch ( Exception e ) { logger . error ( ""Exception waiting on Kafka..."" , e ) ; } latch . countDown ( ) ; logger . warn ( ""Shutting down Thread: "" + m_threadNumber ) ; } +","public void run ( ) { logger . warn ( ""calling PollingKafkaConsumer.run()"" ) ; ConsumerIterator < byte [ ] , byte [ ] > it = m_stream . iterator ( ) ; try { while ( it . hasNext ( ) ) { String message = new String ( it . next ( ) . message ( ) ) ; logger . warn ( ""adding message: "" + message ) ; messages . add ( message ) ; } } catch ( Exception e ) { logger . error ( ""Exception waiting on Kafka..."" , e ) ; } latch . countDown ( ) ; logger . warn ( ""Shutting down Thread: "" + m_threadNumber ) ; } +" +194,"public void run ( ) { logger . warn ( ""calling PollingKafkaConsumer.run()"" ) ; ConsumerIterator < byte [ ] , byte [ ] > it = m_stream . iterator ( ) ; try { while ( it . hasNext ( ) ) { String message = new String ( it . next ( ) . message ( ) ) ; messages . add ( message ) ; } } catch ( Exception e ) { logger . error ( ""Exception waiting on Kafka..."" , e ) ; } latch . countDown ( ) ; logger . warn ( ""Shutting down Thread: "" + m_threadNumber ) ; } +","public void run ( ) { logger . warn ( ""calling PollingKafkaConsumer.run()"" ) ; ConsumerIterator < byte [ ] , byte [ ] > it = m_stream . iterator ( ) ; try { while ( it . hasNext ( ) ) { String message = new String ( it . next ( ) . message ( ) ) ; logger . warn ( ""adding message: "" + message ) ; messages . add ( message ) ; } } catch ( Exception e ) { logger . error ( ""Exception waiting on Kafka..."" , e ) ; } latch . countDown ( ) ; logger . warn ( ""Shutting down Thread: "" + m_threadNumber ) ; } +" +195,"public void run ( ) { logger . warn ( ""calling PollingKafkaConsumer.run()"" ) ; ConsumerIterator < byte [ ] , byte [ ] > it = m_stream . iterator ( ) ; try { while ( it . hasNext ( ) ) { String message = new String ( it . next ( ) . message ( ) ) ; logger . warn ( ""adding message: "" + message ) ; messages . add ( message ) ; } } catch ( Exception e ) { } latch . countDown ( ) ; logger . warn ( ""Shutting down Thread: "" + m_threadNumber ) ; } +","public void run ( ) { logger . warn ( ""calling PollingKafkaConsumer.run()"" ) ; ConsumerIterator < byte [ ] , byte [ ] > it = m_stream . iterator ( ) ; try { while ( it . hasNext ( ) ) { String message = new String ( it . next ( ) . message ( ) ) ; logger . warn ( ""adding message: "" + message ) ; messages . 
add ( message ) ; } } catch ( Exception e ) { logger . error ( ""Exception waiting on Kafka..."" , e ) ; } latch . countDown ( ) ; logger . warn ( ""Shutting down Thread: "" + m_threadNumber ) ; } +" +196,"public void run ( ) { logger . warn ( ""calling PollingKafkaConsumer.run()"" ) ; ConsumerIterator < byte [ ] , byte [ ] > it = m_stream . iterator ( ) ; try { while ( it . hasNext ( ) ) { String message = new String ( it . next ( ) . message ( ) ) ; logger . warn ( ""adding message: "" + message ) ; messages . add ( message ) ; } } catch ( Exception e ) { logger . error ( ""Exception waiting on Kafka..."" , e ) ; } latch . countDown ( ) ; } +","public void run ( ) { logger . warn ( ""calling PollingKafkaConsumer.run()"" ) ; ConsumerIterator < byte [ ] , byte [ ] > it = m_stream . iterator ( ) ; try { while ( it . hasNext ( ) ) { String message = new String ( it . next ( ) . message ( ) ) ; logger . warn ( ""adding message: "" + message ) ; messages . add ( message ) ; } } catch ( Exception e ) { logger . error ( ""Exception waiting on Kafka..."" , e ) ; } latch . countDown ( ) ; logger . warn ( ""Shutting down Thread: "" + m_threadNumber ) ; } +" +197,"public void run ( ) { boolean duplicateFound = false ; for ( RadioStation station : stationsList ) { if ( ! duplicateFound && station . getName ( ) . equals ( radioStation . getName ( ) ) ) { duplicateFound = true ; deferred . reject ( AddFavoriteStationErrorEnum . DUPLICATE_RADIOSTATION ) ; break ; } } if ( ! duplicateFound ) { stationsList . add ( radioStation ) ; deferred . resolve ( true ) ; } } +","public void run ( ) { boolean duplicateFound = false ; for ( RadioStation station : stationsList ) { if ( ! duplicateFound && station . getName ( ) . equals ( radioStation . getName ( ) ) ) { duplicateFound = true ; deferred . reject ( AddFavoriteStationErrorEnum . DUPLICATE_RADIOSTATION ) ; break ; } } if ( ! duplicateFound ) { logger . info ( PRINT_BORDER + ""addFavoriteStation("" + radioStation + "")"" + PRINT_BORDER ) ; stationsList . add ( radioStation ) ; deferred . resolve ( true ) ; } } +" +198,"private void updateLogProperties ( File karafHome , ExamSystem _system ) throws IOException { DoNotModifyLogOption [ ] modifyLog = _system . getOptions ( DoNotModifyLogOption . class ) ; if ( modifyLog != null && modifyLog . length != 0 ) { return ; } LoggingBackend loggingBackend = getLoggingBackend ( karafHome ) ; String realLogLevel = retrieveRealLogLevel ( _system ) ; File customPropertiesFile = new File ( karafHome , framework . getKarafEtc ( ) + ""/org.ops4j.pax.logging.cfg"" ) ; Properties karafPropertyFile = new Properties ( ) ; karafPropertyFile . load ( new FileInputStream ( customPropertiesFile ) ) ; loggingBackend . updatePaxLoggingConfiguration ( karafPropertyFile , realLogLevel ) ; karafPropertyFile . store ( new FileOutputStream ( customPropertiesFile ) , ""updated by pax-exam"" ) ; } +","private void updateLogProperties ( File karafHome , ExamSystem _system ) throws IOException { DoNotModifyLogOption [ ] modifyLog = _system . getOptions ( DoNotModifyLogOption . class ) ; if ( modifyLog != null && modifyLog . length != 0 ) { LOGGER . info ( ""Log file should not be modified by the test framework"" ) ; return ; } LoggingBackend loggingBackend = getLoggingBackend ( karafHome ) ; String realLogLevel = retrieveRealLogLevel ( _system ) ; File customPropertiesFile = new File ( karafHome , framework . getKarafEtc ( ) + ""/org.ops4j.pax.logging.cfg"" ) ; Properties karafPropertyFile = new Properties ( ) ; karafPropertyFile . 
load ( new FileInputStream ( customPropertiesFile ) ) ; loggingBackend . updatePaxLoggingConfiguration ( karafPropertyFile , realLogLevel ) ; karafPropertyFile . store ( new FileOutputStream ( customPropertiesFile ) , ""updated by pax-exam"" ) ; } +" +199,"public void onClick ( AjaxRequestTarget target ) { AbstractExecutionAdapter bkp = model . getObject ( ) ; if ( ! doSelectReady ( bkp ) ) { setLinkEnabled ( ( AjaxLink ) downLoadLink . getParent ( ) . get ( ""cancel"" ) , false , target ) ; } else { try { backupFacade ( ) . abandonExecution ( bkp . getId ( ) ) ; PageParameters pp = new PageParameters ( ) ; pp . add ( ""id"" , bkp . getId ( ) ) ; if ( bkp instanceof BackupExecutionAdapter ) { pp . add ( ""clazz"" , BackupExecutionAdapter . class . getSimpleName ( ) ) ; } else if ( bkp instanceof RestoreExecutionAdapter ) { pp . add ( ""clazz"" , RestoreExecutionAdapter . class . getSimpleName ( ) ) ; } setResponsePage ( BackupRestorePage . class , pp ) ; } catch ( NoSuchJobExecutionException | JobExecutionAlreadyRunningException e ) { error ( e ) ; } } } +","public void onClick ( AjaxRequestTarget target ) { AbstractExecutionAdapter bkp = model . getObject ( ) ; if ( ! doSelectReady ( bkp ) ) { setLinkEnabled ( ( AjaxLink ) downLoadLink . getParent ( ) . get ( ""cancel"" ) , false , target ) ; } else { try { backupFacade ( ) . abandonExecution ( bkp . getId ( ) ) ; PageParameters pp = new PageParameters ( ) ; pp . add ( ""id"" , bkp . getId ( ) ) ; if ( bkp instanceof BackupExecutionAdapter ) { pp . add ( ""clazz"" , BackupExecutionAdapter . class . getSimpleName ( ) ) ; } else if ( bkp instanceof RestoreExecutionAdapter ) { pp . add ( ""clazz"" , RestoreExecutionAdapter . class . getSimpleName ( ) ) ; } setResponsePage ( BackupRestorePage . class , pp ) ; } catch ( NoSuchJobExecutionException | JobExecutionAlreadyRunningException e ) { error ( e ) ; LOGGER . log ( Level . WARNING , """" , e ) ; } } } +" +200,"public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; deleteGatewayStoragePreference_result result = new deleteGatewayStoragePreference_result ( ) ; if ( e instanceof org . apache . airavata . model . error . InvalidRequestException ) { result . ire = ( org . apache . airavata . model . error . InvalidRequestException ) e ; result . setIreIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataClientException ) { result . ace = ( org . apache . airavata . model . error . AiravataClientException ) e ; result . setAceIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataSystemException ) { result . ase = ( org . apache . airavata . model . error . AiravataSystemException ) e ; result . setAseIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AuthorizationException ) { result . ae = ( org . apache . airavata . model . error . AuthorizationException ) e ; result . setAeIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( ""TApplicationException inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . 
error ( ""Exception inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( ""Exception writing to internal frame buffer"" , ex ) ; fb . close ( ) ; } } +","public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; deleteGatewayStoragePreference_result result = new deleteGatewayStoragePreference_result ( ) ; if ( e instanceof org . apache . airavata . model . error . InvalidRequestException ) { result . ire = ( org . apache . airavata . model . error . InvalidRequestException ) e ; result . setIreIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataClientException ) { result . ace = ( org . apache . airavata . model . error . AiravataClientException ) e ; result . setAceIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataSystemException ) { result . ase = ( org . apache . airavata . model . error . AiravataSystemException ) e ; result . setAseIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AuthorizationException ) { result . ae = ( org . apache . airavata . model . error . AuthorizationException ) e ; result . setAeIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( ""TTransportException inside handler"" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( ""TApplicationException inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( ""Exception inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( ""Exception writing to internal frame buffer"" , ex ) ; fb . close ( ) ; } } +" +201,"public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; deleteGatewayStoragePreference_result result = new deleteGatewayStoragePreference_result ( ) ; if ( e instanceof org . apache . airavata . model . error . InvalidRequestException ) { result . ire = ( org . apache . airavata . model . error . InvalidRequestException ) e ; result . setIreIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataClientException ) { result . ace = ( org . apache . airavata . model . error . AiravataClientException ) e ; result . setAceIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataSystemException ) { result . ase = ( org . apache . airavata . model . error . AiravataSystemException ) e ; result . 
setAseIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AuthorizationException ) { result . ae = ( org . apache . airavata . model . error . AuthorizationException ) e ; result . setAeIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( ""TTransportException inside handler"" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( ""Exception inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( ""Exception writing to internal frame buffer"" , ex ) ; fb . close ( ) ; } } +","public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; deleteGatewayStoragePreference_result result = new deleteGatewayStoragePreference_result ( ) ; if ( e instanceof org . apache . airavata . model . error . InvalidRequestException ) { result . ire = ( org . apache . airavata . model . error . InvalidRequestException ) e ; result . setIreIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataClientException ) { result . ace = ( org . apache . airavata . model . error . AiravataClientException ) e ; result . setAceIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataSystemException ) { result . ase = ( org . apache . airavata . model . error . AiravataSystemException ) e ; result . setAseIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AuthorizationException ) { result . ae = ( org . apache . airavata . model . error . AuthorizationException ) e ; result . setAeIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( ""TTransportException inside handler"" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( ""TApplicationException inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( ""Exception inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( ""Exception writing to internal frame buffer"" , ex ) ; fb . close ( ) ; } } +" +202,"public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; deleteGatewayStoragePreference_result result = new deleteGatewayStoragePreference_result ( ) ; if ( e instanceof org . apache . 
airavata . model . error . InvalidRequestException ) { result . ire = ( org . apache . airavata . model . error . InvalidRequestException ) e ; result . setIreIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataClientException ) { result . ace = ( org . apache . airavata . model . error . AiravataClientException ) e ; result . setAceIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataSystemException ) { result . ase = ( org . apache . airavata . model . error . AiravataSystemException ) e ; result . setAseIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AuthorizationException ) { result . ae = ( org . apache . airavata . model . error . AuthorizationException ) e ; result . setAeIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( ""TTransportException inside handler"" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( ""TApplicationException inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( ""Exception writing to internal frame buffer"" , ex ) ; fb . close ( ) ; } } +","public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; deleteGatewayStoragePreference_result result = new deleteGatewayStoragePreference_result ( ) ; if ( e instanceof org . apache . airavata . model . error . InvalidRequestException ) { result . ire = ( org . apache . airavata . model . error . InvalidRequestException ) e ; result . setIreIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataClientException ) { result . ace = ( org . apache . airavata . model . error . AiravataClientException ) e ; result . setAceIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataSystemException ) { result . ase = ( org . apache . airavata . model . error . AiravataSystemException ) e ; result . setAseIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AuthorizationException ) { result . ae = ( org . apache . airavata . model . error . AuthorizationException ) e ; result . setAeIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( ""TTransportException inside handler"" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( ""TApplicationException inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( ""Exception inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . 
thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( ""Exception writing to internal frame buffer"" , ex ) ; fb . close ( ) ; } } +" +203,"public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; deleteGatewayStoragePreference_result result = new deleteGatewayStoragePreference_result ( ) ; if ( e instanceof org . apache . airavata . model . error . InvalidRequestException ) { result . ire = ( org . apache . airavata . model . error . InvalidRequestException ) e ; result . setIreIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataClientException ) { result . ace = ( org . apache . airavata . model . error . AiravataClientException ) e ; result . setAceIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataSystemException ) { result . ase = ( org . apache . airavata . model . error . AiravataSystemException ) e ; result . setAseIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AuthorizationException ) { result . ae = ( org . apache . airavata . model . error . AuthorizationException ) e ; result . setAeIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( ""TTransportException inside handler"" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( ""TApplicationException inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( ""Exception inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { fb . close ( ) ; } } +","public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; deleteGatewayStoragePreference_result result = new deleteGatewayStoragePreference_result ( ) ; if ( e instanceof org . apache . airavata . model . error . InvalidRequestException ) { result . ire = ( org . apache . airavata . model . error . InvalidRequestException ) e ; result . setIreIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataClientException ) { result . ace = ( org . apache . airavata . model . error . AiravataClientException ) e ; result . setAceIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AiravataSystemException ) { result . ase = ( org . apache . airavata . model . error . AiravataSystemException ) e ; result . setAseIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . airavata . model . error . AuthorizationException ) { result . ae = ( org . apache . airavata . model . error . AuthorizationException ) e ; result . 
setAeIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( ""TTransportException inside handler"" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( ""TApplicationException inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( ""Exception inside handler"" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( ""Exception writing to internal frame buffer"" , ex ) ; fb . close ( ) ; } } +" +204,"static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +","static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . 
getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +" +205,"static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +","static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . 
debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +" +206,"static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +","static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . 
translateException ( ""getFileStatus"" , path , e ) ; } } +" +207,"static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +","static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +" +208,"static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . 
debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +","static OBSFileStatus innerFsGetObjectStatus ( final OBSFileSystem owner , final Path f ) throws IOException { final Path path = OBSCommonUtils . qualify ( owner , f ) ; String key = OBSCommonUtils . pathToKey ( owner , path ) ; LOG . debug ( ""Getting path status for {} ({})"" , path , key ) ; if ( key . isEmpty ( ) ) { LOG . debug ( ""Found root directory"" ) ; return new OBSFileStatus ( path , owner . getUsername ( ) ) ; } try { final GetAttributeRequest getAttrRequest = new GetAttributeRequest ( owner . getBucket ( ) , key ) ; ObsFSAttribute meta = owner . getObsClient ( ) . getAttribute ( getAttrRequest ) ; owner . getSchemeStatistics ( ) . incrementReadOps ( 1 ) ; if ( fsIsFolder ( meta ) ) { LOG . debug ( ""Found file (with /): fake directory"" ) ; return new OBSFileStatus ( path , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , owner . getUsername ( ) ) ; } else { LOG . debug ( ""Found file (with /): real file? should not happen: {}"" , key ) ; return new OBSFileStatus ( meta . getContentLength ( ) , OBSCommonUtils . dateToLong ( meta . getLastModified ( ) ) , path , owner . getDefaultBlockSize ( path ) , owner . getUsername ( ) ) ; } } catch ( ObsException e ) { if ( e . getResponseCode ( ) == OBSCommonUtils . NOT_FOUND_CODE ) { LOG . debug ( ""Not Found: {}"" , path ) ; throw new FileNotFoundException ( ""No such file or directory: "" + path ) ; } if ( e . getResponseCode ( ) == OBSCommonUtils . CONFLICT_CODE ) { throw new FileConflictException ( ""file conflicts: "" + e . getResponseStatus ( ) ) ; } throw OBSCommonUtils . translateException ( ""getFileStatus"" , path , e ) ; } } +" +209,"public void delete ( ScriptVersionTraceKey scriptVersionTraceKey ) { String deleteStatement = deleteStatement ( scriptVersionTraceKey ) ; getMetadataRepository ( ) . executeUpdate ( deleteStatement ) ; } +","public void delete ( ScriptVersionTraceKey scriptVersionTraceKey ) { LOGGER . trace ( MessageFormat . format ( ""Deleting ScriptVersionTrace {0}."" , scriptVersionTraceKey . toString ( ) ) ) ; String deleteStatement = deleteStatement ( scriptVersionTraceKey ) ; getMetadataRepository ( ) . 
executeUpdate ( deleteStatement ) ; } +" +210,"@ PostConstruct public void init ( ) { logger = LoggerFactory . getLogger ( getClass ( ) ) ; attributes = Attributes . getComponentAttributesFromFacesConfig ( UIQueue . class , getClass ( ) ) ; attributes . setAttribute ( ""rendered"" , true ) ; attributes . setAttribute ( ""requestDelay"" , 750 ) ; } +","@ PostConstruct public void init ( ) { logger = LoggerFactory . getLogger ( getClass ( ) ) ; logger . debug ( ""initializing bean "" + getClass ( ) . getName ( ) ) ; attributes = Attributes . getComponentAttributesFromFacesConfig ( UIQueue . class , getClass ( ) ) ; attributes . setAttribute ( ""rendered"" , true ) ; attributes . setAttribute ( ""requestDelay"" , 750 ) ; } +" +211,"@ SuppressWarnings ( { ""rawtypes"" , ""unchecked"" } ) @ Test public void testSqlSelect ( ) { SelectOperator oper = new SelectOperator ( ) ; oper . addIndex ( new ColumnIndex ( ""b"" , null ) ) ; oper . addIndex ( new ColumnIndex ( ""c"" , null ) ) ; BetweenCondition cond = new BetweenCondition ( ""a"" , 0 , 2 ) ; oper . setCondition ( cond ) ; CollectorTestSink sink = new CollectorTestSink ( ) ; oper . outport . setSink ( sink ) ; oper . setup ( null ) ; oper . beginWindow ( 1 ) ; HashMap < String , Object > tuple = new HashMap < String , Object > ( ) ; tuple . put ( ""a"" , 0 ) ; tuple . put ( ""b"" , 1 ) ; tuple . put ( ""c"" , 2 ) ; oper . inport . process ( tuple ) ; tuple = new HashMap < String , Object > ( ) ; tuple . put ( ""a"" , 1 ) ; tuple . put ( ""b"" , 3 ) ; tuple . put ( ""c"" , 4 ) ; oper . inport . process ( tuple ) ; tuple = new HashMap < String , Object > ( ) ; tuple . put ( ""a"" , 2 ) ; tuple . put ( ""b"" , 5 ) ; tuple . put ( ""c"" , 6 ) ; oper . inport . process ( tuple ) ; tuple = new HashMap < String , Object > ( ) ; tuple . put ( ""a"" , 3 ) ; tuple . put ( ""b"" , 7 ) ; tuple . put ( ""c"" , 8 ) ; oper . inport . process ( tuple ) ; oper . endWindow ( ) ; oper . teardown ( ) ; } +","@ SuppressWarnings ( { ""rawtypes"" , ""unchecked"" } ) @ Test public void testSqlSelect ( ) { SelectOperator oper = new SelectOperator ( ) ; oper . addIndex ( new ColumnIndex ( ""b"" , null ) ) ; oper . addIndex ( new ColumnIndex ( ""c"" , null ) ) ; BetweenCondition cond = new BetweenCondition ( ""a"" , 0 , 2 ) ; oper . setCondition ( cond ) ; CollectorTestSink sink = new CollectorTestSink ( ) ; oper . outport . setSink ( sink ) ; oper . setup ( null ) ; oper . beginWindow ( 1 ) ; HashMap < String , Object > tuple = new HashMap < String , Object > ( ) ; tuple . put ( ""a"" , 0 ) ; tuple . put ( ""b"" , 1 ) ; tuple . put ( ""c"" , 2 ) ; oper . inport . process ( tuple ) ; tuple = new HashMap < String , Object > ( ) ; tuple . put ( ""a"" , 1 ) ; tuple . put ( ""b"" , 3 ) ; tuple . put ( ""c"" , 4 ) ; oper . inport . process ( tuple ) ; tuple = new HashMap < String , Object > ( ) ; tuple . put ( ""a"" , 2 ) ; tuple . put ( ""b"" , 5 ) ; tuple . put ( ""c"" , 6 ) ; oper . inport . process ( tuple ) ; tuple = new HashMap < String , Object > ( ) ; tuple . put ( ""a"" , 3 ) ; tuple . put ( ""b"" , 7 ) ; tuple . put ( ""c"" , 8 ) ; oper . inport . process ( tuple ) ; oper . endWindow ( ) ; oper . teardown ( ) ; LOG . debug ( ""{}"" , sink . collectedTuples ) ; } +" +212,"OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document fileDocument , String status ) throws CatalogDBException , CatalogParameterException , CatalogAuthorizationException { long tmpStartTime = startQuery ( ) ; long fileUid = fileDocument . 
getLong ( PRIVATE_UID ) ; long studyUid = fileDocument . getLong ( PRIVATE_STUDY_UID ) ; String path = fileDocument . getString ( QueryParams . PATH . key ( ) ) ; Query query = new Query ( QueryParams . STUDY_UID . key ( ) , studyUid ) ; if ( File . Type . FILE . name ( ) . equals ( fileDocument . getString ( QueryParams . TYPE . key ( ) ) ) ) { query . append ( QueryParams . UID . key ( ) , fileUid ) ; } else { query . append ( QueryParams . PATH . key ( ) , ""~^"" + path + ""*"" ) ; } if ( FileStatus . TRASHED . equals ( status ) ) { Bson update = Updates . set ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) ) ; QueryOptions multi = new QueryOptions ( MongoDBCollection . MULTI , true ) ; return endWrite ( tmpStartTime , fileCollection . update ( parseQuery ( query ) , update , multi ) ) ; } else { QueryOptions options = new QueryOptions ( ) . append ( QueryOptions . SORT , QueryParams . PATH . key ( ) ) . append ( QueryOptions . ORDER , QueryOptions . DESCENDING ) ; DBIterator < Document > iterator = nativeIterator ( clientSession , query , options ) ; long numFiles = 0 ; while ( iterator . hasNext ( ) ) { Document tmpFile = iterator . next ( ) ; long tmpFileUid = tmpFile . getLong ( PRIVATE_UID ) ; dbAdaptorFactory . getCatalogJobDBAdaptor ( ) . removeFileReferences ( clientSession , studyUid , tmpFileUid , tmpFile ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) , tmpFile ) ; deletedFileCollection . insert ( clientSession , tmpFile , null ) ; Bson bsonQuery = parseQuery ( new Query ( QueryParams . UID . key ( ) , tmpFileUid ) ) ; DataResult remove = fileCollection . remove ( clientSession , bsonQuery , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" could not be deleted"" ) ; } logger . debug ( ""File uid '{}' deleted from main FILE collection"" , tmpFileUid ) ; numFiles ++ ; } logger . debug ( ""File {}({}) deleted"" , path , fileUid ) ; return endWrite ( tmpStartTime , numFiles , 0 , 0 , numFiles , Collections . emptyList ( ) ) ; } } +","OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document fileDocument , String status ) throws CatalogDBException , CatalogParameterException , CatalogAuthorizationException { long tmpStartTime = startQuery ( ) ; long fileUid = fileDocument . getLong ( PRIVATE_UID ) ; long studyUid = fileDocument . getLong ( PRIVATE_STUDY_UID ) ; String path = fileDocument . getString ( QueryParams . PATH . key ( ) ) ; Query query = new Query ( QueryParams . STUDY_UID . key ( ) , studyUid ) ; if ( File . Type . FILE . name ( ) . equals ( fileDocument . getString ( QueryParams . TYPE . key ( ) ) ) ) { query . append ( QueryParams . UID . key ( ) , fileUid ) ; } else { query . append ( QueryParams . PATH . key ( ) , ""~^"" + path + ""*"" ) ; } if ( FileStatus . TRASHED . equals ( status ) ) { Bson update = Updates . set ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) ) ; QueryOptions multi = new QueryOptions ( MongoDBCollection . MULTI , true ) ; return endWrite ( tmpStartTime , fileCollection . update ( parseQuery ( query ) , update , multi ) ) ; } else { QueryOptions options = new QueryOptions ( ) . append ( QueryOptions . SORT , QueryParams . PATH . key ( ) ) . append ( QueryOptions . 
ORDER , QueryOptions . DESCENDING ) ; DBIterator < Document > iterator = nativeIterator ( clientSession , query , options ) ; long numFiles = 0 ; while ( iterator . hasNext ( ) ) { Document tmpFile = iterator . next ( ) ; long tmpFileUid = tmpFile . getLong ( PRIVATE_UID ) ; dbAdaptorFactory . getCatalogJobDBAdaptor ( ) . removeFileReferences ( clientSession , studyUid , tmpFileUid , tmpFile ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) , tmpFile ) ; deletedFileCollection . insert ( clientSession , tmpFile , null ) ; logger . debug ( ""Inserted file uid '{}' in DELETE collection"" , tmpFileUid ) ; Bson bsonQuery = parseQuery ( new Query ( QueryParams . UID . key ( ) , tmpFileUid ) ) ; DataResult remove = fileCollection . remove ( clientSession , bsonQuery , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" could not be deleted"" ) ; } logger . debug ( ""File uid '{}' deleted from main FILE collection"" , tmpFileUid ) ; numFiles ++ ; } logger . debug ( ""File {}({}) deleted"" , path , fileUid ) ; return endWrite ( tmpStartTime , numFiles , 0 , 0 , numFiles , Collections . emptyList ( ) ) ; } } +" +213,"OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document fileDocument , String status ) throws CatalogDBException , CatalogParameterException , CatalogAuthorizationException { long tmpStartTime = startQuery ( ) ; long fileUid = fileDocument . getLong ( PRIVATE_UID ) ; long studyUid = fileDocument . getLong ( PRIVATE_STUDY_UID ) ; String path = fileDocument . getString ( QueryParams . PATH . key ( ) ) ; Query query = new Query ( QueryParams . STUDY_UID . key ( ) , studyUid ) ; if ( File . Type . FILE . name ( ) . equals ( fileDocument . getString ( QueryParams . TYPE . key ( ) ) ) ) { query . append ( QueryParams . UID . key ( ) , fileUid ) ; } else { query . append ( QueryParams . PATH . key ( ) , ""~^"" + path + ""*"" ) ; } if ( FileStatus . TRASHED . equals ( status ) ) { Bson update = Updates . set ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) ) ; QueryOptions multi = new QueryOptions ( MongoDBCollection . MULTI , true ) ; return endWrite ( tmpStartTime , fileCollection . update ( parseQuery ( query ) , update , multi ) ) ; } else { QueryOptions options = new QueryOptions ( ) . append ( QueryOptions . SORT , QueryParams . PATH . key ( ) ) . append ( QueryOptions . ORDER , QueryOptions . DESCENDING ) ; DBIterator < Document > iterator = nativeIterator ( clientSession , query , options ) ; long numFiles = 0 ; while ( iterator . hasNext ( ) ) { Document tmpFile = iterator . next ( ) ; long tmpFileUid = tmpFile . getLong ( PRIVATE_UID ) ; dbAdaptorFactory . getCatalogJobDBAdaptor ( ) . removeFileReferences ( clientSession , studyUid , tmpFileUid , tmpFile ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) , tmpFile ) ; deletedFileCollection . insert ( clientSession , tmpFile , null ) ; logger . debug ( ""Inserted file uid '{}' in DELETE collection"" , tmpFileUid ) ; Bson bsonQuery = parseQuery ( new Query ( QueryParams . UID . key ( ) , tmpFileUid ) ) ; DataResult remove = fileCollection . remove ( clientSession , bsonQuery , null ) ; if ( remove . 
+","OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document fileDocument , String status ) throws CatalogDBException , CatalogParameterException , CatalogAuthorizationException { long tmpStartTime = startQuery ( ) ; long fileUid = fileDocument . getLong ( PRIVATE_UID ) ; long studyUid = fileDocument . getLong ( PRIVATE_STUDY_UID ) ; String path = fileDocument . getString ( QueryParams . PATH . key ( ) ) ; Query query = new Query ( QueryParams . STUDY_UID . key ( ) , studyUid ) ; if ( File . Type . FILE . name ( ) . equals ( fileDocument . getString ( QueryParams . TYPE . key ( ) ) ) ) { query . append ( QueryParams . UID . key ( ) , fileUid ) ; } else { query . append ( QueryParams . PATH . key ( ) , ""~^"" + path + ""*"" ) ; } if ( FileStatus . TRASHED . equals ( status ) ) { Bson update = Updates . set ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) ) ; QueryOptions multi = new QueryOptions ( MongoDBCollection . MULTI , true ) ; return endWrite ( tmpStartTime , fileCollection . update ( parseQuery ( query ) , update , multi ) ) ; } else { QueryOptions options = new QueryOptions ( ) . append ( QueryOptions . SORT , QueryParams . PATH . key ( ) ) . append ( QueryOptions . ORDER , QueryOptions . DESCENDING ) ; DBIterator < Document > iterator = nativeIterator ( clientSession , query , options ) ; long numFiles = 0 ; while ( iterator . hasNext ( ) ) { Document tmpFile = iterator . next ( ) ; long tmpFileUid = tmpFile . getLong ( PRIVATE_UID ) ; dbAdaptorFactory . getCatalogJobDBAdaptor ( ) . removeFileReferences ( clientSession , studyUid , tmpFileUid , tmpFile ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) , tmpFile ) ; deletedFileCollection . insert ( clientSession , tmpFile , null ) ; logger . debug ( ""Inserted file uid '{}' in DELETE collection"" , tmpFileUid ) ; Bson bsonQuery = parseQuery ( new Query ( QueryParams . UID . key ( ) , tmpFileUid ) ) ; DataResult remove = fileCollection . remove ( clientSession , bsonQuery , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" could not be deleted"" ) ; } logger . debug ( ""File uid '{}' deleted from main FILE collection"" , tmpFileUid ) ; numFiles ++ ; } logger . debug ( ""File {}({}) deleted"" , path , fileUid ) ; return endWrite ( tmpStartTime , numFiles , 0 , 0 , numFiles , Collections . emptyList ( ) ) ; } } 
+"
+214,"OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document fileDocument , String status ) throws CatalogDBException , CatalogParameterException , CatalogAuthorizationException { long tmpStartTime = startQuery ( ) ; long fileUid = fileDocument . getLong ( PRIVATE_UID ) ; long studyUid = fileDocument . getLong ( PRIVATE_STUDY_UID ) ; String path = fileDocument . getString ( QueryParams . PATH . key ( ) ) ; Query query = new Query ( QueryParams . STUDY_UID . key ( ) , studyUid ) ; if ( File . Type . FILE . name ( ) . equals ( fileDocument . getString ( QueryParams . TYPE . key ( ) ) ) ) { query . append ( QueryParams . UID . key ( ) , fileUid ) ; } else { query . append ( QueryParams . PATH . key ( ) , ""~^"" + path + ""*"" ) ; } if ( FileStatus . TRASHED . equals ( status ) ) { Bson update = Updates . set ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) ) ; QueryOptions multi = new QueryOptions ( MongoDBCollection . MULTI , true ) ; return endWrite ( tmpStartTime , fileCollection . update ( parseQuery ( query ) , update , multi ) ) ; } else { QueryOptions options = new QueryOptions ( ) . append ( QueryOptions . SORT , QueryParams . PATH . key ( ) ) . append ( QueryOptions . ORDER , QueryOptions . DESCENDING ) ; DBIterator < Document > iterator = nativeIterator ( clientSession , query , options ) ; long numFiles = 0 ; while ( iterator . hasNext ( ) ) { Document tmpFile = iterator . next ( ) ; long tmpFileUid = tmpFile . getLong ( PRIVATE_UID ) ; dbAdaptorFactory . getCatalogJobDBAdaptor ( ) . removeFileReferences ( clientSession , studyUid , tmpFileUid , tmpFile ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) , tmpFile ) ; deletedFileCollection . insert ( clientSession , tmpFile , null ) ; logger . debug ( ""Inserted file uid '{}' in DELETE collection"" , tmpFileUid ) ; Bson bsonQuery = parseQuery ( new Query ( QueryParams . UID . key ( ) , tmpFileUid ) ) ; DataResult remove = fileCollection . remove ( clientSession , bsonQuery , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" could not be deleted"" ) ; } logger . debug ( ""File uid '{}' deleted from main FILE collection"" , tmpFileUid ) ; numFiles ++ ; } return endWrite ( tmpStartTime , numFiles , 0 , 0 , numFiles , Collections . emptyList ( ) ) ; } } 
+","OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document fileDocument , String status ) throws CatalogDBException , CatalogParameterException , CatalogAuthorizationException { long tmpStartTime = startQuery ( ) ; long fileUid = fileDocument . getLong ( PRIVATE_UID ) ; long studyUid = fileDocument . getLong ( PRIVATE_STUDY_UID ) ; String path = fileDocument . getString ( QueryParams . PATH . key ( ) ) ; Query query = new Query ( QueryParams . STUDY_UID . key ( ) , studyUid ) ; if ( File . Type . FILE . name ( ) . equals ( fileDocument . getString ( QueryParams . TYPE . key ( ) ) ) ) { query . append ( QueryParams . UID . key ( ) , fileUid ) ; } else { query . append ( QueryParams . PATH . key ( ) , ""~^"" + path + ""*"" ) ; } if ( FileStatus . TRASHED . equals ( status ) ) { Bson update = Updates . set ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) ) ; QueryOptions multi = new QueryOptions ( MongoDBCollection . MULTI , true ) ; return endWrite ( tmpStartTime , fileCollection . update ( parseQuery ( query ) , update , multi ) ) ; } else { QueryOptions options = new QueryOptions ( ) . append ( QueryOptions . SORT , QueryParams . PATH . key ( ) ) . append ( QueryOptions . ORDER , QueryOptions . DESCENDING ) ; DBIterator < Document > iterator = nativeIterator ( clientSession , query , options ) ; long numFiles = 0 ; while ( iterator . hasNext ( ) ) { Document tmpFile = iterator . next ( ) ; long tmpFileUid = tmpFile . getLong ( PRIVATE_UID ) ; dbAdaptorFactory . getCatalogJobDBAdaptor ( ) . removeFileReferences ( clientSession , studyUid , tmpFileUid , tmpFile ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new FileStatus ( status ) , ""status"" ) , tmpFile ) ; deletedFileCollection . insert ( clientSession , tmpFile , null ) ; logger . debug ( ""Inserted file uid '{}' in DELETE collection"" , tmpFileUid ) ; Bson bsonQuery = parseQuery ( new Query ( QueryParams . UID . key ( ) , tmpFileUid ) ) ; DataResult remove = fileCollection . remove ( clientSession , bsonQuery , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""File "" + tmpFileUid + "" could not be deleted"" ) ; } logger . debug ( ""File uid '{}' deleted from main FILE collection"" , tmpFileUid ) ; numFiles ++ ; } logger . debug ( ""File {}({}) deleted"" , path , fileUid ) ; return endWrite ( tmpStartTime , numFiles , 0 , 0 , numFiles , Collections . emptyList ( ) ) ; } } 
+"
+215,"public synchronized Map < String , LocalResource > call ( ) throws IOException { final Map < String , LocalResource > globalResources = new HashMap < > ( 1 ) ; if ( ! this . isUploaded ) { this . pathToGlobalJar = this . uploader . uploadToJobFolder ( makeGlobalJar ( ) ) ; this . isUploaded = true ; } final LocalResource updatedGlobalJarResource = this . uploader . makeLocalResourceForJarFile ( this . pathToGlobalJar ) ; if ( this . globalJarResource != null && this . globalJarResource . getTimestamp ( ) != updatedGlobalJarResource . getTimestamp ( ) ) { } this . globalJarResource = updatedGlobalJarResource ; globalResources . put ( this . fileNames . getGlobalFolderPath ( ) , updatedGlobalJarResource ) ; return globalResources ; } 
+","public synchronized Map < String , LocalResource > call ( ) throws IOException { final Map < String , LocalResource > globalResources = new HashMap < > ( 1 ) ; if ( ! this . isUploaded ) { this . pathToGlobalJar = this . uploader . uploadToJobFolder ( makeGlobalJar ( ) ) ; this . isUploaded = true ; } final LocalResource updatedGlobalJarResource = this . uploader . makeLocalResourceForJarFile ( this . pathToGlobalJar ) ; if ( this . globalJarResource != null && this . globalJarResource . getTimestamp ( ) != updatedGlobalJarResource . getTimestamp ( ) ) { LOG . log ( Level . WARNING , ""The global JAR LocalResource timestamp has been changed from "" + this . globalJarResource . getTimestamp ( ) + "" to "" + updatedGlobalJarResource . getTimestamp ( ) ) ; } this . globalJarResource = updatedGlobalJarResource ; globalResources . put ( this . fileNames . getGlobalFolderPath ( ) , updatedGlobalJarResource ) ; return globalResources ; } 
+"
+216,"@ RequestMapping ( value = ""/{id}"" , method = RequestMethod . GET ) public String handleRequest ( Model model , @ PathVariable Long id ) { LetterToAllophoneMapping letterToAllophoneMapping = letterToAllophoneMappingDao . read ( id ) ; model . addAttribute ( ""letterToAllophoneMapping"" , letterToAllophoneMapping ) ; List < Letter > letters = letterDao . readAllOrdered ( ) ; model . addAttribute ( ""letters"" , letters ) ; List < Allophone > allophones = allophoneDao . readAllOrdered ( ) ; model . addAttribute ( ""allophones"" , allophones ) ; return ""content/letter-to-allophone-mapping/edit"" ; } 
+","@ RequestMapping ( value = ""/{id}"" , method = RequestMethod . GET ) public String handleRequest ( Model model , @ PathVariable Long id ) { logger . info ( ""handleRequest"" ) ; LetterToAllophoneMapping letterToAllophoneMapping = letterToAllophoneMappingDao . read ( id ) ; model . addAttribute ( ""letterToAllophoneMapping"" , letterToAllophoneMapping ) ; List < Letter > letters = letterDao . readAllOrdered ( ) ; model . addAttribute ( ""letters"" , letters ) ; List < Allophone > allophones = allophoneDao . readAllOrdered ( ) ; model . addAttribute ( ""allophones"" , allophones ) ; return ""content/letter-to-allophone-mapping/edit"" ; } 
+"
+217,"public void addRelatedEntryFields ( Document document , Object object ) throws Exception { Comment comment = ( Comment ) object ; FileEntry fileEntry = null ; try { fileEntry = dlAppLocalService . getFileEntry ( comment . getClassPK ( ) ) ; } catch ( Exception exception ) { if ( _log . isDebugEnabled ( ) ) { } return ; } if ( fileEntry instanceof LiferayFileEntry ) { DLFileEntry dlFileEntry = ( DLFileEntry ) fileEntry . getModel ( ) ; document . addKeyword ( Field . FOLDER_ID , dlFileEntry . getFolderId ( ) ) ; document . addKeyword ( Field . HIDDEN , dlFileEntry . isInHiddenFolder ( ) ) ; document . addKeyword ( Field . TREE_PATH , StringUtil . split ( dlFileEntry . getTreePath ( ) , CharPool . SLASH ) ) ; } } 
+","public void addRelatedEntryFields ( Document document , Object object ) throws Exception { Comment comment = ( Comment ) object ; FileEntry fileEntry = null ; try { fileEntry = dlAppLocalService . getFileEntry ( comment . getClassPK ( ) ) ; } catch ( Exception exception ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( exception , exception ) ; } return ; } if ( fileEntry instanceof LiferayFileEntry ) { DLFileEntry dlFileEntry = ( DLFileEntry ) fileEntry . getModel ( ) ; document . addKeyword ( Field . FOLDER_ID , dlFileEntry . getFolderId ( ) ) ; document . addKeyword ( Field . HIDDEN , dlFileEntry . isInHiddenFolder ( ) ) ; document . addKeyword ( Field . TREE_PATH , StringUtil . split ( dlFileEntry . getTreePath ( ) , CharPool . SLASH ) ) ; } } 
+"
+218,"public void merge ( Collection < W > toBeMerged , W mergeResult ) { if ( LOG . isDebugEnabled ( ) ) { } mergeResults . put ( mergeResult , toBeMerged ) ; } 
+","public void merge ( Collection < W > toBeMerged , W mergeResult ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( ""Merging {} into {}"" , toBeMerged , mergeResult ) ; } mergeResults . put ( mergeResult , toBeMerged ) ; } 
+"
+219,"protected final void flush0 ( ) { SelectionKey tmpKey = null ; Selector writeSelector = null ; int attempts = 0 ; try { while ( true ) { if ( writeSelector == null ) { writeSelector = SelectorFactory . getSelector ( ) ; if ( writeSelector == null ) { return ; } tmpKey = selectableChannel . register ( writeSelector , SelectionKey . OP_WRITE ) ; } if ( writeSelector . select ( 1000 ) == 0 ) { attempts ++ ; if ( attempts > 2 ) { return ; } } else { break ; } } onWrite ( selectableChannel . keyFor ( writeSelector ) ) ; } catch ( ClosedChannelException cce ) { onException ( cce ) ; close ( ) ; } catch ( IOException ioe ) { onException ( ioe ) ; log . error ( ""Flush error"" , ioe ) ; close ( ) ; } finally { if ( tmpKey != null ) { tmpKey . cancel ( ) ; tmpKey = null ; } if ( writeSelector != null ) { try { writeSelector . selectNow ( ) ; } catch ( IOException e ) { log . error ( ""Temp selector selectNow error"" , e ) ; } SelectorFactory . returnSelector ( writeSelector ) ; } } } 
error ( ""Temp selector selectNow error"" , e ) ; } SelectorFactory . returnSelector ( writeSelector ) ; } } } +","protected final void flush0 ( ) { SelectionKey tmpKey = null ; Selector writeSelector = null ; int attempts = 0 ; try { while ( true ) { if ( writeSelector == null ) { writeSelector = SelectorFactory . getSelector ( ) ; if ( writeSelector == null ) { return ; } tmpKey = selectableChannel . register ( writeSelector , SelectionKey . OP_WRITE ) ; } if ( writeSelector . select ( 1000 ) == 0 ) { attempts ++ ; if ( attempts > 2 ) { return ; } } else { break ; } } onWrite ( selectableChannel . keyFor ( writeSelector ) ) ; } catch ( ClosedChannelException cce ) { onException ( cce ) ; log . error ( ""Flush error"" , cce ) ; close ( ) ; } catch ( IOException ioe ) { onException ( ioe ) ; log . error ( ""Flush error"" , ioe ) ; close ( ) ; } finally { if ( tmpKey != null ) { tmpKey . cancel ( ) ; tmpKey = null ; } if ( writeSelector != null ) { try { writeSelector . selectNow ( ) ; } catch ( IOException e ) { log . error ( ""Temp selector selectNow error"" , e ) ; } SelectorFactory . returnSelector ( writeSelector ) ; } } } +" +220,"protected final void flush0 ( ) { SelectionKey tmpKey = null ; Selector writeSelector = null ; int attempts = 0 ; try { while ( true ) { if ( writeSelector == null ) { writeSelector = SelectorFactory . getSelector ( ) ; if ( writeSelector == null ) { return ; } tmpKey = selectableChannel . register ( writeSelector , SelectionKey . OP_WRITE ) ; } if ( writeSelector . select ( 1000 ) == 0 ) { attempts ++ ; if ( attempts > 2 ) { return ; } } else { break ; } } onWrite ( selectableChannel . keyFor ( writeSelector ) ) ; } catch ( ClosedChannelException cce ) { onException ( cce ) ; log . error ( ""Flush error"" , cce ) ; close ( ) ; } catch ( IOException ioe ) { onException ( ioe ) ; close ( ) ; } finally { if ( tmpKey != null ) { tmpKey . cancel ( ) ; tmpKey = null ; } if ( writeSelector != null ) { try { writeSelector . selectNow ( ) ; } catch ( IOException e ) { log . error ( ""Temp selector selectNow error"" , e ) ; } SelectorFactory . returnSelector ( writeSelector ) ; } } } +","protected final void flush0 ( ) { SelectionKey tmpKey = null ; Selector writeSelector = null ; int attempts = 0 ; try { while ( true ) { if ( writeSelector == null ) { writeSelector = SelectorFactory . getSelector ( ) ; if ( writeSelector == null ) { return ; } tmpKey = selectableChannel . register ( writeSelector , SelectionKey . OP_WRITE ) ; } if ( writeSelector . select ( 1000 ) == 0 ) { attempts ++ ; if ( attempts > 2 ) { return ; } } else { break ; } } onWrite ( selectableChannel . keyFor ( writeSelector ) ) ; } catch ( ClosedChannelException cce ) { onException ( cce ) ; log . error ( ""Flush error"" , cce ) ; close ( ) ; } catch ( IOException ioe ) { onException ( ioe ) ; log . error ( ""Flush error"" , ioe ) ; close ( ) ; } finally { if ( tmpKey != null ) { tmpKey . cancel ( ) ; tmpKey = null ; } if ( writeSelector != null ) { try { writeSelector . selectNow ( ) ; } catch ( IOException e ) { log . error ( ""Temp selector selectNow error"" , e ) ; } SelectorFactory . returnSelector ( writeSelector ) ; } } } +" +221,"protected final void flush0 ( ) { SelectionKey tmpKey = null ; Selector writeSelector = null ; int attempts = 0 ; try { while ( true ) { if ( writeSelector == null ) { writeSelector = SelectorFactory . getSelector ( ) ; if ( writeSelector == null ) { return ; } tmpKey = selectableChannel . register ( writeSelector , SelectionKey . 
OP_WRITE ) ; } if ( writeSelector . select ( 1000 ) == 0 ) { attempts ++ ; if ( attempts > 2 ) { return ; } } else { break ; } } onWrite ( selectableChannel . keyFor ( writeSelector ) ) ; } catch ( ClosedChannelException cce ) { onException ( cce ) ; log . error ( ""Flush error"" , cce ) ; close ( ) ; } catch ( IOException ioe ) { onException ( ioe ) ; log . error ( ""Flush error"" , ioe ) ; close ( ) ; } finally { if ( tmpKey != null ) { tmpKey . cancel ( ) ; tmpKey = null ; } if ( writeSelector != null ) { try { writeSelector . selectNow ( ) ; } catch ( IOException e ) { } SelectorFactory . returnSelector ( writeSelector ) ; } } } +","protected final void flush0 ( ) { SelectionKey tmpKey = null ; Selector writeSelector = null ; int attempts = 0 ; try { while ( true ) { if ( writeSelector == null ) { writeSelector = SelectorFactory . getSelector ( ) ; if ( writeSelector == null ) { return ; } tmpKey = selectableChannel . register ( writeSelector , SelectionKey . OP_WRITE ) ; } if ( writeSelector . select ( 1000 ) == 0 ) { attempts ++ ; if ( attempts > 2 ) { return ; } } else { break ; } } onWrite ( selectableChannel . keyFor ( writeSelector ) ) ; } catch ( ClosedChannelException cce ) { onException ( cce ) ; log . error ( ""Flush error"" , cce ) ; close ( ) ; } catch ( IOException ioe ) { onException ( ioe ) ; log . error ( ""Flush error"" , ioe ) ; close ( ) ; } finally { if ( tmpKey != null ) { tmpKey . cancel ( ) ; tmpKey = null ; } if ( writeSelector != null ) { try { writeSelector . selectNow ( ) ; } catch ( IOException e ) { log . error ( ""Temp selector selectNow error"" , e ) ; } SelectorFactory . returnSelector ( writeSelector ) ; } } } +" +222,"protected String histItemFilterQueryProvider ( FilterCriteria filter , int numberDecimalcount , String table , String simpleName , ZoneId timeZone ) { String filterString = """" ; if ( filter . getBeginDate ( ) != null ) { filterString += filterString . isEmpty ( ) ? "" WHERE"" : "" AND"" ; filterString += "" TIME>'"" + JDBC_DATE_FORMAT . format ( filter . getBeginDate ( ) . withZoneSameInstant ( timeZone ) ) + ""'"" ; } if ( filter . getEndDate ( ) != null ) { filterString += filterString . isEmpty ( ) ? "" WHERE"" : "" AND"" ; filterString += "" TIME<'"" + JDBC_DATE_FORMAT . format ( filter . getEndDate ( ) . withZoneSameInstant ( timeZone ) ) + ""'"" ; } filterString += ( filter . getOrdering ( ) == Ordering . ASCENDING ) ? "" ORDER BY time ASC"" : "" ORDER BY time DESC"" ; if ( filter . getPageSize ( ) != 0x7fffffff ) { filterString += "" OFFSET "" + filter . getPageSize ( ) + "" ROWS FETCH FIRST "" + ( filter . getPageNumber ( ) * filter . getPageSize ( ) + 1 ) + "" ROWS ONLY"" ; } String queryString = ""SELECT time,"" ; if ( ""NUMBERITEM"" . equalsIgnoreCase ( simpleName ) && numberDecimalcount > - 1 ) { queryString += ""CAST(value 0."" ; for ( int i = 0 ; i < numberDecimalcount ; i ++ ) { queryString += ""0"" ; } queryString += ""5 AS DECIMAL(31,"" + numberDecimalcount + ""))"" ; } else { queryString += "" value FROM "" + table . toUpperCase ( ) ; } if ( ! filterString . isEmpty ( ) ) { queryString += filterString ; } logger . debug ( ""JDBC::query queryString = {}"" , queryString ) ; return queryString ; } +","protected String histItemFilterQueryProvider ( FilterCriteria filter , int numberDecimalcount , String table , String simpleName , ZoneId timeZone ) { logger . debug ( ""JDBC::getHistItemFilterQueryProvider filter = {}, numberDecimalcount = {}, table = {}, simpleName = {}"" , StringUtilsExt . 
+"
+223,"protected String histItemFilterQueryProvider ( FilterCriteria filter , int numberDecimalcount , String table , String simpleName , ZoneId timeZone ) { logger . debug ( ""JDBC::getHistItemFilterQueryProvider filter = {}, numberDecimalcount = {}, table = {}, simpleName = {}"" , StringUtilsExt . filterToString ( filter ) , numberDecimalcount , table , simpleName ) ; String filterString = """" ; if ( filter . getBeginDate ( ) != null ) { filterString += filterString . isEmpty ( ) ? "" WHERE"" : "" AND"" ; filterString += "" TIME>'"" + JDBC_DATE_FORMAT . format ( filter . getBeginDate ( ) . withZoneSameInstant ( timeZone ) ) + ""'"" ; } if ( filter . getEndDate ( ) != null ) { filterString += filterString . isEmpty ( ) ? "" WHERE"" : "" AND"" ; filterString += "" TIME<'"" + JDBC_DATE_FORMAT . format ( filter . getEndDate ( ) . withZoneSameInstant ( timeZone ) ) + ""'"" ; } filterString += ( filter . getOrdering ( ) == Ordering . ASCENDING ) ? "" ORDER BY time ASC"" : "" ORDER BY time DESC"" ; if ( filter . getPageSize ( ) != 0x7fffffff ) { filterString += "" OFFSET "" + filter . getPageSize ( ) + "" ROWS FETCH FIRST "" + ( filter . getPageNumber ( ) * filter . getPageSize ( ) + 1 ) + "" ROWS ONLY"" ; } String queryString = ""SELECT time,"" ; if ( ""NUMBERITEM"" . equalsIgnoreCase ( simpleName ) && numberDecimalcount > - 1 ) { queryString += ""CAST(value 0."" ; for ( int i = 0 ; i < numberDecimalcount ; i ++ ) { queryString += ""0"" ; } queryString += ""5 AS DECIMAL(31,"" + numberDecimalcount + ""))"" ; } else { queryString += "" value FROM "" + table . toUpperCase ( ) ; } if ( ! filterString . isEmpty ( ) ) { queryString += filterString ; } return queryString ; } 
+","protected String histItemFilterQueryProvider ( FilterCriteria filter , int numberDecimalcount , String table , String simpleName , ZoneId timeZone ) { logger . debug ( ""JDBC::getHistItemFilterQueryProvider filter = {}, numberDecimalcount = {}, table = {}, simpleName = {}"" , StringUtilsExt . filterToString ( filter ) , numberDecimalcount , table , simpleName ) ; String filterString = """" ; if ( filter . getBeginDate ( ) != null ) { filterString += filterString . isEmpty ( ) ? "" WHERE"" : "" AND"" ; filterString += "" TIME>'"" + JDBC_DATE_FORMAT . format ( filter . getBeginDate ( ) . withZoneSameInstant ( timeZone ) ) + ""'"" ; } if ( filter . getEndDate ( ) != null ) { filterString += filterString . isEmpty ( ) ? "" WHERE"" : "" AND"" ; filterString += "" TIME<'"" + JDBC_DATE_FORMAT . format ( filter . getEndDate ( ) . withZoneSameInstant ( timeZone ) ) + ""'"" ; } filterString += ( filter . getOrdering ( ) == Ordering . ASCENDING ) ? "" ORDER BY time ASC"" : "" ORDER BY time DESC"" ; if ( filter . getPageSize ( ) != 0x7fffffff ) { filterString += "" OFFSET "" + filter . getPageSize ( ) + "" ROWS FETCH FIRST "" + ( filter . getPageNumber ( ) * filter . getPageSize ( ) + 1 ) + "" ROWS ONLY"" ; } String queryString = ""SELECT time,"" ; if ( ""NUMBERITEM"" . equalsIgnoreCase ( simpleName ) && numberDecimalcount > - 1 ) { queryString += ""CAST(value 0."" ; for ( int i = 0 ; i < numberDecimalcount ; i ++ ) { queryString += ""0"" ; } queryString += ""5 AS DECIMAL(31,"" + numberDecimalcount + ""))"" ; } else { queryString += "" value FROM "" + table . toUpperCase ( ) ; } if ( ! filterString . isEmpty ( ) ) { queryString += filterString ; } logger . debug ( ""JDBC::query queryString = {}"" , queryString ) ; return queryString ; } 
+"
+224,"public void migrate ( Connection connection ) throws Exception { int total = countScreenshotsWithoutHash ( connection ) ; forEachScreenshotWithoutHash ( total , connection , swh -> { try { updateScreenshotHash ( connection , swh . getPkgScreenshotId ( ) , HASH_FUNCTION . hashBytes ( swh . getImageData ( ) ) . toString ( ) ) ; } catch ( SQLException se ) { throw new IllegalStateException ( ""unable to update a screenshot"" , se ) ; } } ) ; LOGGER . info ( ""did complete updating screenshots' hashes."" ) ; } 
+","public void migrate ( Connection connection ) throws Exception { int total = countScreenshotsWithoutHash ( connection ) ; LOGGER . info ( ""did find {} screenshots requiring a hash"" , total ) ; forEachScreenshotWithoutHash ( total , connection , swh -> { try { updateScreenshotHash ( connection , swh . getPkgScreenshotId ( ) , HASH_FUNCTION . hashBytes ( swh . getImageData ( ) ) . toString ( ) ) ; } catch ( SQLException se ) { throw new IllegalStateException ( ""unable to update a screenshot"" , se ) ; } } ) ; LOGGER . info ( ""did complete updating screenshots' hashes."" ) ; } 
+"
+225,"public void migrate ( Connection connection ) throws Exception { int total = countScreenshotsWithoutHash ( connection ) ; LOGGER . info ( ""did find {} screenshots requiring a hash"" , total ) ; forEachScreenshotWithoutHash ( total , connection , swh -> { try { updateScreenshotHash ( connection , swh . getPkgScreenshotId ( ) , HASH_FUNCTION . hashBytes ( swh . getImageData ( ) ) . toString ( ) ) ; } catch ( SQLException se ) { throw new IllegalStateException ( ""unable to update a screenshot"" , se ) ; } } ) ; } 
+","public void migrate ( Connection connection ) throws Exception { int total = countScreenshotsWithoutHash ( connection ) ; LOGGER . info ( ""did find {} screenshots requiring a hash"" , total ) ; forEachScreenshotWithoutHash ( total , connection , swh -> { try { updateScreenshotHash ( connection , swh . getPkgScreenshotId ( ) , HASH_FUNCTION . hashBytes ( swh . getImageData ( ) ) . toString ( ) ) ; } catch ( SQLException se ) { throw new IllegalStateException ( ""unable to update a screenshot"" , se ) ; } } ) ; LOGGER . info ( ""did complete updating screenshots' hashes."" ) ; } 
+"
+226,"public Object invoke ( List < ServiceReference > services , Method method , Object ... args ) throws Throwable { for ( ServiceReference ref : services ) { Object service = bundleContext . getService ( ref ) ; try { method . invoke ( service , args ) ; } catch ( InvocationTargetException e ) { LOGGER . debug ( ""ExceptionDetails: "" , e ) ; } } return null ; } 
+","public Object invoke ( List < ServiceReference > services , Method method , Object ... args ) throws Throwable { for ( ServiceReference ref : services ) { Object service = bundleContext . getService ( ref ) ; try { method . invoke ( service , args ) ; } catch ( InvocationTargetException e ) { LOGGER . warn ( ""connector in composition threw an Exception in method call %s"" , method . toString ( ) ) ; LOGGER . debug ( ""ExceptionDetails: "" , e ) ; } } return null ; } 
+"
+227,"public Object invoke ( List < ServiceReference > services , Method method , Object ... args ) throws Throwable { for ( ServiceReference ref : services ) { Object service = bundleContext . getService ( ref ) ; try { method . invoke ( service , args ) ; } catch ( InvocationTargetException e ) { LOGGER . warn ( ""connector in composition threw an Exception in method call %s"" , method . toString ( ) ) ; } } return null ; } 
+","public Object invoke ( List < ServiceReference > services , Method method , Object ... args ) throws Throwable { for ( ServiceReference ref : services ) { Object service = bundleContext . getService ( ref ) ; try { method . invoke ( service , args ) ; } catch ( InvocationTargetException e ) { LOGGER . warn ( ""connector in composition threw an Exception in method call %s"" , method . toString ( ) ) ; LOGGER . debug ( ""ExceptionDetails: "" , e ) ; } } return null ; } 
+"
+228,"public String getSupportedInvocationType ( final QName artifactType , final TArtifactTemplate artifactTemplate ) { if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( artifactType . toString ( ) ) ) { return artifactType . toString ( ) ; } else { final Document properties = ToscaEngine . getEntityTemplateProperties ( artifactTemplate ) ; final String invocationType = getInvocationType ( properties ) ; if ( invocationType != null ) { if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( invocationType ) ) { LOG . debug ( ""Found a supported invocation type in the artifact template properties"" ) ; return invocationType ; } } } LOG . debug ( ""Artifact type was not found in the list of currently supported types: {}"" , pluginRegistry . getInvocationPluginServices ( ) . toString ( ) ) ; return null ; } 
+","public String getSupportedInvocationType ( final QName artifactType , final TArtifactTemplate artifactTemplate ) { LOG . debug ( ""Searching if a invocation plug-in supports the type {}"" , artifactType ) ; if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( artifactType . toString ( ) ) ) { return artifactType . toString ( ) ; } else { final Document properties = ToscaEngine . getEntityTemplateProperties ( artifactTemplate ) ; final String invocationType = getInvocationType ( properties ) ; if ( invocationType != null ) { if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( invocationType ) ) { LOG . debug ( ""Found a supported invocation type in the artifact template properties"" ) ; return invocationType ; } } } LOG . debug ( ""Artifact type was not found in the list of currently supported types: {}"" , pluginRegistry . getInvocationPluginServices ( ) . toString ( ) ) ; return null ; } 
+"
+229,"public String getSupportedInvocationType ( final QName artifactType , final TArtifactTemplate artifactTemplate ) { LOG . debug ( ""Searching if a invocation plug-in supports the type {}"" , artifactType ) ; if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( artifactType . toString ( ) ) ) { return artifactType . toString ( ) ; } else { final Document properties = ToscaEngine . getEntityTemplateProperties ( artifactTemplate ) ; final String invocationType = getInvocationType ( properties ) ; if ( invocationType != null ) { if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( invocationType ) ) { return invocationType ; } } } LOG . debug ( ""Artifact type was not found in the list of currently supported types: {}"" , pluginRegistry . getInvocationPluginServices ( ) . toString ( ) ) ; return null ; } 
+","public String getSupportedInvocationType ( final QName artifactType , final TArtifactTemplate artifactTemplate ) { LOG . debug ( ""Searching if a invocation plug-in supports the type {}"" , artifactType ) ; if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( artifactType . toString ( ) ) ) { return artifactType . toString ( ) ; } else { final Document properties = ToscaEngine . getEntityTemplateProperties ( artifactTemplate ) ; final String invocationType = getInvocationType ( properties ) ; if ( invocationType != null ) { if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( invocationType ) ) { LOG . debug ( ""Found a supported invocation type in the artifact template properties"" ) ; return invocationType ; } } } LOG . debug ( ""Artifact type was not found in the list of currently supported types: {}"" , pluginRegistry . getInvocationPluginServices ( ) . toString ( ) ) ; return null ; } 
+"
+230,"public String getSupportedInvocationType ( final QName artifactType , final TArtifactTemplate artifactTemplate ) { LOG . debug ( ""Searching if a invocation plug-in supports the type {}"" , artifactType ) ; if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( artifactType . toString ( ) ) ) { return artifactType . toString ( ) ; } else { final Document properties = ToscaEngine . getEntityTemplateProperties ( artifactTemplate ) ; final String invocationType = getInvocationType ( properties ) ; if ( invocationType != null ) { if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( invocationType ) ) { LOG . debug ( ""Found a supported invocation type in the artifact template properties"" ) ; return invocationType ; } } } return null ; } 
+","public String getSupportedInvocationType ( final QName artifactType , final TArtifactTemplate artifactTemplate ) { LOG . debug ( ""Searching if a invocation plug-in supports the type {}"" , artifactType ) ; if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( artifactType . toString ( ) ) ) { return artifactType . toString ( ) ; } else { final Document properties = ToscaEngine . getEntityTemplateProperties ( artifactTemplate ) ; final String invocationType = getInvocationType ( properties ) ; if ( invocationType != null ) { if ( pluginRegistry . getInvocationPluginServices ( ) . containsKey ( invocationType ) ) { LOG . debug ( ""Found a supported invocation type in the artifact template properties"" ) ; return invocationType ; } } } LOG . debug ( ""Artifact type was not found in the list of currently supported types: {}"" , pluginRegistry . getInvocationPluginServices ( ) . toString ( ) ) ; return null ; } 
debug ( ""Found a supported invocation type in the artifact template properties"" ) ; return invocationType ; } } } LOG . debug ( ""Artifact type was not found in the list of currently supported types: {}"" , pluginRegistry . getInvocationPluginServices ( ) . toString ( ) ) ; return null ; } +" +231,"public boolean process ( Exchange exchange , final AsyncCallback callback ) { final Object oldStepId = exchange . removeProperty ( ExchangePropertyKey . STEP_ID ) ; exchange . setProperty ( ExchangePropertyKey . STEP_ID , stepId ) ; EventHelper . notifyStepStarted ( exchange . getContext ( ) , exchange , stepId ) ; return super . process ( exchange , sync -> { boolean failed = exchange . isFailed ( ) ; try { if ( failed ) { EventHelper . notifyStepFailed ( exchange . getContext ( ) , exchange , stepId ) ; } else { EventHelper . notifyStepDone ( exchange . getContext ( ) , exchange , stepId ) ; } } catch ( Throwable t ) { } finally { if ( oldStepId != null ) { exchange . setProperty ( ExchangePropertyKey . STEP_ID , oldStepId ) ; } else { exchange . removeProperty ( ExchangePropertyKey . STEP_ID ) ; } callback . done ( sync ) ; } } ) ; } +","public boolean process ( Exchange exchange , final AsyncCallback callback ) { final Object oldStepId = exchange . removeProperty ( ExchangePropertyKey . STEP_ID ) ; exchange . setProperty ( ExchangePropertyKey . STEP_ID , stepId ) ; EventHelper . notifyStepStarted ( exchange . getContext ( ) , exchange , stepId ) ; return super . process ( exchange , sync -> { boolean failed = exchange . isFailed ( ) ; try { if ( failed ) { EventHelper . notifyStepFailed ( exchange . getContext ( ) , exchange , stepId ) ; } else { EventHelper . notifyStepDone ( exchange . getContext ( ) , exchange , stepId ) ; } } catch ( Throwable t ) { LOG . warn ( ""Exception occurred during event notification. This exception will be ignored."" , t ) ; } finally { if ( oldStepId != null ) { exchange . setProperty ( ExchangePropertyKey . STEP_ID , oldStepId ) ; } else { exchange . removeProperty ( ExchangePropertyKey . STEP_ID ) ; } callback . done ( sync ) ; } } ) ; } +" +232,"public static void deleteLiferayUser ( PortletRequest request , org . eclipse . sw360 . datahandler . thrift . users . User user ) { try { User liferayUser = findLiferayUser ( request , user ) ; UserLocalServiceUtil . deleteUser ( liferayUser ) ; } catch ( PortalException | SystemException e ) { } } +","public static void deleteLiferayUser ( PortletRequest request , org . eclipse . sw360 . datahandler . thrift . users . User user ) { try { User liferayUser = findLiferayUser ( request , user ) ; UserLocalServiceUtil . deleteUser ( liferayUser ) ; } catch ( PortalException | SystemException e ) { log . error ( ""Could not delete Liferay user"" , e ) ; } } +" +233,"public void afterMessageProcessed ( PipeLineResult processResult , Object rawMessageOrWrapper , Map < String , Object > context ) throws ListenerException { FS fileSystem = getFileSystem ( ) ; if ( ( rawMessageOrWrapper instanceof MessageWrapper ) ) { MessageWrapper < ? > wrapper = ( MessageWrapper < ? > ) rawMessageOrWrapper ; if ( StringUtils . isNotEmpty ( getLogFolder ( ) ) || StringUtils . isNotEmpty ( getErrorFolder ( ) ) || StringUtils . isNotEmpty ( getProcessedFolder ( ) ) ) { } } else { @ SuppressWarnings ( ""unchecked"" ) F rawMessage = ( F ) rawMessageOrWrapper ; try { if ( StringUtils . isNotEmpty ( getLogFolder ( ) ) ) { FileSystemUtils . 
copyFile ( fileSystem , rawMessage , getLogFolder ( ) , isOverwrite ( ) , getNumberOfBackups ( ) , isCreateFolders ( ) ) ; } if ( isDelete ( ) && ( processResult . isSuccessful ( ) || StringUtils . isEmpty ( getErrorFolder ( ) ) ) ) { fileSystem . deleteFile ( rawMessage ) ; return ; } } catch ( FileSystemException e ) { throw new ListenerException ( ""Could not move or delete file ["" + fileSystem . getName ( rawMessage ) + ""]"" , e ) ; } } } +","public void afterMessageProcessed ( PipeLineResult processResult , Object rawMessageOrWrapper , Map < String , Object > context ) throws ListenerException { FS fileSystem = getFileSystem ( ) ; if ( ( rawMessageOrWrapper instanceof MessageWrapper ) ) { MessageWrapper < ? > wrapper = ( MessageWrapper < ? > ) rawMessageOrWrapper ; if ( StringUtils . isNotEmpty ( getLogFolder ( ) ) || StringUtils . isNotEmpty ( getErrorFolder ( ) ) || StringUtils . isNotEmpty ( getProcessedFolder ( ) ) ) { log . warn ( ""cannot write ["" + wrapper . getId ( ) + ""] to logFolder, errorFolder or processedFolder after manual retry from errorStorage"" ) ; } } else { @ SuppressWarnings ( ""unchecked"" ) F rawMessage = ( F ) rawMessageOrWrapper ; try { if ( StringUtils . isNotEmpty ( getLogFolder ( ) ) ) { FileSystemUtils . copyFile ( fileSystem , rawMessage , getLogFolder ( ) , isOverwrite ( ) , getNumberOfBackups ( ) , isCreateFolders ( ) ) ; } if ( isDelete ( ) && ( processResult . isSuccessful ( ) || StringUtils . isEmpty ( getErrorFolder ( ) ) ) ) { fileSystem . deleteFile ( rawMessage ) ; return ; } } catch ( FileSystemException e ) { throw new ListenerException ( ""Could not move or delete file ["" + fileSystem . getName ( rawMessage ) + ""]"" , e ) ; } } } +" +234,"public StgRechteRolleItv merge ( StgRechteRolleItv detachedInstance ) { try { StgRechteRolleItv result = ( StgRechteRolleItv ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +","public StgRechteRolleItv merge ( StgRechteRolleItv detachedInstance ) { log . debug ( ""merging StgRechteRolleItv instance"" ) ; try { StgRechteRolleItv result = ( StgRechteRolleItv ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +" +235,"public StgRechteRolleItv merge ( StgRechteRolleItv detachedInstance ) { log . debug ( ""merging StgRechteRolleItv instance"" ) ; try { StgRechteRolleItv result = ( StgRechteRolleItv ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +","public StgRechteRolleItv merge ( StgRechteRolleItv detachedInstance ) { log . debug ( ""merging StgRechteRolleItv instance"" ) ; try { StgRechteRolleItv result = ( StgRechteRolleItv ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +" +236,"public StgRechteRolleItv merge ( StgRechteRolleItv detachedInstance ) { log . debug ( ""merging StgRechteRolleItv instance"" ) ; try { StgRechteRolleItv result = ( StgRechteRolleItv ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . 
debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { throw re ; } } +","public StgRechteRolleItv merge ( StgRechteRolleItv detachedInstance ) { log . debug ( ""merging StgRechteRolleItv instance"" ) ; try { StgRechteRolleItv result = ( StgRechteRolleItv ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +" +237,"private void showNotSpecifiedError ( @ NonNls final String resourceId , List < String > modules , String editorNameToSelect ) { String nameToSelect = null ; final StringBuilder names = StringBuilderSpinAllocator . alloc ( ) ; final String message ; try { final int maxModulesToShow = 10 ; for ( String name : modules . size ( ) > maxModulesToShow ? modules . subList ( 0 , maxModulesToShow ) : modules ) { if ( nameToSelect == null ) { nameToSelect = name ; } if ( names . length ( ) > 0 ) { names . append ( "",\n"" ) ; } names . append ( ""\"""" ) ; names . append ( name ) ; names . append ( ""\"""" ) ; } if ( modules . size ( ) > maxModulesToShow ) { names . append ( "",\n..."" ) ; } message = CompilerBundle . message ( resourceId , modules . size ( ) , names . toString ( ) ) ; } finally { StringBuilderSpinAllocator . dispose ( names ) ; } if ( ApplicationManager . getApplication ( ) . isUnitTestMode ( ) ) { } Messages . showMessageDialog ( myProject , message , CommonBundle . getErrorTitle ( ) , Messages . getErrorIcon ( ) ) ; showConfigurationDialog ( nameToSelect , editorNameToSelect ) ; } +","private void showNotSpecifiedError ( @ NonNls final String resourceId , List < String > modules , String editorNameToSelect ) { String nameToSelect = null ; final StringBuilder names = StringBuilderSpinAllocator . alloc ( ) ; final String message ; try { final int maxModulesToShow = 10 ; for ( String name : modules . size ( ) > maxModulesToShow ? modules . subList ( 0 , maxModulesToShow ) : modules ) { if ( nameToSelect == null ) { nameToSelect = name ; } if ( names . length ( ) > 0 ) { names . append ( "",\n"" ) ; } names . append ( ""\"""" ) ; names . append ( name ) ; names . append ( ""\"""" ) ; } if ( modules . size ( ) > maxModulesToShow ) { names . append ( "",\n..."" ) ; } message = CompilerBundle . message ( resourceId , modules . size ( ) , names . toString ( ) ) ; } finally { StringBuilderSpinAllocator . dispose ( names ) ; } if ( ApplicationManager . getApplication ( ) . isUnitTestMode ( ) ) { LOG . error ( message ) ; } Messages . showMessageDialog ( myProject , message , CommonBundle . getErrorTitle ( ) , Messages . getErrorIcon ( ) ) ; showConfigurationDialog ( nameToSelect , editorNameToSelect ) ; } +" +238,"private X509CRL getCrl ( String url ) throws CertificateException , CRLException , NoSuchProviderException , NoSuchParserException , StreamParsingException , MalformedURLException , IOException , ExecutionException { if ( ! ( url . startsWith ( ""http://"" ) || url . startsWith ( ""https://"" ) ) ) { return null ; } String cacheKey = url . toLowerCase ( ) ; X509CRL crl = crlCache . get ( cacheKey ) ; return crl ; } +","private X509CRL getCrl ( String url ) throws CertificateException , CRLException , NoSuchProviderException , NoSuchParserException , StreamParsingException , MalformedURLException , IOException , ExecutionException { if ( ! ( url . startsWith ( ""http://"" ) || url . startsWith ( ""https://"" ) ) ) { log . 
error ( ""It's possible to download CRL via HTTP and HTTPS only"" ) ; return null ; } String cacheKey = url . toLowerCase ( ) ; X509CRL crl = crlCache . get ( cacheKey ) ; return crl ; } +" +239,"public void run ( ) { try { Thread . sleep ( 1000 ) ; cubeService . updateOnNewSegmentReady ( cubeName ) ; } catch ( Throwable ex ) { } } +","public void run ( ) { try { Thread . sleep ( 1000 ) ; cubeService . updateOnNewSegmentReady ( cubeName ) ; } catch ( Throwable ex ) { logger . error ( ""Error in updateOnNewSegmentReady()"" , ex ) ; } } +" +240,"public void setUpCluster ( ) throws Exception { util = getTestingUtil ( getConf ( ) ) ; util . initializeCluster ( getMinServerCount ( ) ) ; LOG . debug ( ""Done initializing/checking cluster"" ) ; cluster = util . getHBaseClusterInterface ( ) ; deleteTableIfNecessary ( ) ; loadTool = new LoadTestTool ( ) ; loadTool . setConf ( util . getConfiguration ( ) ) ; initTable ( ) ; } +","public void setUpCluster ( ) throws Exception { util = getTestingUtil ( getConf ( ) ) ; LOG . debug ( ""Initializing/checking cluster has "" + SERVER_COUNT + "" servers"" ) ; util . initializeCluster ( getMinServerCount ( ) ) ; LOG . debug ( ""Done initializing/checking cluster"" ) ; cluster = util . getHBaseClusterInterface ( ) ; deleteTableIfNecessary ( ) ; loadTool = new LoadTestTool ( ) ; loadTool . setConf ( util . getConfiguration ( ) ) ; initTable ( ) ; } +" +241,"public void setUpCluster ( ) throws Exception { util = getTestingUtil ( getConf ( ) ) ; LOG . debug ( ""Initializing/checking cluster has "" + SERVER_COUNT + "" servers"" ) ; util . initializeCluster ( getMinServerCount ( ) ) ; cluster = util . getHBaseClusterInterface ( ) ; deleteTableIfNecessary ( ) ; loadTool = new LoadTestTool ( ) ; loadTool . setConf ( util . getConfiguration ( ) ) ; initTable ( ) ; } +","public void setUpCluster ( ) throws Exception { util = getTestingUtil ( getConf ( ) ) ; LOG . debug ( ""Initializing/checking cluster has "" + SERVER_COUNT + "" servers"" ) ; util . initializeCluster ( getMinServerCount ( ) ) ; LOG . debug ( ""Done initializing/checking cluster"" ) ; cluster = util . getHBaseClusterInterface ( ) ; deleteTableIfNecessary ( ) ; loadTool = new LoadTestTool ( ) ; loadTool . setConf ( util . getConfiguration ( ) ) ; initTable ( ) ; } +" +242,"private synchronized void addComputation ( String computationId , MapTask originalMapTask , Map < String , String > transformUserNameToStateFamily ) { MapTask mapTask = fixMultiOutputInfos . apply ( originalMapTask ) ; if ( ! computationMap . containsKey ( computationId ) ) { computationMap . put ( computationId , new ComputationState ( computationId , mapTask , workUnitExecutor , transformUserNameToStateFamily , stateCache . forComputation ( computationId ) ) ) ; } } +","private synchronized void addComputation ( String computationId , MapTask originalMapTask , Map < String , String > transformUserNameToStateFamily ) { MapTask mapTask = fixMultiOutputInfos . apply ( originalMapTask ) ; if ( ! computationMap . containsKey ( computationId ) ) { LOG . info ( ""Adding config for {}: {}"" , computationId , mapTask ) ; computationMap . put ( computationId , new ComputationState ( computationId , mapTask , workUnitExecutor , transformUserNameToStateFamily , stateCache . forComputation ( computationId ) ) ) ; } } +" +243,"@ Subscribe @ SuppressWarnings ( ""unused"" ) public void listen ( DeadEvent event ) { } +","@ Subscribe @ SuppressWarnings ( ""unused"" ) public void listen ( DeadEvent event ) { LOG . 
trace ( ""No subscribers were interested in this event: "" + event . getEvent ( ) ) ; } +" +244,"public void deleteSchemaVersion ( Long schemaVersionId ) throws SchemaNotFoundException , SchemaLifecycleException { } +","public void deleteSchemaVersion ( Long schemaVersionId ) throws SchemaNotFoundException , SchemaLifecycleException { LOG . info ( ""++++++++++++ deleteSchemaVersion {}"" , schemaVersionId ) ; } +" +245,"public void contribute ( Document document , KaleoInstanceToken kaleoInstanceToken ) { document . addKeyword ( KaleoInstanceTokenField . CLASS_NAME , kaleoInstanceToken . getClassName ( ) ) ; document . addKeyword ( Field . CLASS_PK , kaleoInstanceToken . getClassPK ( ) ) ; document . addKeywordSortable ( KaleoInstanceTokenField . COMPLETED , kaleoInstanceToken . isCompleted ( ) ) ; document . addDateSortable ( KaleoInstanceTokenField . COMPLETION_DATE , kaleoInstanceToken . getCompletionDate ( ) ) ; document . addDateSortable ( Field . CREATE_DATE , kaleoInstanceToken . getCreateDate ( ) ) ; document . addKeywordSortable ( KaleoInstanceTokenField . CURRENT_KALEO_NODE_NAME , kaleoInstanceToken . getCurrentKaleoNodeName ( ) ) ; document . addNumberSortable ( KaleoInstanceTokenField . KALEO_INSTANCE_ID , kaleoInstanceToken . getKaleoInstanceId ( ) ) ; document . addKeyword ( KaleoInstanceTokenField . KALEO_INSTANCE_TOKEN_ID , kaleoInstanceToken . getKaleoInstanceTokenId ( ) ) ; document . addDateSortable ( Field . MODIFIED_DATE , kaleoInstanceToken . getModifiedDate ( ) ) ; document . addKeyword ( KaleoInstanceTokenField . PARENT_KALEO_INSTANCE_TOKEN_ID , kaleoInstanceToken . getParentKaleoInstanceTokenId ( ) ) ; try { KaleoInstance kaleoInstance = kaleoInstanceLocalService . getKaleoInstance ( kaleoInstanceToken . getKaleoInstanceId ( ) ) ; document . addKeyword ( KaleoInstanceTokenField . KALEO_DEFINITION_NAME , kaleoInstance . getKaleoDefinitionName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isWarnEnabled ( ) ) { } } AssetEntry assetEntry = getAssetEntry ( kaleoInstanceToken ) ; if ( assetEntry != null ) { document . addLocalizedText ( KaleoInstanceTokenField . ASSET_DESCRIPTION , LocalizationUtil . populateLocalizationMap ( assetEntry . getDescriptionMap ( ) , assetEntry . getDefaultLanguageId ( ) , assetEntry . getGroupId ( ) ) ) ; document . addLocalizedText ( KaleoInstanceTokenField . ASSET_TITLE , LocalizationUtil . populateLocalizationMap ( assetEntry . getTitleMap ( ) , assetEntry . getDefaultLanguageId ( ) , assetEntry . getGroupId ( ) ) ) ; } else { WorkflowHandler < ? > workflowHandler = WorkflowHandlerRegistryUtil . getWorkflowHandler ( kaleoInstanceToken . getClassName ( ) ) ; for ( Locale availableLocale : LanguageUtil . getAvailableLocales ( kaleoInstanceToken . getGroupId ( ) ) ) { document . addText ( LocalizationUtil . getLocalizedName ( KaleoInstanceTokenField . ASSET_TITLE , availableLocale . getLanguage ( ) ) , workflowHandler . getTitle ( kaleoInstanceToken . getClassPK ( ) , availableLocale ) ) ; } } } +","public void contribute ( Document document , KaleoInstanceToken kaleoInstanceToken ) { document . addKeyword ( KaleoInstanceTokenField . CLASS_NAME , kaleoInstanceToken . getClassName ( ) ) ; document . addKeyword ( Field . CLASS_PK , kaleoInstanceToken . getClassPK ( ) ) ; document . addKeywordSortable ( KaleoInstanceTokenField . COMPLETED , kaleoInstanceToken . isCompleted ( ) ) ; document . addDateSortable ( KaleoInstanceTokenField . COMPLETION_DATE , kaleoInstanceToken . getCompletionDate ( ) ) ; document . 
addDateSortable ( Field . CREATE_DATE , kaleoInstanceToken . getCreateDate ( ) ) ; document . addKeywordSortable ( KaleoInstanceTokenField . CURRENT_KALEO_NODE_NAME , kaleoInstanceToken . getCurrentKaleoNodeName ( ) ) ; document . addNumberSortable ( KaleoInstanceTokenField . KALEO_INSTANCE_ID , kaleoInstanceToken . getKaleoInstanceId ( ) ) ; document . addKeyword ( KaleoInstanceTokenField . KALEO_INSTANCE_TOKEN_ID , kaleoInstanceToken . getKaleoInstanceTokenId ( ) ) ; document . addDateSortable ( Field . MODIFIED_DATE , kaleoInstanceToken . getModifiedDate ( ) ) ; document . addKeyword ( KaleoInstanceTokenField . PARENT_KALEO_INSTANCE_TOKEN_ID , kaleoInstanceToken . getParentKaleoInstanceTokenId ( ) ) ; try { KaleoInstance kaleoInstance = kaleoInstanceLocalService . getKaleoInstance ( kaleoInstanceToken . getKaleoInstanceId ( ) ) ; document . addKeyword ( KaleoInstanceTokenField . KALEO_DEFINITION_NAME , kaleoInstance . getKaleoDefinitionName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isWarnEnabled ( ) ) { _log . warn ( portalException , portalException ) ; } } AssetEntry assetEntry = getAssetEntry ( kaleoInstanceToken ) ; if ( assetEntry != null ) { document . addLocalizedText ( KaleoInstanceTokenField . ASSET_DESCRIPTION , LocalizationUtil . populateLocalizationMap ( assetEntry . getDescriptionMap ( ) , assetEntry . getDefaultLanguageId ( ) , assetEntry . getGroupId ( ) ) ) ; document . addLocalizedText ( KaleoInstanceTokenField . ASSET_TITLE , LocalizationUtil . populateLocalizationMap ( assetEntry . getTitleMap ( ) , assetEntry . getDefaultLanguageId ( ) , assetEntry . getGroupId ( ) ) ) ; } else { WorkflowHandler < ? > workflowHandler = WorkflowHandlerRegistryUtil . getWorkflowHandler ( kaleoInstanceToken . getClassName ( ) ) ; for ( Locale availableLocale : LanguageUtil . getAvailableLocales ( kaleoInstanceToken . getGroupId ( ) ) ) { document . addText ( LocalizationUtil . getLocalizedName ( KaleoInstanceTokenField . ASSET_TITLE , availableLocale . getLanguage ( ) ) , workflowHandler . getTitle ( kaleoInstanceToken . getClassPK ( ) , availableLocale ) ) ; } } } +" +246,"private void setSideBySideRectangles ( final Rectangle2D bounds , final List < Color > barElementColors , final double valRange , final AggregationMethod aggrMethod , final int baseLine , final HistogramHiliteCalculator calculator ) { final int barHeight = ( int ) bounds . getHeight ( ) ; final double heightPerVal = barHeight / valRange ; final int barX = ( int ) bounds . getX ( ) ; final int barY = ( int ) bounds . getY ( ) ; final int barWidth = ( int ) bounds . getWidth ( ) ; final int noOfBars = barElementColors . size ( ) ; final int elementWidth = calculateSideBySideElementWidth ( barElementColors , barWidth ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( ""Bar values (x,height,width, totalNoOf): "" + barX + "", "" + barHeight + "", "" + barWidth + noOfBars ) ; LOGGER . debug ( ""Value range: "" + valRange + "" height per value:"" + heightPerVal ) ; } int xCoord = barX ; for ( final Color elementColor : barElementColors ) { final BarElementDataModel element = getElement ( elementColor ) ; if ( element != null ) { final double aggrVal = element . getAggregationValue ( aggrMethod ) ; final Rectangle elementRect = BarDataModel . calculateBarRectangle ( baseLine , barHeight , barY , heightPerVal , aggrVal , xCoord , elementWidth ) ; element . setRectangle ( elementRect , calculator ) ; } xCoord += elementWidth + AbstractHistogramVizModel . SPACE_BETWEEN_ELEMENTS ; } LOGGER . 
debug ( ""Exiting setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; } +","private void setSideBySideRectangles ( final Rectangle2D bounds , final List < Color > barElementColors , final double valRange , final AggregationMethod aggrMethod , final int baseLine , final HistogramHiliteCalculator calculator ) { LOGGER . debug ( ""Entering setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; final int barHeight = ( int ) bounds . getHeight ( ) ; final double heightPerVal = barHeight / valRange ; final int barX = ( int ) bounds . getX ( ) ; final int barY = ( int ) bounds . getY ( ) ; final int barWidth = ( int ) bounds . getWidth ( ) ; final int noOfBars = barElementColors . size ( ) ; final int elementWidth = calculateSideBySideElementWidth ( barElementColors , barWidth ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( ""Bar values (x,height,width, totalNoOf): "" + barX + "", "" + barHeight + "", "" + barWidth + noOfBars ) ; LOGGER . debug ( ""Value range: "" + valRange + "" height per value:"" + heightPerVal ) ; } int xCoord = barX ; for ( final Color elementColor : barElementColors ) { final BarElementDataModel element = getElement ( elementColor ) ; if ( element != null ) { final double aggrVal = element . getAggregationValue ( aggrMethod ) ; final Rectangle elementRect = BarDataModel . calculateBarRectangle ( baseLine , barHeight , barY , heightPerVal , aggrVal , xCoord , elementWidth ) ; element . setRectangle ( elementRect , calculator ) ; } xCoord += elementWidth + AbstractHistogramVizModel . SPACE_BETWEEN_ELEMENTS ; } LOGGER . debug ( ""Exiting setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; } +" +247,"private void setSideBySideRectangles ( final Rectangle2D bounds , final List < Color > barElementColors , final double valRange , final AggregationMethod aggrMethod , final int baseLine , final HistogramHiliteCalculator calculator ) { LOGGER . debug ( ""Entering setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; final int barHeight = ( int ) bounds . getHeight ( ) ; final double heightPerVal = barHeight / valRange ; final int barX = ( int ) bounds . getX ( ) ; final int barY = ( int ) bounds . getY ( ) ; final int barWidth = ( int ) bounds . getWidth ( ) ; final int noOfBars = barElementColors . size ( ) ; final int elementWidth = calculateSideBySideElementWidth ( barElementColors , barWidth ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( ""Value range: "" + valRange + "" height per value:"" + heightPerVal ) ; } int xCoord = barX ; for ( final Color elementColor : barElementColors ) { final BarElementDataModel element = getElement ( elementColor ) ; if ( element != null ) { final double aggrVal = element . getAggregationValue ( aggrMethod ) ; final Rectangle elementRect = BarDataModel . calculateBarRectangle ( baseLine , barHeight , barY , heightPerVal , aggrVal , xCoord , elementWidth ) ; element . setRectangle ( elementRect , calculator ) ; } xCoord += elementWidth + AbstractHistogramVizModel . SPACE_BETWEEN_ELEMENTS ; } LOGGER . 
debug ( ""Exiting setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; } +","private void setSideBySideRectangles ( final Rectangle2D bounds , final List < Color > barElementColors , final double valRange , final AggregationMethod aggrMethod , final int baseLine , final HistogramHiliteCalculator calculator ) { LOGGER . debug ( ""Entering setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; final int barHeight = ( int ) bounds . getHeight ( ) ; final double heightPerVal = barHeight / valRange ; final int barX = ( int ) bounds . getX ( ) ; final int barY = ( int ) bounds . getY ( ) ; final int barWidth = ( int ) bounds . getWidth ( ) ; final int noOfBars = barElementColors . size ( ) ; final int elementWidth = calculateSideBySideElementWidth ( barElementColors , barWidth ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( ""Bar values (x,height,width, totalNoOf): "" + barX + "", "" + barHeight + "", "" + barWidth + noOfBars ) ; LOGGER . debug ( ""Value range: "" + valRange + "" height per value:"" + heightPerVal ) ; } int xCoord = barX ; for ( final Color elementColor : barElementColors ) { final BarElementDataModel element = getElement ( elementColor ) ; if ( element != null ) { final double aggrVal = element . getAggregationValue ( aggrMethod ) ; final Rectangle elementRect = BarDataModel . calculateBarRectangle ( baseLine , barHeight , barY , heightPerVal , aggrVal , xCoord , elementWidth ) ; element . setRectangle ( elementRect , calculator ) ; } xCoord += elementWidth + AbstractHistogramVizModel . SPACE_BETWEEN_ELEMENTS ; } LOGGER . debug ( ""Exiting setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; } +" +248,"private void setSideBySideRectangles ( final Rectangle2D bounds , final List < Color > barElementColors , final double valRange , final AggregationMethod aggrMethod , final int baseLine , final HistogramHiliteCalculator calculator ) { LOGGER . debug ( ""Entering setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; final int barHeight = ( int ) bounds . getHeight ( ) ; final double heightPerVal = barHeight / valRange ; final int barX = ( int ) bounds . getX ( ) ; final int barY = ( int ) bounds . getY ( ) ; final int barWidth = ( int ) bounds . getWidth ( ) ; final int noOfBars = barElementColors . size ( ) ; final int elementWidth = calculateSideBySideElementWidth ( barElementColors , barWidth ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( ""Bar values (x,height,width, totalNoOf): "" + barX + "", "" + barHeight + "", "" + barWidth + noOfBars ) ; } int xCoord = barX ; for ( final Color elementColor : barElementColors ) { final BarElementDataModel element = getElement ( elementColor ) ; if ( element != null ) { final double aggrVal = element . getAggregationValue ( aggrMethod ) ; final Rectangle elementRect = BarDataModel . calculateBarRectangle ( baseLine , barHeight , barY , heightPerVal , aggrVal , xCoord , elementWidth ) ; element . setRectangle ( elementRect , calculator ) ; } xCoord += elementWidth + AbstractHistogramVizModel . SPACE_BETWEEN_ELEMENTS ; } LOGGER . 
debug ( ""Exiting setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; } +","private void setSideBySideRectangles ( final Rectangle2D bounds , final List < Color > barElementColors , final double valRange , final AggregationMethod aggrMethod , final int baseLine , final HistogramHiliteCalculator calculator ) { LOGGER . debug ( ""Entering setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; final int barHeight = ( int ) bounds . getHeight ( ) ; final double heightPerVal = barHeight / valRange ; final int barX = ( int ) bounds . getX ( ) ; final int barY = ( int ) bounds . getY ( ) ; final int barWidth = ( int ) bounds . getWidth ( ) ; final int noOfBars = barElementColors . size ( ) ; final int elementWidth = calculateSideBySideElementWidth ( barElementColors , barWidth ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( ""Bar values (x,height,width, totalNoOf): "" + barX + "", "" + barHeight + "", "" + barWidth + noOfBars ) ; LOGGER . debug ( ""Value range: "" + valRange + "" height per value:"" + heightPerVal ) ; } int xCoord = barX ; for ( final Color elementColor : barElementColors ) { final BarElementDataModel element = getElement ( elementColor ) ; if ( element != null ) { final double aggrVal = element . getAggregationValue ( aggrMethod ) ; final Rectangle elementRect = BarDataModel . calculateBarRectangle ( baseLine , barHeight , barY , heightPerVal , aggrVal , xCoord , elementWidth ) ; element . setRectangle ( elementRect , calculator ) ; } xCoord += elementWidth + AbstractHistogramVizModel . SPACE_BETWEEN_ELEMENTS ; } LOGGER . debug ( ""Exiting setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; } +" +249,"private void setSideBySideRectangles ( final Rectangle2D bounds , final List < Color > barElementColors , final double valRange , final AggregationMethod aggrMethod , final int baseLine , final HistogramHiliteCalculator calculator ) { LOGGER . debug ( ""Entering setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; final int barHeight = ( int ) bounds . getHeight ( ) ; final double heightPerVal = barHeight / valRange ; final int barX = ( int ) bounds . getX ( ) ; final int barY = ( int ) bounds . getY ( ) ; final int barWidth = ( int ) bounds . getWidth ( ) ; final int noOfBars = barElementColors . size ( ) ; final int elementWidth = calculateSideBySideElementWidth ( barElementColors , barWidth ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( ""Bar values (x,height,width, totalNoOf): "" + barX + "", "" + barHeight + "", "" + barWidth + noOfBars ) ; LOGGER . debug ( ""Value range: "" + valRange + "" height per value:"" + heightPerVal ) ; } int xCoord = barX ; for ( final Color elementColor : barElementColors ) { final BarElementDataModel element = getElement ( elementColor ) ; if ( element != null ) { final double aggrVal = element . getAggregationValue ( aggrMethod ) ; final Rectangle elementRect = BarDataModel . calculateBarRectangle ( baseLine , barHeight , barY , heightPerVal , aggrVal , xCoord , elementWidth ) ; element . setRectangle ( elementRect , calculator ) ; } xCoord += elementWidth + AbstractHistogramVizModel . 
SPACE_BETWEEN_ELEMENTS ; } } +","private void setSideBySideRectangles ( final Rectangle2D bounds , final List < Color > barElementColors , final double valRange , final AggregationMethod aggrMethod , final int baseLine , final HistogramHiliteCalculator calculator ) { LOGGER . debug ( ""Entering setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; final int barHeight = ( int ) bounds . getHeight ( ) ; final double heightPerVal = barHeight / valRange ; final int barX = ( int ) bounds . getX ( ) ; final int barY = ( int ) bounds . getY ( ) ; final int barWidth = ( int ) bounds . getWidth ( ) ; final int noOfBars = barElementColors . size ( ) ; final int elementWidth = calculateSideBySideElementWidth ( barElementColors , barWidth ) ; if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( ""Bar values (x,height,width, totalNoOf): "" + barX + "", "" + barHeight + "", "" + barWidth + noOfBars ) ; LOGGER . debug ( ""Value range: "" + valRange + "" height per value:"" + heightPerVal ) ; } int xCoord = barX ; for ( final Color elementColor : barElementColors ) { final BarElementDataModel element = getElement ( elementColor ) ; if ( element != null ) { final double aggrVal = element . getAggregationValue ( aggrMethod ) ; final Rectangle elementRect = BarDataModel . calculateBarRectangle ( baseLine , barHeight , barY , heightPerVal , aggrVal , xCoord , elementWidth ) ; element . setRectangle ( elementRect , calculator ) ; } xCoord += elementWidth + AbstractHistogramVizModel . SPACE_BETWEEN_ELEMENTS ; } LOGGER . debug ( ""Exiting setSideBySideRectangles"" + ""(bounds, barElementColors, valRange, aggrMethod, baseLine) "" + ""of class BarDataModel."" ) ; } +" +250,"CacheSerializableRunnable getCacheSerializableRunnableForIndexUsageCheck ( ) { SerializableRunnable PrIndexCheck = new CacheSerializableRunnable ( ""PrIndexCheck"" ) { @ Override public void run2 ( ) { Cache cache = getCache ( ) ; QueryService qs = cache . getQueryService ( ) ; LogWriter logger = cache . getLogger ( ) ; Collection indexes = qs . getIndexes ( ) ; Iterator it = indexes . iterator ( ) ; while ( it . hasNext ( ) ) { PartitionedIndex ind = ( PartitionedIndex ) it . next ( ) ; int indexUsageWithSizeEstimation = 3 ; assertEquals ( 6 , ind . getStatistics ( ) . getTotalUses ( ) ) ; } } } ; return ( CacheSerializableRunnable ) PrIndexCheck ; } +","CacheSerializableRunnable getCacheSerializableRunnableForIndexUsageCheck ( ) { SerializableRunnable PrIndexCheck = new CacheSerializableRunnable ( ""PrIndexCheck"" ) { @ Override public void run2 ( ) { Cache cache = getCache ( ) ; QueryService qs = cache . getQueryService ( ) ; LogWriter logger = cache . getLogger ( ) ; Collection indexes = qs . getIndexes ( ) ; Iterator it = indexes . iterator ( ) ; while ( it . hasNext ( ) ) { PartitionedIndex ind = ( PartitionedIndex ) it . next ( ) ; int indexUsageWithSizeEstimation = 3 ; logger . info ( ""index uses for "" + ind . getNumberOfIndexedBuckets ( ) + "" index "" + ind . getName ( ) + "": "" + ind . getStatistics ( ) . getTotalUses ( ) ) ; assertEquals ( 6 , ind . getStatistics ( ) . getTotalUses ( ) ) ; } } } ; return ( CacheSerializableRunnable ) PrIndexCheck ; } +" +251,"public CommerceChannelHealthStatus getCommerceChannelHealthStatus ( String key ) { if ( Validator . isNull ( key ) ) { return null ; } ServiceWrapper < CommerceChannelHealthStatus > commerceChannelHealthStatusServiceWrapper = _commerceChannelHealthStatusRegistryMap . 
getService ( key ) ; if ( commerceChannelHealthStatusServiceWrapper == null ) { if ( _log . isDebugEnabled ( ) ) { } return null ; } return commerceChannelHealthStatusServiceWrapper . getService ( ) ; } +","public CommerceChannelHealthStatus getCommerceChannelHealthStatus ( String key ) { if ( Validator . isNull ( key ) ) { return null ; } ServiceWrapper < CommerceChannelHealthStatus > commerceChannelHealthStatusServiceWrapper = _commerceChannelHealthStatusRegistryMap . getService ( key ) ; if ( commerceChannelHealthStatusServiceWrapper == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""No commerce health status registered with key "" + key ) ; } return null ; } return commerceChannelHealthStatusServiceWrapper . getService ( ) ; } +" +252,"public void info ( String message , Object ... params ) { if ( params != null && params . length > 0 ) { message = String . format ( message , params ) ; } } +","public void info ( String message , Object ... params ) { if ( params != null && params . length > 0 ) { message = String . format ( message , params ) ; } logger . info ( message ) ; } +" +253,"protected Dump dumpFile ( final Archive < ? > archive ) { final String tmpDir = configuration . getAppWorkingDir ( ) ; Files . deleteOnExit ( new File ( tmpDir ) ) ; File file ; if ( configuration . isSingleDumpByArchiveName ( ) ) { file = new File ( tmpDir + File . separator + archive . getName ( ) ) ; Files . deleteOnExit ( file ) ; } else { int i = 0 ; do { file = new File ( tmpDir + File . separator + i ++ + File . separator + archive . getName ( ) ) ; } while ( file . getParentFile ( ) . exists ( ) ) ; Files . deleteOnExit ( file . getParentFile ( ) ) ; } if ( ! file . getParentFile ( ) . exists ( ) && ! file . getParentFile ( ) . mkdirs ( ) ) { } final Assignable finalArchive ; if ( isTestable ( archive , deployment . get ( ) ) ) { finalArchive = archiveWithTestInfo ( archive ) ; } else { finalArchive = archive ; } long size = - 1 ; if ( file . exists ( ) ) { size = file . length ( ) ; } final boolean created ; if ( ! configuration . isSingleDumpByArchiveName ( ) || ! file . exists ( ) ) { finalArchive . as ( ZipExporter . class ) . exportTo ( file , true ) ; created = true ; } else { created = false ; } if ( size > 0 && size != file . length ( ) ) { LOGGER . warning ( ""\nFile overwritten but size doesn't match: (now) "" + file . length ( ) + ""/(before) "" + size + "" name="" + file . getName ( ) + ( configuration . isSingleDumpByArchiveName ( ) ? "" maybe set singleDumpByArchiveName to false"" : """" ) + ""\n"" ) ; } return new Dump ( file , created ) ; } +","protected Dump dumpFile ( final Archive < ? > archive ) { final String tmpDir = configuration . getAppWorkingDir ( ) ; Files . deleteOnExit ( new File ( tmpDir ) ) ; File file ; if ( configuration . isSingleDumpByArchiveName ( ) ) { file = new File ( tmpDir + File . separator + archive . getName ( ) ) ; Files . deleteOnExit ( file ) ; } else { int i = 0 ; do { file = new File ( tmpDir + File . separator + i ++ + File . separator + archive . getName ( ) ) ; } while ( file . getParentFile ( ) . exists ( ) ) ; Files . deleteOnExit ( file . getParentFile ( ) ) ; } if ( ! file . getParentFile ( ) . exists ( ) && ! file . getParentFile ( ) . mkdirs ( ) ) { LOGGER . warning ( ""can't create "" + file . getParent ( ) ) ; } final Assignable finalArchive ; if ( isTestable ( archive , deployment . get ( ) ) ) { finalArchive = archiveWithTestInfo ( archive ) ; } else { finalArchive = archive ; } long size = - 1 ; if ( file . 
exists ( ) ) { size = file . length ( ) ; } final boolean created ; if ( ! configuration . isSingleDumpByArchiveName ( ) || ! file . exists ( ) ) { finalArchive . as ( ZipExporter . class ) . exportTo ( file , true ) ; created = true ; } else { created = false ; } if ( size > 0 && size != file . length ( ) ) { LOGGER . warning ( ""\nFile overwritten but size doesn't match: (now) "" + file . length ( ) + ""/(before) "" + size + "" name="" + file . getName ( ) + ( configuration . isSingleDumpByArchiveName ( ) ? "" maybe set singleDumpByArchiveName to false"" : """" ) + ""\n"" ) ; } return new Dump ( file , created ) ; } +" +254,"protected Dump dumpFile ( final Archive < ? > archive ) { final String tmpDir = configuration . getAppWorkingDir ( ) ; Files . deleteOnExit ( new File ( tmpDir ) ) ; File file ; if ( configuration . isSingleDumpByArchiveName ( ) ) { file = new File ( tmpDir + File . separator + archive . getName ( ) ) ; Files . deleteOnExit ( file ) ; } else { int i = 0 ; do { file = new File ( tmpDir + File . separator + i ++ + File . separator + archive . getName ( ) ) ; } while ( file . getParentFile ( ) . exists ( ) ) ; Files . deleteOnExit ( file . getParentFile ( ) ) ; } if ( ! file . getParentFile ( ) . exists ( ) && ! file . getParentFile ( ) . mkdirs ( ) ) { LOGGER . warning ( ""can't create "" + file . getParent ( ) ) ; } final Assignable finalArchive ; if ( isTestable ( archive , deployment . get ( ) ) ) { finalArchive = archiveWithTestInfo ( archive ) ; } else { finalArchive = archive ; } long size = - 1 ; if ( file . exists ( ) ) { size = file . length ( ) ; } final boolean created ; if ( ! configuration . isSingleDumpByArchiveName ( ) || ! file . exists ( ) ) { finalArchive . as ( ZipExporter . class ) . exportTo ( file , true ) ; created = true ; } else { created = false ; } if ( size > 0 && size != file . length ( ) ) { } return new Dump ( file , created ) ; } +","protected Dump dumpFile ( final Archive < ? > archive ) { final String tmpDir = configuration . getAppWorkingDir ( ) ; Files . deleteOnExit ( new File ( tmpDir ) ) ; File file ; if ( configuration . isSingleDumpByArchiveName ( ) ) { file = new File ( tmpDir + File . separator + archive . getName ( ) ) ; Files . deleteOnExit ( file ) ; } else { int i = 0 ; do { file = new File ( tmpDir + File . separator + i ++ + File . separator + archive . getName ( ) ) ; } while ( file . getParentFile ( ) . exists ( ) ) ; Files . deleteOnExit ( file . getParentFile ( ) ) ; } if ( ! file . getParentFile ( ) . exists ( ) && ! file . getParentFile ( ) . mkdirs ( ) ) { LOGGER . warning ( ""can't create "" + file . getParent ( ) ) ; } final Assignable finalArchive ; if ( isTestable ( archive , deployment . get ( ) ) ) { finalArchive = archiveWithTestInfo ( archive ) ; } else { finalArchive = archive ; } long size = - 1 ; if ( file . exists ( ) ) { size = file . length ( ) ; } final boolean created ; if ( ! configuration . isSingleDumpByArchiveName ( ) || ! file . exists ( ) ) { finalArchive . as ( ZipExporter . class ) . exportTo ( file , true ) ; created = true ; } else { created = false ; } if ( size > 0 && size != file . length ( ) ) { LOGGER . warning ( ""\nFile overwritten but size doesn't match: (now) "" + file . length ( ) + ""/(before) "" + size + "" name="" + file . getName ( ) + ( configuration . isSingleDumpByArchiveName ( ) ? 
"" maybe set singleDumpByArchiveName to false"" : """" ) + ""\n"" ) ; } return new Dump ( file , created ) ; } +" +255,"public static void deleteProperty ( String key ) throws IOException , KuraException { Properties properties = KuranetConfig . getProperties ( ) ; if ( properties . containsKey ( key ) ) { properties . remove ( key ) ; KuranetConfig . storeProperties ( properties ) ; } else { logger . debug ( ""Property does not exist {}"" , key ) ; } } +","public static void deleteProperty ( String key ) throws IOException , KuraException { Properties properties = KuranetConfig . getProperties ( ) ; if ( properties . containsKey ( key ) ) { logger . debug ( ""Deleting property {}"" , key ) ; properties . remove ( key ) ; KuranetConfig . storeProperties ( properties ) ; } else { logger . debug ( ""Property does not exist {}"" , key ) ; } } +" +256,"public static void deleteProperty ( String key ) throws IOException , KuraException { Properties properties = KuranetConfig . getProperties ( ) ; if ( properties . containsKey ( key ) ) { logger . debug ( ""Deleting property {}"" , key ) ; properties . remove ( key ) ; KuranetConfig . storeProperties ( properties ) ; } else { } } +","public static void deleteProperty ( String key ) throws IOException , KuraException { Properties properties = KuranetConfig . getProperties ( ) ; if ( properties . containsKey ( key ) ) { logger . debug ( ""Deleting property {}"" , key ) ; properties . remove ( key ) ; KuranetConfig . storeProperties ( properties ) ; } else { logger . debug ( ""Property does not exist {}"" , key ) ; } } +" +257,"public static void error ( Object msg , Throwable ex ) { Logger logger = LogUtil . getLogger ( ) ; if ( logger != null && logger . isErrorEnabled ( ) ) { } } +","public static void error ( Object msg , Throwable ex ) { Logger logger = LogUtil . getLogger ( ) ; if ( logger != null && logger . isErrorEnabled ( ) ) { logger . error ( LogUtil . getMsg ( msg ) , ex ) ; } } +" +258,"@ Test public void b_getExperimentsInProject ( ) { GetExperimentsInProject getExperiment = GetExperimentsInProject . newBuilder ( ) . setProjectId ( project . getId ( ) ) . build ( ) ; GetExperimentsInProject . Response experimentResponse = experimentServiceStub . getExperimentsInProject ( getExperiment ) ; LOGGER . info ( ""GetExperimentsInProject.Response "" + experimentResponse . getExperimentsCount ( ) ) ; assertEquals ( ""Experiments count not match with expected experiment count"" , 1 , experimentResponse . getExperimentsList ( ) . size ( ) ) ; assertEquals ( ""Experiment list not contain expected experiment"" , experiment , experimentResponse . getExperimentsList ( ) . get ( 0 ) ) ; LOGGER . info ( ""Get Experiment of project test stop................................"" ) ; } +","@ Test public void b_getExperimentsInProject ( ) { LOGGER . info ( ""Get Experiment of project test start................................"" ) ; GetExperimentsInProject getExperiment = GetExperimentsInProject . newBuilder ( ) . setProjectId ( project . getId ( ) ) . build ( ) ; GetExperimentsInProject . Response experimentResponse = experimentServiceStub . getExperimentsInProject ( getExperiment ) ; LOGGER . info ( ""GetExperimentsInProject.Response "" + experimentResponse . getExperimentsCount ( ) ) ; assertEquals ( ""Experiments count not match with expected experiment count"" , 1 , experimentResponse . getExperimentsList ( ) . size ( ) ) ; assertEquals ( ""Experiment list not contain expected experiment"" , experiment , experimentResponse . getExperimentsList ( ) . 
get ( 0 ) ) ; LOGGER . info ( ""Get Experiment of project test stop................................"" ) ; } +" +259,"@ Test public void b_getExperimentsInProject ( ) { LOGGER . info ( ""Get Experiment of project test start................................"" ) ; GetExperimentsInProject getExperiment = GetExperimentsInProject . newBuilder ( ) . setProjectId ( project . getId ( ) ) . build ( ) ; GetExperimentsInProject . Response experimentResponse = experimentServiceStub . getExperimentsInProject ( getExperiment ) ; assertEquals ( ""Experiments count not match with expected experiment count"" , 1 , experimentResponse . getExperimentsList ( ) . size ( ) ) ; assertEquals ( ""Experiment list not contain expected experiment"" , experiment , experimentResponse . getExperimentsList ( ) . get ( 0 ) ) ; LOGGER . info ( ""Get Experiment of project test stop................................"" ) ; } +","@ Test public void b_getExperimentsInProject ( ) { LOGGER . info ( ""Get Experiment of project test start................................"" ) ; GetExperimentsInProject getExperiment = GetExperimentsInProject . newBuilder ( ) . setProjectId ( project . getId ( ) ) . build ( ) ; GetExperimentsInProject . Response experimentResponse = experimentServiceStub . getExperimentsInProject ( getExperiment ) ; LOGGER . info ( ""GetExperimentsInProject.Response "" + experimentResponse . getExperimentsCount ( ) ) ; assertEquals ( ""Experiments count not match with expected experiment count"" , 1 , experimentResponse . getExperimentsList ( ) . size ( ) ) ; assertEquals ( ""Experiment list not contain expected experiment"" , experiment , experimentResponse . getExperimentsList ( ) . get ( 0 ) ) ; LOGGER . info ( ""Get Experiment of project test stop................................"" ) ; } +" +260,"@ Test public void b_getExperimentsInProject ( ) { LOGGER . info ( ""Get Experiment of project test start................................"" ) ; GetExperimentsInProject getExperiment = GetExperimentsInProject . newBuilder ( ) . setProjectId ( project . getId ( ) ) . build ( ) ; GetExperimentsInProject . Response experimentResponse = experimentServiceStub . getExperimentsInProject ( getExperiment ) ; LOGGER . info ( ""GetExperimentsInProject.Response "" + experimentResponse . getExperimentsCount ( ) ) ; assertEquals ( ""Experiments count not match with expected experiment count"" , 1 , experimentResponse . getExperimentsList ( ) . size ( ) ) ; assertEquals ( ""Experiment list not contain expected experiment"" , experiment , experimentResponse . getExperimentsList ( ) . get ( 0 ) ) ; } +","@ Test public void b_getExperimentsInProject ( ) { LOGGER . info ( ""Get Experiment of project test start................................"" ) ; GetExperimentsInProject getExperiment = GetExperimentsInProject . newBuilder ( ) . setProjectId ( project . getId ( ) ) . build ( ) ; GetExperimentsInProject . Response experimentResponse = experimentServiceStub . getExperimentsInProject ( getExperiment ) ; LOGGER . info ( ""GetExperimentsInProject.Response "" + experimentResponse . getExperimentsCount ( ) ) ; assertEquals ( ""Experiments count not match with expected experiment count"" , 1 , experimentResponse . getExperimentsList ( ) . size ( ) ) ; assertEquals ( ""Experiment list not contain expected experiment"" , experiment , experimentResponse . getExperimentsList ( ) . get ( 0 ) ) ; LOGGER . 
info ( ""Get Experiment of project test stop................................"" ) ; } +" +261,"private void check3VehicleJourney1 ( Context context , VehicleJourney vj , ValidationParameters parameters ) { if ( isEmpty ( vj . getVehicleJourneyAtStops ( ) ) ) { return ; } long maxDiffTime = parameters . getInterStopDurationMax ( ) ; List < VehicleJourneyAtStop > vjasList = vj . getVehicleJourneyAtStops ( ) ; for ( VehicleJourneyAtStop vjas : vjasList ) { long diffTime = Math . abs ( diffTime ( vjas . getArrivalTime ( ) , vjas . getArrivalDayOffset ( ) , vjas . getDepartureTime ( ) , vjas . getDepartureDayOffset ( ) ) ) ; if ( diffTime > maxDiffTime ) { DataLocation location = buildLocation ( context , vj ) ; DataLocation target = buildLocation ( context , vjas . getStopPoint ( ) . getContainedInStopArea ( ) ) ; ValidationReporter reporter = ValidationReporter . Factory . getInstance ( ) ; reporter . addCheckPointReportError ( context , VEHICLE_JOURNEY_1 , location , Long . toString ( diffTime ) , Long . toString ( maxDiffTime ) , target ) ; } } } +","private void check3VehicleJourney1 ( Context context , VehicleJourney vj , ValidationParameters parameters ) { if ( isEmpty ( vj . getVehicleJourneyAtStops ( ) ) ) { log . error ( ""vehicleJourney "" + vj . getObjectId ( ) + "" has no vehicleJourneyAtStop"" ) ; return ; } long maxDiffTime = parameters . getInterStopDurationMax ( ) ; List < VehicleJourneyAtStop > vjasList = vj . getVehicleJourneyAtStops ( ) ; for ( VehicleJourneyAtStop vjas : vjasList ) { long diffTime = Math . abs ( diffTime ( vjas . getArrivalTime ( ) , vjas . getArrivalDayOffset ( ) , vjas . getDepartureTime ( ) , vjas . getDepartureDayOffset ( ) ) ) ; if ( diffTime > maxDiffTime ) { DataLocation location = buildLocation ( context , vj ) ; DataLocation target = buildLocation ( context , vjas . getStopPoint ( ) . getContainedInStopArea ( ) ) ; ValidationReporter reporter = ValidationReporter . Factory . getInstance ( ) ; reporter . addCheckPointReportError ( context , VEHICLE_JOURNEY_1 , location , Long . toString ( diffTime ) , Long . toString ( maxDiffTime ) , target ) ; } } } +" +262,"@ Test public void testFirst ( ) throws Exception { Flowable . just ( ""Camel"" , ""rocks"" , ""streams"" , ""as"" , ""well"" ) . map ( String :: toUpperCase ) . doOnNext ( LOG :: info ) . subscribe ( ) ; } +","@ Test public void testFirst ( ) throws Exception { LOG . info ( ""Starting RX-Java2 Flowable first"" ) ; Flowable . just ( ""Camel"" , ""rocks"" , ""streams"" , ""as"" , ""well"" ) . map ( String :: toUpperCase ) . doOnNext ( LOG :: info ) . subscribe ( ) ; } +" +263,"private void addCommandReceptionTimer ( final CoapContext context , final AtomicBoolean requestProcessed , final Handler < AsyncResult < Void > > responseReady , final long delaySecs , final Span waitForCommandSpan ) { final Long timerId = vertx . setTimer ( delaySecs * 1000L , id -> { if ( requestProcessed . compareAndSet ( false , true ) ) { setTtdStatus ( context , TtdStatus . EXPIRED ) ; waitForCommandSpan . log ( String . format ( ""time to wait for command expired (%ds)"" , delaySecs ) ) ; responseReady . handle ( Future . succeededFuture ( ) ) ; } else { LOG . trace ( ""response already sent, nothing to do ..."" ) ; } } ) ; LOG . trace ( ""adding command reception timer [id: {}]"" , timerId ) ; context . 
put ( KEY_TIMER_ID , timerId ) ; } +","private void addCommandReceptionTimer ( final CoapContext context , final AtomicBoolean requestProcessed , final Handler < AsyncResult < Void > > responseReady , final long delaySecs , final Span waitForCommandSpan ) { final Long timerId = vertx . setTimer ( delaySecs * 1000L , id -> { LOG . trace ( ""time to wait [{}s] for command expired [timer id: {}]"" , delaySecs , id ) ; if ( requestProcessed . compareAndSet ( false , true ) ) { setTtdStatus ( context , TtdStatus . EXPIRED ) ; waitForCommandSpan . log ( String . format ( ""time to wait for command expired (%ds)"" , delaySecs ) ) ; responseReady . handle ( Future . succeededFuture ( ) ) ; } else { LOG . trace ( ""response already sent, nothing to do ..."" ) ; } } ) ; LOG . trace ( ""adding command reception timer [id: {}]"" , timerId ) ; context . put ( KEY_TIMER_ID , timerId ) ; } +" +264,"private void addCommandReceptionTimer ( final CoapContext context , final AtomicBoolean requestProcessed , final Handler < AsyncResult < Void > > responseReady , final long delaySecs , final Span waitForCommandSpan ) { final Long timerId = vertx . setTimer ( delaySecs * 1000L , id -> { LOG . trace ( ""time to wait [{}s] for command expired [timer id: {}]"" , delaySecs , id ) ; if ( requestProcessed . compareAndSet ( false , true ) ) { setTtdStatus ( context , TtdStatus . EXPIRED ) ; waitForCommandSpan . log ( String . format ( ""time to wait for command expired (%ds)"" , delaySecs ) ) ; responseReady . handle ( Future . succeededFuture ( ) ) ; } else { } } ) ; LOG . trace ( ""adding command reception timer [id: {}]"" , timerId ) ; context . put ( KEY_TIMER_ID , timerId ) ; } +","private void addCommandReceptionTimer ( final CoapContext context , final AtomicBoolean requestProcessed , final Handler < AsyncResult < Void > > responseReady , final long delaySecs , final Span waitForCommandSpan ) { final Long timerId = vertx . setTimer ( delaySecs * 1000L , id -> { LOG . trace ( ""time to wait [{}s] for command expired [timer id: {}]"" , delaySecs , id ) ; if ( requestProcessed . compareAndSet ( false , true ) ) { setTtdStatus ( context , TtdStatus . EXPIRED ) ; waitForCommandSpan . log ( String . format ( ""time to wait for command expired (%ds)"" , delaySecs ) ) ; responseReady . handle ( Future . succeededFuture ( ) ) ; } else { LOG . trace ( ""response already sent, nothing to do ..."" ) ; } } ) ; LOG . trace ( ""adding command reception timer [id: {}]"" , timerId ) ; context . put ( KEY_TIMER_ID , timerId ) ; } +" +265,"private void addCommandReceptionTimer ( final CoapContext context , final AtomicBoolean requestProcessed , final Handler < AsyncResult < Void > > responseReady , final long delaySecs , final Span waitForCommandSpan ) { final Long timerId = vertx . setTimer ( delaySecs * 1000L , id -> { LOG . trace ( ""time to wait [{}s] for command expired [timer id: {}]"" , delaySecs , id ) ; if ( requestProcessed . compareAndSet ( false , true ) ) { setTtdStatus ( context , TtdStatus . EXPIRED ) ; waitForCommandSpan . log ( String . format ( ""time to wait for command expired (%ds)"" , delaySecs ) ) ; responseReady . handle ( Future . succeededFuture ( ) ) ; } else { LOG . trace ( ""response already sent, nothing to do ..."" ) ; } } ) ; context . 
put ( KEY_TIMER_ID , timerId ) ; } +","private void addCommandReceptionTimer ( final CoapContext context , final AtomicBoolean requestProcessed , final Handler < AsyncResult < Void > > responseReady , final long delaySecs , final Span waitForCommandSpan ) { final Long timerId = vertx . setTimer ( delaySecs * 1000L , id -> { LOG . trace ( ""time to wait [{}s] for command expired [timer id: {}]"" , delaySecs , id ) ; if ( requestProcessed . compareAndSet ( false , true ) ) { setTtdStatus ( context , TtdStatus . EXPIRED ) ; waitForCommandSpan . log ( String . format ( ""time to wait for command expired (%ds)"" , delaySecs ) ) ; responseReady . handle ( Future . succeededFuture ( ) ) ; } else { LOG . trace ( ""response already sent, nothing to do ..."" ) ; } } ) ; LOG . trace ( ""adding command reception timer [id: {}]"" , timerId ) ; context . put ( KEY_TIMER_ID , timerId ) ; } +" +266,"public void sessionDestroyed ( HttpSessionEvent se ) { HttpSession session = se . getSession ( ) ; Context context = ( Context ) session . getAttribute ( Context . KEY ) ; String sid = ( String ) session . getAttribute ( ""sid"" ) ; if ( logger . isDebugEnabled ( ) ) { String key ; for ( @ SuppressWarnings ( ""unchecked"" ) Enumeration < String > e = session . getAttributeNames ( ) ; e . hasMoreElements ( ) ; ) { key = e . nextElement ( ) ; logger . debug ( "" "" + key + ""="" + session . getAttribute ( key ) ) ; } } WebUtils . getWac ( ) . publishEvent ( new SessionDestroyedEvent ( session , context , sid ) ) ; } +","public void sessionDestroyed ( HttpSessionEvent se ) { HttpSession session = se . getSession ( ) ; Context context = ( Context ) session . getAttribute ( Context . KEY ) ; String sid = ( String ) session . getAttribute ( ""sid"" ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""sessionDestroyed:sid="" + sid + "",session id="" + session . getId ( ) ) ; String key ; for ( @ SuppressWarnings ( ""unchecked"" ) Enumeration < String > e = session . getAttributeNames ( ) ; e . hasMoreElements ( ) ; ) { key = e . nextElement ( ) ; logger . debug ( "" "" + key + ""="" + session . getAttribute ( key ) ) ; } } WebUtils . getWac ( ) . publishEvent ( new SessionDestroyedEvent ( session , context , sid ) ) ; } +" +267,"public void sessionDestroyed ( HttpSessionEvent se ) { HttpSession session = se . getSession ( ) ; Context context = ( Context ) session . getAttribute ( Context . KEY ) ; String sid = ( String ) session . getAttribute ( ""sid"" ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""sessionDestroyed:sid="" + sid + "",session id="" + session . getId ( ) ) ; String key ; for ( @ SuppressWarnings ( ""unchecked"" ) Enumeration < String > e = session . getAttributeNames ( ) ; e . hasMoreElements ( ) ; ) { key = e . nextElement ( ) ; } } WebUtils . getWac ( ) . publishEvent ( new SessionDestroyedEvent ( session , context , sid ) ) ; } +","public void sessionDestroyed ( HttpSessionEvent se ) { HttpSession session = se . getSession ( ) ; Context context = ( Context ) session . getAttribute ( Context . KEY ) ; String sid = ( String ) session . getAttribute ( ""sid"" ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""sessionDestroyed:sid="" + sid + "",session id="" + session . getId ( ) ) ; String key ; for ( @ SuppressWarnings ( ""unchecked"" ) Enumeration < String > e = session . getAttributeNames ( ) ; e . hasMoreElements ( ) ; ) { key = e . nextElement ( ) ; logger . debug ( "" "" + key + ""="" + session . getAttribute ( key ) ) ; } } WebUtils . getWac ( ) . 
publishEvent ( new SessionDestroyedEvent ( session , context , sid ) ) ; } +" +268,"public static void checkIdentifierAgainstValidator ( String identifier , IdentifierValidator validator ) throws PatientIdentifierException { if ( StringUtils . isBlank ( identifier ) ) { throw new BlankIdentifierException ( ""PatientIdentifier.error.nullOrBlank"" ) ; } if ( validator == null ) { log . debug ( ""Validator is null, identifier passes."" ) ; return ; } try { if ( ! validator . isValid ( identifier ) ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.checkDigitWithParameter"" , identifier ) ) ; } } catch ( UnallowedIdentifierException e ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.unallowedIdentifier"" , identifier , validator . getName ( ) ) ) ; } log . debug ( ""The identifier passed validation."" ) ; } +","public static void checkIdentifierAgainstValidator ( String identifier , IdentifierValidator validator ) throws PatientIdentifierException { log . debug ( ""Checking identifier: "" + identifier + "" against validator: "" + validator ) ; if ( StringUtils . isBlank ( identifier ) ) { throw new BlankIdentifierException ( ""PatientIdentifier.error.nullOrBlank"" ) ; } if ( validator == null ) { log . debug ( ""Validator is null, identifier passes."" ) ; return ; } try { if ( ! validator . isValid ( identifier ) ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.checkDigitWithParameter"" , identifier ) ) ; } } catch ( UnallowedIdentifierException e ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.unallowedIdentifier"" , identifier , validator . getName ( ) ) ) ; } log . debug ( ""The identifier passed validation."" ) ; } +" +269,"public static void checkIdentifierAgainstValidator ( String identifier , IdentifierValidator validator ) throws PatientIdentifierException { log . debug ( ""Checking identifier: "" + identifier + "" against validator: "" + validator ) ; if ( StringUtils . isBlank ( identifier ) ) { throw new BlankIdentifierException ( ""PatientIdentifier.error.nullOrBlank"" ) ; } if ( validator == null ) { return ; } try { if ( ! validator . isValid ( identifier ) ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.checkDigitWithParameter"" , identifier ) ) ; } } catch ( UnallowedIdentifierException e ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.unallowedIdentifier"" , identifier , validator . getName ( ) ) ) ; } log . debug ( ""The identifier passed validation."" ) ; } +","public static void checkIdentifierAgainstValidator ( String identifier , IdentifierValidator validator ) throws PatientIdentifierException { log . debug ( ""Checking identifier: "" + identifier + "" against validator: "" + validator ) ; if ( StringUtils . isBlank ( identifier ) ) { throw new BlankIdentifierException ( ""PatientIdentifier.error.nullOrBlank"" ) ; } if ( validator == null ) { log . debug ( ""Validator is null, identifier passes."" ) ; return ; } try { if ( ! validator . isValid ( identifier ) ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.checkDigitWithParameter"" , identifier ) ) ; } } catch ( UnallowedIdentifierException e ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.unallowedIdentifier"" , identifier , validator . getName ( ) ) ) ; } log . 
debug ( ""The identifier passed validation."" ) ; } +" +270,"public static void checkIdentifierAgainstValidator ( String identifier , IdentifierValidator validator ) throws PatientIdentifierException { log . debug ( ""Checking identifier: "" + identifier + "" against validator: "" + validator ) ; if ( StringUtils . isBlank ( identifier ) ) { throw new BlankIdentifierException ( ""PatientIdentifier.error.nullOrBlank"" ) ; } if ( validator == null ) { log . debug ( ""Validator is null, identifier passes."" ) ; return ; } try { if ( ! validator . isValid ( identifier ) ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.checkDigitWithParameter"" , identifier ) ) ; } } catch ( UnallowedIdentifierException e ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.unallowedIdentifier"" , identifier , validator . getName ( ) ) ) ; } } +","public static void checkIdentifierAgainstValidator ( String identifier , IdentifierValidator validator ) throws PatientIdentifierException { log . debug ( ""Checking identifier: "" + identifier + "" against validator: "" + validator ) ; if ( StringUtils . isBlank ( identifier ) ) { throw new BlankIdentifierException ( ""PatientIdentifier.error.nullOrBlank"" ) ; } if ( validator == null ) { log . debug ( ""Validator is null, identifier passes."" ) ; return ; } try { if ( ! validator . isValid ( identifier ) ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.checkDigitWithParameter"" , identifier ) ) ; } } catch ( UnallowedIdentifierException e ) { throw new InvalidCheckDigitException ( getMessage ( ""PatientIdentifier.error.unallowedIdentifier"" , identifier , validator . getName ( ) ) ) ; } log . debug ( ""The identifier passed validation."" ) ; } +" +271,"protected void doStop ( ) throws Exception { super . doStop ( ) ; if ( stopClient && client != null && client . isConnected ( ) ) { String topic = getEndpoint ( ) . getTopic ( ) ; if ( getEndpoint ( ) . getConfiguration ( ) . isCleanStart ( ) ) { client . unsubscribe ( topic ) ; } else { LOG . debug ( ""Client: {} is durable so will not unsubscribe from topic: {}"" , clientId , topic ) ; } LOG . debug ( ""Disconnecting client: {} from broker: {}"" , clientId , getEndpoint ( ) . getConfiguration ( ) . getBrokerUrl ( ) ) ; client . disconnect ( ) ; } client = null ; } +","protected void doStop ( ) throws Exception { super . doStop ( ) ; if ( stopClient && client != null && client . isConnected ( ) ) { String topic = getEndpoint ( ) . getTopic ( ) ; if ( getEndpoint ( ) . getConfiguration ( ) . isCleanStart ( ) ) { LOG . debug ( ""Unsubscribing client: {} from topic: {}"" , clientId , topic ) ; client . unsubscribe ( topic ) ; } else { LOG . debug ( ""Client: {} is durable so will not unsubscribe from topic: {}"" , clientId , topic ) ; } LOG . debug ( ""Disconnecting client: {} from broker: {}"" , clientId , getEndpoint ( ) . getConfiguration ( ) . getBrokerUrl ( ) ) ; client . disconnect ( ) ; } client = null ; } +" +272,"protected void doStop ( ) throws Exception { super . doStop ( ) ; if ( stopClient && client != null && client . isConnected ( ) ) { String topic = getEndpoint ( ) . getTopic ( ) ; if ( getEndpoint ( ) . getConfiguration ( ) . isCleanStart ( ) ) { LOG . debug ( ""Unsubscribing client: {} from topic: {}"" , clientId , topic ) ; client . unsubscribe ( topic ) ; } else { } LOG . debug ( ""Disconnecting client: {} from broker: {}"" , clientId , getEndpoint ( ) . getConfiguration ( ) . getBrokerUrl ( ) ) ; client . 
disconnect ( ) ; } client = null ; } +","protected void doStop ( ) throws Exception { super . doStop ( ) ; if ( stopClient && client != null && client . isConnected ( ) ) { String topic = getEndpoint ( ) . getTopic ( ) ; if ( getEndpoint ( ) . getConfiguration ( ) . isCleanStart ( ) ) { LOG . debug ( ""Unsubscribing client: {} from topic: {}"" , clientId , topic ) ; client . unsubscribe ( topic ) ; } else { LOG . debug ( ""Client: {} is durable so will not unsubscribe from topic: {}"" , clientId , topic ) ; } LOG . debug ( ""Disconnecting client: {} from broker: {}"" , clientId , getEndpoint ( ) . getConfiguration ( ) . getBrokerUrl ( ) ) ; client . disconnect ( ) ; } client = null ; } +" +273,"protected void doStop ( ) throws Exception { super . doStop ( ) ; if ( stopClient && client != null && client . isConnected ( ) ) { String topic = getEndpoint ( ) . getTopic ( ) ; if ( getEndpoint ( ) . getConfiguration ( ) . isCleanStart ( ) ) { LOG . debug ( ""Unsubscribing client: {} from topic: {}"" , clientId , topic ) ; client . unsubscribe ( topic ) ; } else { LOG . debug ( ""Client: {} is durable so will not unsubscribe from topic: {}"" , clientId , topic ) ; } client . disconnect ( ) ; } client = null ; } +","protected void doStop ( ) throws Exception { super . doStop ( ) ; if ( stopClient && client != null && client . isConnected ( ) ) { String topic = getEndpoint ( ) . getTopic ( ) ; if ( getEndpoint ( ) . getConfiguration ( ) . isCleanStart ( ) ) { LOG . debug ( ""Unsubscribing client: {} from topic: {}"" , clientId , topic ) ; client . unsubscribe ( topic ) ; } else { LOG . debug ( ""Client: {} is durable so will not unsubscribe from topic: {}"" , clientId , topic ) ; } LOG . debug ( ""Disconnecting client: {} from broker: {}"" , clientId , getEndpoint ( ) . getConfiguration ( ) . getBrokerUrl ( ) ) ; client . disconnect ( ) ; } client = null ; } +" +274,"public void addReport ( Report report , boolean reportIsAnReportItem ) { if ( reportIsAnReportItem ) { reportsFromBeingProcessed . get ( report . getSourceObjectId ( ) ) . get ( report . getOutcomeObjectId ( ) ) . addReport ( report , false ) ; } else { String sourceObjectId = report . getSourceObjectId ( ) ; String outcomeObjectId = report . getOutcomeObjectId ( ) ; if ( StringUtils . isBlank ( sourceObjectId ) || StringUtils . isBlank ( outcomeObjectId ) ) { return ; } if ( Report . NO_SOURCE_OBJECT_ID . equals ( sourceObjectId ) ) { LOGGER . error ( ""Will not add report as source object id is not set!"" ) ; return ; } if ( aipIdToTransferredResourceIds . containsKey ( outcomeObjectId ) ) { if ( ! aipIdToTransferredResourceIds . get ( outcomeObjectId ) . contains ( sourceObjectId ) ) { aipIdToTransferredResourceIds . get ( outcomeObjectId ) . add ( sourceObjectId ) ; } } else { aipIdToTransferredResourceIds . computeIfAbsent ( outcomeObjectId , key -> new ArrayList < > ( ) ) . add ( sourceObjectId ) ; } if ( transferredResourceToAipIds . containsKey ( sourceObjectId ) ) { if ( ! transferredResourceToAipIds . get ( sourceObjectId ) . contains ( outcomeObjectId ) ) { transferredResourceToAipIds . get ( sourceObjectId ) . add ( outcomeObjectId ) ; } } else { transferredResourceToAipIds . computeIfAbsent ( sourceObjectId , key -> new ArrayList < > ( ) ) . add ( outcomeObjectId ) ; } if ( reportsFromBeingProcessed . get ( sourceObjectId ) != null ) { reportsFromBeingProcessed . get ( sourceObjectId ) . put ( outcomeObjectId , report ) ; allReports . get ( sourceObjectId ) . 
put ( outcomeObjectId , report ) ; } else { Map < String , Report > innerReports = new HashMap < > ( ) ; innerReports . put ( outcomeObjectId , report ) ; reportsFromBeingProcessed . put ( sourceObjectId , innerReports ) ; allReports . put ( sourceObjectId , innerReports ) ; } } } +","public void addReport ( Report report , boolean reportIsAnReportItem ) { if ( reportIsAnReportItem ) { reportsFromBeingProcessed . get ( report . getSourceObjectId ( ) ) . get ( report . getOutcomeObjectId ( ) ) . addReport ( report , false ) ; } else { String sourceObjectId = report . getSourceObjectId ( ) ; String outcomeObjectId = report . getOutcomeObjectId ( ) ; if ( StringUtils . isBlank ( sourceObjectId ) || StringUtils . isBlank ( outcomeObjectId ) ) { LOGGER . error ( ""Will not add report as both source & outcome object ids are blank!"" ) ; return ; } if ( Report . NO_SOURCE_OBJECT_ID . equals ( sourceObjectId ) ) { LOGGER . error ( ""Will not add report as source object id is not set!"" ) ; return ; } if ( aipIdToTransferredResourceIds . containsKey ( outcomeObjectId ) ) { if ( ! aipIdToTransferredResourceIds . get ( outcomeObjectId ) . contains ( sourceObjectId ) ) { aipIdToTransferredResourceIds . get ( outcomeObjectId ) . add ( sourceObjectId ) ; } } else { aipIdToTransferredResourceIds . computeIfAbsent ( outcomeObjectId , key -> new ArrayList < > ( ) ) . add ( sourceObjectId ) ; } if ( transferredResourceToAipIds . containsKey ( sourceObjectId ) ) { if ( ! transferredResourceToAipIds . get ( sourceObjectId ) . contains ( outcomeObjectId ) ) { transferredResourceToAipIds . get ( sourceObjectId ) . add ( outcomeObjectId ) ; } } else { transferredResourceToAipIds . computeIfAbsent ( sourceObjectId , key -> new ArrayList < > ( ) ) . add ( outcomeObjectId ) ; } if ( reportsFromBeingProcessed . get ( sourceObjectId ) != null ) { reportsFromBeingProcessed . get ( sourceObjectId ) . put ( outcomeObjectId , report ) ; allReports . get ( sourceObjectId ) . put ( outcomeObjectId , report ) ; } else { Map < String , Report > innerReports = new HashMap < > ( ) ; innerReports . put ( outcomeObjectId , report ) ; reportsFromBeingProcessed . put ( sourceObjectId , innerReports ) ; allReports . put ( sourceObjectId , innerReports ) ; } } } +" +275,"public void addReport ( Report report , boolean reportIsAnReportItem ) { if ( reportIsAnReportItem ) { reportsFromBeingProcessed . get ( report . getSourceObjectId ( ) ) . get ( report . getOutcomeObjectId ( ) ) . addReport ( report , false ) ; } else { String sourceObjectId = report . getSourceObjectId ( ) ; String outcomeObjectId = report . getOutcomeObjectId ( ) ; if ( StringUtils . isBlank ( sourceObjectId ) || StringUtils . isBlank ( outcomeObjectId ) ) { LOGGER . error ( ""Will not add report as both source & outcome object ids are blank!"" ) ; return ; } if ( Report . NO_SOURCE_OBJECT_ID . equals ( sourceObjectId ) ) { return ; } if ( aipIdToTransferredResourceIds . containsKey ( outcomeObjectId ) ) { if ( ! aipIdToTransferredResourceIds . get ( outcomeObjectId ) . contains ( sourceObjectId ) ) { aipIdToTransferredResourceIds . get ( outcomeObjectId ) . add ( sourceObjectId ) ; } } else { aipIdToTransferredResourceIds . computeIfAbsent ( outcomeObjectId , key -> new ArrayList < > ( ) ) . add ( sourceObjectId ) ; } if ( transferredResourceToAipIds . containsKey ( sourceObjectId ) ) { if ( ! transferredResourceToAipIds . get ( sourceObjectId ) . contains ( outcomeObjectId ) ) { transferredResourceToAipIds . get ( sourceObjectId ) . 
add ( outcomeObjectId ) ; } } else { transferredResourceToAipIds . computeIfAbsent ( sourceObjectId , key -> new ArrayList < > ( ) ) . add ( outcomeObjectId ) ; } if ( reportsFromBeingProcessed . get ( sourceObjectId ) != null ) { reportsFromBeingProcessed . get ( sourceObjectId ) . put ( outcomeObjectId , report ) ; allReports . get ( sourceObjectId ) . put ( outcomeObjectId , report ) ; } else { Map < String , Report > innerReports = new HashMap < > ( ) ; innerReports . put ( outcomeObjectId , report ) ; reportsFromBeingProcessed . put ( sourceObjectId , innerReports ) ; allReports . put ( sourceObjectId , innerReports ) ; } } } +","public void addReport ( Report report , boolean reportIsAnReportItem ) { if ( reportIsAnReportItem ) { reportsFromBeingProcessed . get ( report . getSourceObjectId ( ) ) . get ( report . getOutcomeObjectId ( ) ) . addReport ( report , false ) ; } else { String sourceObjectId = report . getSourceObjectId ( ) ; String outcomeObjectId = report . getOutcomeObjectId ( ) ; if ( StringUtils . isBlank ( sourceObjectId ) || StringUtils . isBlank ( outcomeObjectId ) ) { LOGGER . error ( ""Will not add report as both source & outcome object ids are blank!"" ) ; return ; } if ( Report . NO_SOURCE_OBJECT_ID . equals ( sourceObjectId ) ) { LOGGER . error ( ""Will not add report as source object id is not set!"" ) ; return ; } if ( aipIdToTransferredResourceIds . containsKey ( outcomeObjectId ) ) { if ( ! aipIdToTransferredResourceIds . get ( outcomeObjectId ) . contains ( sourceObjectId ) ) { aipIdToTransferredResourceIds . get ( outcomeObjectId ) . add ( sourceObjectId ) ; } } else { aipIdToTransferredResourceIds . computeIfAbsent ( outcomeObjectId , key -> new ArrayList < > ( ) ) . add ( sourceObjectId ) ; } if ( transferredResourceToAipIds . containsKey ( sourceObjectId ) ) { if ( ! transferredResourceToAipIds . get ( sourceObjectId ) . contains ( outcomeObjectId ) ) { transferredResourceToAipIds . get ( sourceObjectId ) . add ( outcomeObjectId ) ; } } else { transferredResourceToAipIds . computeIfAbsent ( sourceObjectId , key -> new ArrayList < > ( ) ) . add ( outcomeObjectId ) ; } if ( reportsFromBeingProcessed . get ( sourceObjectId ) != null ) { reportsFromBeingProcessed . get ( sourceObjectId ) . put ( outcomeObjectId , report ) ; allReports . get ( sourceObjectId ) . put ( outcomeObjectId , report ) ; } else { Map < String , Report > innerReports = new HashMap < > ( ) ; innerReports . put ( outcomeObjectId , report ) ; reportsFromBeingProcessed . put ( sourceObjectId , innerReports ) ; allReports . put ( sourceObjectId , innerReports ) ; } } } +" +276,"public void config ( String message ) { } +","public void config ( String message ) { logger . info ( message ) ; } +" +277,"private void entryMoved ( PartitionTxn partitionTxn , Dn oldEntryDn , Entry modifiedEntry , String entryIdOld ) throws LdapException { addEntry ( modifiedEntry ) ; String baseId = getEntryId ( partitionTxn , modifiedEntry . getDn ( ) ) ; ParentIdAndRdn parentIdAndRdn = getRdnIndex ( ) . reverseLookup ( partitionTxn , baseId ) ; IndexEntry indexEntry = new IndexEntry ( ) ; indexEntry . setId ( baseId ) ; indexEntry . setKey ( parentIdAndRdn ) ; Cursor < IndexEntry < ParentIdAndRdn , String > > cursor = new SingletonIndexCursor < > ( partitionTxn , indexEntry ) ; String parentId = parentIdAndRdn . getParentId ( ) ; Cursor < IndexEntry < String , String > > scopeCursor = new DescendantCursor ( partitionTxn , this , baseId , parentId , cursor ) ; try { while ( scopeCursor . 
next ( ) ) { IndexEntry < String , String > entry = scopeCursor . get ( ) ; if ( entry . getId ( ) != entryIdOld ) { addEntry ( fetch ( partitionTxn , entry . getId ( ) ) ) ; } } scopeCursor . close ( ) ; } catch ( Exception e ) { throw new LdapOperationException ( e . getMessage ( ) , e ) ; } File file = getFile ( oldEntryDn , DELETE ) ; boolean deleted = deleteFile ( file ) ; String dirName = file . getAbsolutePath ( ) ; dirName = dirName . substring ( 0 , dirName . indexOf ( CONF_FILE_EXTN ) ) ; deleted = deleteFile ( new File ( dirName ) ) ; LOG . warn ( ""move operation: deleted dir {} {}"" , dirName , deleted ) ; } +","private void entryMoved ( PartitionTxn partitionTxn , Dn oldEntryDn , Entry modifiedEntry , String entryIdOld ) throws LdapException { addEntry ( modifiedEntry ) ; String baseId = getEntryId ( partitionTxn , modifiedEntry . getDn ( ) ) ; ParentIdAndRdn parentIdAndRdn = getRdnIndex ( ) . reverseLookup ( partitionTxn , baseId ) ; IndexEntry indexEntry = new IndexEntry ( ) ; indexEntry . setId ( baseId ) ; indexEntry . setKey ( parentIdAndRdn ) ; Cursor < IndexEntry < ParentIdAndRdn , String > > cursor = new SingletonIndexCursor < > ( partitionTxn , indexEntry ) ; String parentId = parentIdAndRdn . getParentId ( ) ; Cursor < IndexEntry < String , String > > scopeCursor = new DescendantCursor ( partitionTxn , this , baseId , parentId , cursor ) ; try { while ( scopeCursor . next ( ) ) { IndexEntry < String , String > entry = scopeCursor . get ( ) ; if ( entry . getId ( ) != entryIdOld ) { addEntry ( fetch ( partitionTxn , entry . getId ( ) ) ) ; } } scopeCursor . close ( ) ; } catch ( Exception e ) { throw new LdapOperationException ( e . getMessage ( ) , e ) ; } File file = getFile ( oldEntryDn , DELETE ) ; boolean deleted = deleteFile ( file ) ; LOG . warn ( ""move operation: deleted file {} {}"" , file . getAbsoluteFile ( ) , deleted ) ; String dirName = file . getAbsolutePath ( ) ; dirName = dirName . substring ( 0 , dirName . indexOf ( CONF_FILE_EXTN ) ) ; deleted = deleteFile ( new File ( dirName ) ) ; LOG . warn ( ""move operation: deleted dir {} {}"" , dirName , deleted ) ; } +" +278,"private void entryMoved ( PartitionTxn partitionTxn , Dn oldEntryDn , Entry modifiedEntry , String entryIdOld ) throws LdapException { addEntry ( modifiedEntry ) ; String baseId = getEntryId ( partitionTxn , modifiedEntry . getDn ( ) ) ; ParentIdAndRdn parentIdAndRdn = getRdnIndex ( ) . reverseLookup ( partitionTxn , baseId ) ; IndexEntry indexEntry = new IndexEntry ( ) ; indexEntry . setId ( baseId ) ; indexEntry . setKey ( parentIdAndRdn ) ; Cursor < IndexEntry < ParentIdAndRdn , String > > cursor = new SingletonIndexCursor < > ( partitionTxn , indexEntry ) ; String parentId = parentIdAndRdn . getParentId ( ) ; Cursor < IndexEntry < String , String > > scopeCursor = new DescendantCursor ( partitionTxn , this , baseId , parentId , cursor ) ; try { while ( scopeCursor . next ( ) ) { IndexEntry < String , String > entry = scopeCursor . get ( ) ; if ( entry . getId ( ) != entryIdOld ) { addEntry ( fetch ( partitionTxn , entry . getId ( ) ) ) ; } } scopeCursor . close ( ) ; } catch ( Exception e ) { throw new LdapOperationException ( e . getMessage ( ) , e ) ; } File file = getFile ( oldEntryDn , DELETE ) ; boolean deleted = deleteFile ( file ) ; LOG . warn ( ""move operation: deleted file {} {}"" , file . getAbsoluteFile ( ) , deleted ) ; String dirName = file . getAbsolutePath ( ) ; dirName = dirName . substring ( 0 , dirName . 
indexOf ( CONF_FILE_EXTN ) ) ; deleted = deleteFile ( new File ( dirName ) ) ; } +","private void entryMoved ( PartitionTxn partitionTxn , Dn oldEntryDn , Entry modifiedEntry , String entryIdOld ) throws LdapException { addEntry ( modifiedEntry ) ; String baseId = getEntryId ( partitionTxn , modifiedEntry . getDn ( ) ) ; ParentIdAndRdn parentIdAndRdn = getRdnIndex ( ) . reverseLookup ( partitionTxn , baseId ) ; IndexEntry indexEntry = new IndexEntry ( ) ; indexEntry . setId ( baseId ) ; indexEntry . setKey ( parentIdAndRdn ) ; Cursor < IndexEntry < ParentIdAndRdn , String > > cursor = new SingletonIndexCursor < > ( partitionTxn , indexEntry ) ; String parentId = parentIdAndRdn . getParentId ( ) ; Cursor < IndexEntry < String , String > > scopeCursor = new DescendantCursor ( partitionTxn , this , baseId , parentId , cursor ) ; try { while ( scopeCursor . next ( ) ) { IndexEntry < String , String > entry = scopeCursor . get ( ) ; if ( entry . getId ( ) != entryIdOld ) { addEntry ( fetch ( partitionTxn , entry . getId ( ) ) ) ; } } scopeCursor . close ( ) ; } catch ( Exception e ) { throw new LdapOperationException ( e . getMessage ( ) , e ) ; } File file = getFile ( oldEntryDn , DELETE ) ; boolean deleted = deleteFile ( file ) ; LOG . warn ( ""move operation: deleted file {} {}"" , file . getAbsoluteFile ( ) , deleted ) ; String dirName = file . getAbsolutePath ( ) ; dirName = dirName . substring ( 0 , dirName . indexOf ( CONF_FILE_EXTN ) ) ; deleted = deleteFile ( new File ( dirName ) ) ; LOG . warn ( ""move operation: deleted dir {} {}"" , dirName , deleted ) ; } +" +279,"public < T > T getValueType ( Field field , Object value ) { if ( value == null ) { return ( T ) value ; } if ( MarketoClientUtils . isDateTypeField ( field ) ) { Date dt = null ; try { dt = new DateTime ( String . valueOf ( value ) , DateTimeZone . forID ( ""UTC"" ) ) . toDate ( ) ; return ( T ) Long . valueOf ( dt . getTime ( ) ) ; } catch ( Exception e ) { return null ; } } switch ( MarketoClientUtils . getFieldType ( field ) ) { case STRING : switch ( field . name ( ) ) { case FIELD_FIELDS : case FIELD_DEDUPE_FIELDS : case FIELD_SEARCHABLE_FIELDS : case FIELD_RELATIONSHIPS : return ( T ) new Gson ( ) . toJson ( value ) ; default : return ( T ) value ; } case INT : return ( T ) ( Integer ) Float . valueOf ( value . toString ( ) ) . intValue ( ) ; case BOOLEAN : return ( T ) Boolean . valueOf ( value . toString ( ) ) ; case FLOAT : return ( T ) Float . valueOf ( value . toString ( ) ) ; case DOUBLE : return ( T ) Double . valueOf ( value . toString ( ) ) ; case LONG : return ( T ) Long . valueOf ( value . toString ( ) ) ; default : LOG . warn ( ""Not managed -> type: {}, value: {} for field: {}."" , field . schema ( ) . getType ( ) , value , field ) ; return ( T ) value ; } } +","public < T > T getValueType ( Field field , Object value ) { if ( value == null ) { return ( T ) value ; } if ( MarketoClientUtils . isDateTypeField ( field ) ) { Date dt = null ; try { dt = new DateTime ( String . valueOf ( value ) , DateTimeZone . forID ( ""UTC"" ) ) . toDate ( ) ; return ( T ) Long . valueOf ( dt . getTime ( ) ) ; } catch ( Exception e ) { LOG . error ( ""Error while parsing date : {}."" , e . getMessage ( ) ) ; return null ; } } switch ( MarketoClientUtils . getFieldType ( field ) ) { case STRING : switch ( field . name ( ) ) { case FIELD_FIELDS : case FIELD_DEDUPE_FIELDS : case FIELD_SEARCHABLE_FIELDS : case FIELD_RELATIONSHIPS : return ( T ) new Gson ( ) . 
toJson ( value ) ; default : return ( T ) value ; } case INT : return ( T ) ( Integer ) Float . valueOf ( value . toString ( ) ) . intValue ( ) ; case BOOLEAN : return ( T ) Boolean . valueOf ( value . toString ( ) ) ; case FLOAT : return ( T ) Float . valueOf ( value . toString ( ) ) ; case DOUBLE : return ( T ) Double . valueOf ( value . toString ( ) ) ; case LONG : return ( T ) Long . valueOf ( value . toString ( ) ) ; default : LOG . warn ( ""Not managed -> type: {}, value: {} for field: {}."" , field . schema ( ) . getType ( ) , value , field ) ; return ( T ) value ; } } +" +280,"public < T > T getValueType ( Field field , Object value ) { if ( value == null ) { return ( T ) value ; } if ( MarketoClientUtils . isDateTypeField ( field ) ) { Date dt = null ; try { dt = new DateTime ( String . valueOf ( value ) , DateTimeZone . forID ( ""UTC"" ) ) . toDate ( ) ; return ( T ) Long . valueOf ( dt . getTime ( ) ) ; } catch ( Exception e ) { LOG . error ( ""Error while parsing date : {}."" , e . getMessage ( ) ) ; return null ; } } switch ( MarketoClientUtils . getFieldType ( field ) ) { case STRING : switch ( field . name ( ) ) { case FIELD_FIELDS : case FIELD_DEDUPE_FIELDS : case FIELD_SEARCHABLE_FIELDS : case FIELD_RELATIONSHIPS : return ( T ) new Gson ( ) . toJson ( value ) ; default : return ( T ) value ; } case INT : return ( T ) ( Integer ) Float . valueOf ( value . toString ( ) ) . intValue ( ) ; case BOOLEAN : return ( T ) Boolean . valueOf ( value . toString ( ) ) ; case FLOAT : return ( T ) Float . valueOf ( value . toString ( ) ) ; case DOUBLE : return ( T ) Double . valueOf ( value . toString ( ) ) ; case LONG : return ( T ) Long . valueOf ( value . toString ( ) ) ; default : return ( T ) value ; } } +","public < T > T getValueType ( Field field , Object value ) { if ( value == null ) { return ( T ) value ; } if ( MarketoClientUtils . isDateTypeField ( field ) ) { Date dt = null ; try { dt = new DateTime ( String . valueOf ( value ) , DateTimeZone . forID ( ""UTC"" ) ) . toDate ( ) ; return ( T ) Long . valueOf ( dt . getTime ( ) ) ; } catch ( Exception e ) { LOG . error ( ""Error while parsing date : {}."" , e . getMessage ( ) ) ; return null ; } } switch ( MarketoClientUtils . getFieldType ( field ) ) { case STRING : switch ( field . name ( ) ) { case FIELD_FIELDS : case FIELD_DEDUPE_FIELDS : case FIELD_SEARCHABLE_FIELDS : case FIELD_RELATIONSHIPS : return ( T ) new Gson ( ) . toJson ( value ) ; default : return ( T ) value ; } case INT : return ( T ) ( Integer ) Float . valueOf ( value . toString ( ) ) . intValue ( ) ; case BOOLEAN : return ( T ) Boolean . valueOf ( value . toString ( ) ) ; case FLOAT : return ( T ) Float . valueOf ( value . toString ( ) ) ; case DOUBLE : return ( T ) Double . valueOf ( value . toString ( ) ) ; case LONG : return ( T ) Long . valueOf ( value . toString ( ) ) ; default : LOG . warn ( ""Not managed -> type: {}, value: {} for field: {}."" , field . schema ( ) . getType ( ) , value , field ) ; return ( T ) value ; } } +" +281,"public AWSError parseAWSErrorFromContent ( HttpRequest request , HttpResponse response ) { if ( response . getPayload ( ) == null ) return null ; if ( ""text/plain"" . equals ( response . getPayload ( ) . getContentMetadata ( ) . getContentType ( ) ) ) return null ; try { AWSError error = factory . create ( errorHandlerProvider . get ( ) ) . setContext ( request ) . apply ( response ) ; if ( error . getRequestId ( ) == null ) error . setRequestId ( response . getFirstHeaderOrNull ( requestId ) ) ; error . 
setRequestToken ( response . getFirstHeaderOrNull ( requestToken ) ) ; if ( ""SignatureDoesNotMatch"" . equals ( error . getCode ( ) ) ) { error . setStringSigned ( signer . createStringToSign ( request ) ) ; error . setSignature ( signer . sign ( error . getStringSigned ( ) ) ) ; } return error ; } catch ( RuntimeException e ) { return null ; } } +","public AWSError parseAWSErrorFromContent ( HttpRequest request , HttpResponse response ) { if ( response . getPayload ( ) == null ) return null ; if ( ""text/plain"" . equals ( response . getPayload ( ) . getContentMetadata ( ) . getContentType ( ) ) ) return null ; try { AWSError error = factory . create ( errorHandlerProvider . get ( ) ) . setContext ( request ) . apply ( response ) ; if ( error . getRequestId ( ) == null ) error . setRequestId ( response . getFirstHeaderOrNull ( requestId ) ) ; error . setRequestToken ( response . getFirstHeaderOrNull ( requestToken ) ) ; if ( ""SignatureDoesNotMatch"" . equals ( error . getCode ( ) ) ) { error . setStringSigned ( signer . createStringToSign ( request ) ) ; error . setSignature ( signer . sign ( error . getStringSigned ( ) ) ) ; } return error ; } catch ( RuntimeException e ) { logger . warn ( e , ""error parsing error"" ) ; return null ; } } +"
+282,"public EntityType getEntityInfo ( final Integer detailKeyId , final Integer detailtypeId ) throws ValidationException { if ( LOGGER . isDebugEnabled ( ) ) EntityType entity = null ; try { final Accountdetailtype accountdetailtype = getAccountDetailById ( detailtypeId ) ; final Class < ? > service = Class . forName ( accountdetailtype . getFullQualifiedName ( ) ) ; final String detailTypeName = service . getSimpleName ( ) ; String dataType = """" ; final java . lang . reflect . Method method = service . getMethod ( ""getId"" ) ; dataType = method . getReturnType ( ) . getSimpleName ( ) ; if ( dataType . equals ( ""Long"" ) ) entity = ( EntityType ) persistenceService . find ( ""from "" + detailTypeName + "" where id=? order by name"" , detailKeyId . longValue ( ) ) ; else entity = ( EntityType ) persistenceService . find ( ""from "" + detailTypeName + "" where id=? order by name"" , detailKeyId ) ; } catch ( final Exception e ) { final List < ValidationError > errors = new ArrayList < ValidationError > ( ) ; errors . add ( new ValidationError ( ""exp"" , e . getMessage ( ) ) ) ; throw new ValidationException ( errors ) ; } if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""VoucherHibernateDAO | getDetailCodeName | End"" ) ; return entity ; } +","public EntityType getEntityInfo ( final Integer detailKeyId , final Integer detailtypeId ) throws ValidationException { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""VoucherHibernateDAO | getDetailCodeName | start"" ) ; EntityType entity = null ; try { final Accountdetailtype accountdetailtype = getAccountDetailById ( detailtypeId ) ; final Class < ? > service = Class . forName ( accountdetailtype . getFullQualifiedName ( ) ) ; final String detailTypeName = service . getSimpleName ( ) ; String dataType = """" ; final java . lang . reflect . Method method = service . getMethod ( ""getId"" ) ; dataType = method . getReturnType ( ) . getSimpleName ( ) ; if ( dataType . equals ( ""Long"" ) ) entity = ( EntityType ) persistenceService . find ( ""from "" + detailTypeName + "" where id=? order by name"" , detailKeyId . longValue ( ) ) ; else entity = ( EntityType ) persistenceService . find ( ""from "" + detailTypeName + "" where id=? order by name"" , detailKeyId ) ; } catch ( final Exception e ) { final List < ValidationError > errors = new ArrayList < ValidationError > ( ) ; errors . add ( new ValidationError ( ""exp"" , e . getMessage ( ) ) ) ; throw new ValidationException ( errors ) ; } if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""VoucherHibernateDAO | getDetailCodeName | End"" ) ; return entity ; } +"
+283,"public EntityType getEntityInfo ( final Integer detailKeyId , final Integer detailtypeId ) throws ValidationException { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""VoucherHibernateDAO | getDetailCodeName | start"" ) ; EntityType entity = null ; try { final Accountdetailtype accountdetailtype = getAccountDetailById ( detailtypeId ) ; final Class < ? > service = Class . forName ( accountdetailtype . getFullQualifiedName ( ) ) ; final String detailTypeName = service . getSimpleName ( ) ; String dataType = """" ; final java . lang . reflect . Method method = service . getMethod ( ""getId"" ) ; dataType = method . getReturnType ( ) . getSimpleName ( ) ; if ( dataType . equals ( ""Long"" ) ) entity = ( EntityType ) persistenceService . find ( ""from "" + detailTypeName + "" where id=? order by name"" , detailKeyId . longValue ( ) ) ; else entity = ( EntityType ) persistenceService . find ( ""from "" + detailTypeName + "" where id=? order by name"" , detailKeyId ) ; } catch ( final Exception e ) { final List < ValidationError > errors = new ArrayList < ValidationError > ( ) ; errors . add ( new ValidationError ( ""exp"" , e . getMessage ( ) ) ) ; throw new ValidationException ( errors ) ; } if ( LOGGER . isDebugEnabled ( ) ) return entity ; } +","public EntityType getEntityInfo ( final Integer detailKeyId , final Integer detailtypeId ) throws ValidationException { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""VoucherHibernateDAO | getDetailCodeName | start"" ) ; EntityType entity = null ; try { final Accountdetailtype accountdetailtype = getAccountDetailById ( detailtypeId ) ; final Class < ? > service = Class . forName ( accountdetailtype . getFullQualifiedName ( ) ) ; final String detailTypeName = service . getSimpleName ( ) ; String dataType = """" ; final java . lang . reflect . Method method = service . getMethod ( ""getId"" ) ; dataType = method . getReturnType ( ) . getSimpleName ( ) ; if ( dataType . equals ( ""Long"" ) ) entity = ( EntityType ) persistenceService . find ( ""from "" + detailTypeName + "" where id=? order by name"" , detailKeyId . longValue ( ) ) ; else entity = ( EntityType ) persistenceService . find ( ""from "" + detailTypeName + "" where id=? order by name"" , detailKeyId ) ; } catch ( final Exception e ) { final List < ValidationError > errors = new ArrayList < ValidationError > ( ) ; errors . add ( new ValidationError ( ""exp"" , e . getMessage ( ) ) ) ; throw new ValidationException ( errors ) ; } if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( ""VoucherHibernateDAO | getDetailCodeName | End"" ) ; return entity ; } +"
+284,"protected void onTransportFailure ( ) { } +","protected void onTransportFailure ( ) { getLogger ( ) . warning ( ""Push connection using primary method ("" + getConfig ( ) . getTransport ( ) + "") failed. Trying with "" + getConfig ( ) . getFallbackTransport ( ) ) ; } +"
+285,"@ Test public void testMethodName ( ) { Logger logger = LoggerFactory . getLogger ( ""my-log4j2-logger"" ) ; String result = recording . execute ( new Runnable ( ) { @ Override public void run ( ) { } } ) ; assertTrue ( result .
contains ( "".run:"" ) ) ; } +","@ Test public void testMethodName ( ) { Logger logger = LoggerFactory . getLogger ( ""my-log4j2-logger"" ) ; String result = recording . execute ( new Runnable ( ) { @ Override public void run ( ) { logger . warn ( ""hello"" ) ; } } ) ; assertTrue ( result . contains ( "".run:"" ) ) ; } +" +286,"public void schemaSourceRegistered ( final Iterable < PotentialSchemaSource < ? > > sources ) { final Map < ModuleKey , Module > newModules = new HashMap < > ( ) ; for ( PotentialSchemaSource < ? > potentialYangSource : Iterables . filter ( sources , YANG_SCHEMA_SOURCE ) ) { final YangIdentifier moduleName = new YangIdentifier ( potentialYangSource . getSourceIdentifier ( ) . getName ( ) ) ; final Module newModule = new ModuleBuilder ( ) . setName ( moduleName ) . setRevision ( RevisionUtils . fromYangCommon ( potentialYangSource . getSourceIdentifier ( ) . getRevision ( ) ) ) . setSchema ( getUrlForModule ( potentialYangSource . getSourceIdentifier ( ) ) ) . build ( ) ; newModules . put ( newModule . key ( ) , newModule ) ; } if ( newModules . isEmpty ( ) ) { return ; } WriteTransaction tx = dataBroker . newWriteOnlyTransaction ( ) ; tx . merge ( LogicalDatastoreType . OPERATIONAL , InstanceIdentifier . create ( ModulesState . class ) , new ModulesStateBuilder ( ) . setModule ( newModules ) . build ( ) ) ; tx . commit ( ) . addCallback ( new FutureCallback < CommitInfo > ( ) { @ Override public void onSuccess ( final CommitInfo result ) { } @ Override public void onFailure ( final Throwable throwable ) { LOG . warn ( ""Unable to update modules state"" , throwable ) ; } } , MoreExecutors . directExecutor ( ) ) ; } +","public void schemaSourceRegistered ( final Iterable < PotentialSchemaSource < ? > > sources ) { final Map < ModuleKey , Module > newModules = new HashMap < > ( ) ; for ( PotentialSchemaSource < ? > potentialYangSource : Iterables . filter ( sources , YANG_SCHEMA_SOURCE ) ) { final YangIdentifier moduleName = new YangIdentifier ( potentialYangSource . getSourceIdentifier ( ) . getName ( ) ) ; final Module newModule = new ModuleBuilder ( ) . setName ( moduleName ) . setRevision ( RevisionUtils . fromYangCommon ( potentialYangSource . getSourceIdentifier ( ) . getRevision ( ) ) ) . setSchema ( getUrlForModule ( potentialYangSource . getSourceIdentifier ( ) ) ) . build ( ) ; newModules . put ( newModule . key ( ) , newModule ) ; } if ( newModules . isEmpty ( ) ) { return ; } WriteTransaction tx = dataBroker . newWriteOnlyTransaction ( ) ; tx . merge ( LogicalDatastoreType . OPERATIONAL , InstanceIdentifier . create ( ModulesState . class ) , new ModulesStateBuilder ( ) . setModule ( newModules ) . build ( ) ) ; tx . commit ( ) . addCallback ( new FutureCallback < CommitInfo > ( ) { @ Override public void onSuccess ( final CommitInfo result ) { LOG . debug ( ""Modules state successfully populated with new modules"" ) ; } @ Override public void onFailure ( final Throwable throwable ) { LOG . warn ( ""Unable to update modules state"" , throwable ) ; } } , MoreExecutors . directExecutor ( ) ) ; } +" +287,"public void schemaSourceRegistered ( final Iterable < PotentialSchemaSource < ? > > sources ) { final Map < ModuleKey , Module > newModules = new HashMap < > ( ) ; for ( PotentialSchemaSource < ? > potentialYangSource : Iterables . filter ( sources , YANG_SCHEMA_SOURCE ) ) { final YangIdentifier moduleName = new YangIdentifier ( potentialYangSource . getSourceIdentifier ( ) . getName ( ) ) ; final Module newModule = new ModuleBuilder ( ) . 
setName ( moduleName ) . setRevision ( RevisionUtils . fromYangCommon ( potentialYangSource . getSourceIdentifier ( ) . getRevision ( ) ) ) . setSchema ( getUrlForModule ( potentialYangSource . getSourceIdentifier ( ) ) ) . build ( ) ; newModules . put ( newModule . key ( ) , newModule ) ; } if ( newModules . isEmpty ( ) ) { return ; } WriteTransaction tx = dataBroker . newWriteOnlyTransaction ( ) ; tx . merge ( LogicalDatastoreType . OPERATIONAL , InstanceIdentifier . create ( ModulesState . class ) , new ModulesStateBuilder ( ) . setModule ( newModules ) . build ( ) ) ; tx . commit ( ) . addCallback ( new FutureCallback < CommitInfo > ( ) { @ Override public void onSuccess ( final CommitInfo result ) { LOG . debug ( ""Modules state successfully populated with new modules"" ) ; } @ Override public void onFailure ( final Throwable throwable ) { } } , MoreExecutors . directExecutor ( ) ) ; } +","public void schemaSourceRegistered ( final Iterable < PotentialSchemaSource < ? > > sources ) { final Map < ModuleKey , Module > newModules = new HashMap < > ( ) ; for ( PotentialSchemaSource < ? > potentialYangSource : Iterables . filter ( sources , YANG_SCHEMA_SOURCE ) ) { final YangIdentifier moduleName = new YangIdentifier ( potentialYangSource . getSourceIdentifier ( ) . getName ( ) ) ; final Module newModule = new ModuleBuilder ( ) . setName ( moduleName ) . setRevision ( RevisionUtils . fromYangCommon ( potentialYangSource . getSourceIdentifier ( ) . getRevision ( ) ) ) . setSchema ( getUrlForModule ( potentialYangSource . getSourceIdentifier ( ) ) ) . build ( ) ; newModules . put ( newModule . key ( ) , newModule ) ; } if ( newModules . isEmpty ( ) ) { return ; } WriteTransaction tx = dataBroker . newWriteOnlyTransaction ( ) ; tx . merge ( LogicalDatastoreType . OPERATIONAL , InstanceIdentifier . create ( ModulesState . class ) , new ModulesStateBuilder ( ) . setModule ( newModules ) . build ( ) ) ; tx . commit ( ) . addCallback ( new FutureCallback < CommitInfo > ( ) { @ Override public void onSuccess ( final CommitInfo result ) { LOG . debug ( ""Modules state successfully populated with new modules"" ) ; } @ Override public void onFailure ( final Throwable throwable ) { LOG . warn ( ""Unable to update modules state"" , throwable ) ; } } , MoreExecutors . directExecutor ( ) ) ; } +" +288,"public void handleKillbillEvent ( final ExtBusEvent killbillEvent ) { if ( killbillEvent . getEventType ( ) == ExtBusEventType . BLOCKING_STATE ) { if ( countPerToken . get ( killbillEvent . getUserToken ( ) ) == null ) { countPerToken . put ( killbillEvent . getUserToken ( ) , new AtomicInteger ( ) ) ; } final Integer seen = countPerToken . get ( killbillEvent . getUserToken ( ) ) . incrementAndGet ( ) ; if ( ! seen . toString ( ) . equalsIgnoreCase ( killbillEvent . getMetaData ( ) ) ) { testDao . insertExternalKey ( ""error-"" + seen ) ; throw new NotificationPluginApiRetryException ( ) ; } else { testDao . insertExternalKey ( killbillEvent . getAccountId ( ) . toString ( ) ) ; return ; } } if ( killbillEvent . getEventType ( ) != ExtBusEventType . ACCOUNT_CREATION ) { return ; } testDao . insertExternalKey ( killbillEvent . getAccountId ( ) . toString ( ) ) ; } +","public void handleKillbillEvent ( final ExtBusEvent killbillEvent ) { logger . info ( ""Received external event {}"" , killbillEvent . toString ( ) ) ; if ( killbillEvent . getEventType ( ) == ExtBusEventType . BLOCKING_STATE ) { if ( countPerToken . get ( killbillEvent . getUserToken ( ) ) == null ) { countPerToken . put ( killbillEvent . 
getUserToken ( ) , new AtomicInteger ( ) ) ; } final Integer seen = countPerToken . get ( killbillEvent . getUserToken ( ) ) . incrementAndGet ( ) ; if ( ! seen . toString ( ) . equalsIgnoreCase ( killbillEvent . getMetaData ( ) ) ) { testDao . insertExternalKey ( ""error-"" + seen ) ; throw new NotificationPluginApiRetryException ( ) ; } else { testDao . insertExternalKey ( killbillEvent . getAccountId ( ) . toString ( ) ) ; return ; } } if ( killbillEvent . getEventType ( ) != ExtBusEventType . ACCOUNT_CREATION ) { return ; } testDao . insertExternalKey ( killbillEvent . getAccountId ( ) . toString ( ) ) ; } +"
+289,"@ Test public void syncAddTempDelete ( ) throws Exception { File f = copyToFile ( randomStream ( 0 , 4 * 1024 ) , folder . newFile ( ) ) ; String id = getIdForInputStream ( f ) ; FileInputStream fin = new FileInputStream ( f ) ; closer . register ( fin ) ; DataRecord rec = dataStore . addRecord ( fin , new BlobOptions ( ) . setUpload ( SYNCHRONOUS ) ) ; assertEquals ( id , rec . getIdentifier ( ) . toString ( ) ) ; assertFile ( rec . getStream ( ) , f , folder ) ; Collection < File > files = FileUtils . listFiles ( new File ( dsPath , ""tmp"" ) , FileFilterUtils . prefixFileFilter ( ""upload"" ) , null ) ; assertEquals ( 0 , files . size ( ) ) ; LOG . info ( ""Finished syncAddTempDelete"" ) ; } +","@ Test public void syncAddTempDelete ( ) throws Exception { LOG . info ( ""Starting syncAddTempDelete"" ) ; File f = copyToFile ( randomStream ( 0 , 4 * 1024 ) , folder . newFile ( ) ) ; String id = getIdForInputStream ( f ) ; FileInputStream fin = new FileInputStream ( f ) ; closer . register ( fin ) ; DataRecord rec = dataStore . addRecord ( fin , new BlobOptions ( ) . setUpload ( SYNCHRONOUS ) ) ; assertEquals ( id , rec . getIdentifier ( ) . toString ( ) ) ; assertFile ( rec . getStream ( ) , f , folder ) ; Collection < File > files = FileUtils . listFiles ( new File ( dsPath , ""tmp"" ) , FileFilterUtils . prefixFileFilter ( ""upload"" ) , null ) ; assertEquals ( 0 , files . size ( ) ) ; LOG . info ( ""Finished syncAddTempDelete"" ) ; } +"
+290,"@ Test public void syncAddTempDelete ( ) throws Exception { LOG . info ( ""Starting syncAddTempDelete"" ) ; File f = copyToFile ( randomStream ( 0 , 4 * 1024 ) , folder . newFile ( ) ) ; String id = getIdForInputStream ( f ) ; FileInputStream fin = new FileInputStream ( f ) ; closer . register ( fin ) ; DataRecord rec = dataStore . addRecord ( fin , new BlobOptions ( ) . setUpload ( SYNCHRONOUS ) ) ; assertEquals ( id , rec . getIdentifier ( ) . toString ( ) ) ; assertFile ( rec . getStream ( ) , f , folder ) ; Collection < File > files = FileUtils . listFiles ( new File ( dsPath , ""tmp"" ) , FileFilterUtils . prefixFileFilter ( ""upload"" ) , null ) ; assertEquals ( 0 , files . size ( ) ) ; } +","@ Test public void syncAddTempDelete ( ) throws Exception { LOG . info ( ""Starting syncAddTempDelete"" ) ; File f = copyToFile ( randomStream ( 0 , 4 * 1024 ) , folder . newFile ( ) ) ; String id = getIdForInputStream ( f ) ; FileInputStream fin = new FileInputStream ( f ) ; closer . register ( fin ) ; DataRecord rec = dataStore . addRecord ( fin , new BlobOptions ( ) . setUpload ( SYNCHRONOUS ) ) ; assertEquals ( id , rec . getIdentifier ( ) . toString ( ) ) ; assertFile ( rec . getStream ( ) , f , folder ) ; Collection < File > files = FileUtils . listFiles ( new File ( dsPath , ""tmp"" ) , FileFilterUtils . prefixFileFilter ( ""upload"" ) , null ) ; assertEquals ( 0 , files . size ( ) ) ; LOG . info ( ""Finished syncAddTempDelete"" ) ; } +"
+291,"@ Test public void testLoadAllNotes ( ) { Note note ; try { assertEquals ( 0 , notebook . getAllNotes ( ) . size ( ) ) ; note = notebook . createNote ( ""note1"" , anonymous ) ; Paragraph p1 = note . addNewParagraph ( AuthenticationInfo . ANONYMOUS ) ; Map < String , Object > config = p1 . getConfig ( ) ; config . put ( ""enabled"" , true ) ; p1 . setConfig ( config ) ; p1 . setText ( ""hello world"" ) ; notebook . saveNote ( note , anonymous ) ; } catch ( IOException fe ) { } assertEquals ( 1 , notebook . getAllNotes ( ) . size ( ) ) ; } +","@ Test public void testLoadAllNotes ( ) { Note note ; try { assertEquals ( 0 , notebook . getAllNotes ( ) . size ( ) ) ; note = notebook . createNote ( ""note1"" , anonymous ) ; Paragraph p1 = note . addNewParagraph ( AuthenticationInfo . ANONYMOUS ) ; Map < String , Object > config = p1 . getConfig ( ) ; config . put ( ""enabled"" , true ) ; p1 . setConfig ( config ) ; p1 . setText ( ""hello world"" ) ; notebook . saveNote ( note , anonymous ) ; } catch ( IOException fe ) { logger . warn ( ""Failed to create note and paragraph. Possible problem with persisting note, safe to ignore"" , fe ) ; } assertEquals ( 1 , notebook . getAllNotes ( ) . size ( ) ) ; } +"
+292,"@ Test public void findProjectsByMultipleAttributeConditionTest ( ) { List < KeyValueQuery > predicates = new ArrayList < > ( ) ; Value numValue = Value . newBuilder ( ) . setNumberValue ( 0.6543210 ) . build ( ) ; KeyValueQuery keyValueQuery = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_1"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . LTE ) . build ( ) ; predicates . add ( keyValueQuery ) ; numValue = Value . newBuilder ( ) . setNumberValue ( 0.31 ) . build ( ) ; KeyValueQuery keyValueQuery2 = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_2"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . EQ ) . build ( ) ; predicates . add ( keyValueQuery2 ) ; FindProjects findProjects = FindProjects . newBuilder ( ) . addAllProjectIds ( projectMap . keySet ( ) ) . addAllPredicates ( predicates ) . setIdsOnly ( true ) . build ( ) ; FindProjects . Response response = projectServiceStub . findProjects ( findProjects ) ; LOGGER . info ( ""FindProjects Response : "" + response . getProjectsCount ( ) ) ; assertEquals ( ""Project count not match with expected project count"" , 1 , response . getProjectsCount ( ) ) ; assertEquals ( ""Project not match with expected project"" , project2 . getId ( ) , response . getProjectsList ( ) . get ( 0 ) . getId ( ) ) ; assertNotEquals ( ""Project not match with expected project"" , project2 , response . getProjectsList ( ) . get ( 0 ) ) ; assertEquals ( ""Total records count not matched with expected records count"" , 1 , response . getTotalRecords ( ) ) ; LOGGER . info ( ""FindProjects by multiple attribute condition test stop................................"" ) ; } +","@ Test public void findProjectsByMultipleAttributeConditionTest ( ) { LOGGER . info ( ""FindProjects by multiple attribute condition test start................................"" ) ; List < KeyValueQuery > predicates = new ArrayList < > ( ) ; Value numValue = Value . newBuilder ( ) . setNumberValue ( 0.6543210 ) . build ( ) ; KeyValueQuery keyValueQuery = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_1"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . LTE ) . build ( ) ; predicates . add ( keyValueQuery ) ; numValue = Value .
newBuilder ( ) . setNumberValue ( 0.31 ) . build ( ) ; KeyValueQuery keyValueQuery2 = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_2"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . EQ ) . build ( ) ; predicates . add ( keyValueQuery2 ) ; FindProjects findProjects = FindProjects . newBuilder ( ) . addAllProjectIds ( projectMap . keySet ( ) ) . addAllPredicates ( predicates ) . setIdsOnly ( true ) . build ( ) ; FindProjects . Response response = projectServiceStub . findProjects ( findProjects ) ; LOGGER . info ( ""FindProjects Response : "" + response . getProjectsCount ( ) ) ; assertEquals ( ""Project count not match with expected project count"" , 1 , response . getProjectsCount ( ) ) ; assertEquals ( ""Project not match with expected project"" , project2 . getId ( ) , response . getProjectsList ( ) . get ( 0 ) . getId ( ) ) ; assertNotEquals ( ""Project not match with expected project"" , project2 , response . getProjectsList ( ) . get ( 0 ) ) ; assertEquals ( ""Total records count not matched with expected records count"" , 1 , response . getTotalRecords ( ) ) ; LOGGER . info ( ""FindProjects by multiple attribute condition test stop................................"" ) ; } +" +293,"@ Test public void findProjectsByMultipleAttributeConditionTest ( ) { LOGGER . info ( ""FindProjects by multiple attribute condition test start................................"" ) ; List < KeyValueQuery > predicates = new ArrayList < > ( ) ; Value numValue = Value . newBuilder ( ) . setNumberValue ( 0.6543210 ) . build ( ) ; KeyValueQuery keyValueQuery = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_1"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . LTE ) . build ( ) ; predicates . add ( keyValueQuery ) ; numValue = Value . newBuilder ( ) . setNumberValue ( 0.31 ) . build ( ) ; KeyValueQuery keyValueQuery2 = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_2"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . EQ ) . build ( ) ; predicates . add ( keyValueQuery2 ) ; FindProjects findProjects = FindProjects . newBuilder ( ) . addAllProjectIds ( projectMap . keySet ( ) ) . addAllPredicates ( predicates ) . setIdsOnly ( true ) . build ( ) ; FindProjects . Response response = projectServiceStub . findProjects ( findProjects ) ; assertEquals ( ""Project count not match with expected project count"" , 1 , response . getProjectsCount ( ) ) ; assertEquals ( ""Project not match with expected project"" , project2 . getId ( ) , response . getProjectsList ( ) . get ( 0 ) . getId ( ) ) ; assertNotEquals ( ""Project not match with expected project"" , project2 , response . getProjectsList ( ) . get ( 0 ) ) ; assertEquals ( ""Total records count not matched with expected records count"" , 1 , response . getTotalRecords ( ) ) ; LOGGER . info ( ""FindProjects by multiple attribute condition test stop................................"" ) ; } +","@ Test public void findProjectsByMultipleAttributeConditionTest ( ) { LOGGER . info ( ""FindProjects by multiple attribute condition test start................................"" ) ; List < KeyValueQuery > predicates = new ArrayList < > ( ) ; Value numValue = Value . newBuilder ( ) . setNumberValue ( 0.6543210 ) . build ( ) ; KeyValueQuery keyValueQuery = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_1"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . LTE ) . build ( ) ; predicates . add ( keyValueQuery ) ; numValue = Value . 
newBuilder ( ) . setNumberValue ( 0.31 ) . build ( ) ; KeyValueQuery keyValueQuery2 = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_2"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . EQ ) . build ( ) ; predicates . add ( keyValueQuery2 ) ; FindProjects findProjects = FindProjects . newBuilder ( ) . addAllProjectIds ( projectMap . keySet ( ) ) . addAllPredicates ( predicates ) . setIdsOnly ( true ) . build ( ) ; FindProjects . Response response = projectServiceStub . findProjects ( findProjects ) ; LOGGER . info ( ""FindProjects Response : "" + response . getProjectsCount ( ) ) ; assertEquals ( ""Project count not match with expected project count"" , 1 , response . getProjectsCount ( ) ) ; assertEquals ( ""Project not match with expected project"" , project2 . getId ( ) , response . getProjectsList ( ) . get ( 0 ) . getId ( ) ) ; assertNotEquals ( ""Project not match with expected project"" , project2 , response . getProjectsList ( ) . get ( 0 ) ) ; assertEquals ( ""Total records count not matched with expected records count"" , 1 , response . getTotalRecords ( ) ) ; LOGGER . info ( ""FindProjects by multiple attribute condition test stop................................"" ) ; } +" +294,"@ Test public void findProjectsByMultipleAttributeConditionTest ( ) { LOGGER . info ( ""FindProjects by multiple attribute condition test start................................"" ) ; List < KeyValueQuery > predicates = new ArrayList < > ( ) ; Value numValue = Value . newBuilder ( ) . setNumberValue ( 0.6543210 ) . build ( ) ; KeyValueQuery keyValueQuery = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_1"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . LTE ) . build ( ) ; predicates . add ( keyValueQuery ) ; numValue = Value . newBuilder ( ) . setNumberValue ( 0.31 ) . build ( ) ; KeyValueQuery keyValueQuery2 = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_2"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . EQ ) . build ( ) ; predicates . add ( keyValueQuery2 ) ; FindProjects findProjects = FindProjects . newBuilder ( ) . addAllProjectIds ( projectMap . keySet ( ) ) . addAllPredicates ( predicates ) . setIdsOnly ( true ) . build ( ) ; FindProjects . Response response = projectServiceStub . findProjects ( findProjects ) ; LOGGER . info ( ""FindProjects Response : "" + response . getProjectsCount ( ) ) ; assertEquals ( ""Project count not match with expected project count"" , 1 , response . getProjectsCount ( ) ) ; assertEquals ( ""Project not match with expected project"" , project2 . getId ( ) , response . getProjectsList ( ) . get ( 0 ) . getId ( ) ) ; assertNotEquals ( ""Project not match with expected project"" , project2 , response . getProjectsList ( ) . get ( 0 ) ) ; assertEquals ( ""Total records count not matched with expected records count"" , 1 , response . getTotalRecords ( ) ) ; } +","@ Test public void findProjectsByMultipleAttributeConditionTest ( ) { LOGGER . info ( ""FindProjects by multiple attribute condition test start................................"" ) ; List < KeyValueQuery > predicates = new ArrayList < > ( ) ; Value numValue = Value . newBuilder ( ) . setNumberValue ( 0.6543210 ) . build ( ) ; KeyValueQuery keyValueQuery = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_1"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . LTE ) . build ( ) ; predicates . add ( keyValueQuery ) ; numValue = Value . newBuilder ( ) . 
setNumberValue ( 0.31 ) . build ( ) ; KeyValueQuery keyValueQuery2 = KeyValueQuery . newBuilder ( ) . setKey ( ""attributes.attribute_2"" ) . setValue ( numValue ) . setOperator ( OperatorEnum . Operator . EQ ) . build ( ) ; predicates . add ( keyValueQuery2 ) ; FindProjects findProjects = FindProjects . newBuilder ( ) . addAllProjectIds ( projectMap . keySet ( ) ) . addAllPredicates ( predicates ) . setIdsOnly ( true ) . build ( ) ; FindProjects . Response response = projectServiceStub . findProjects ( findProjects ) ; LOGGER . info ( ""FindProjects Response : "" + response . getProjectsCount ( ) ) ; assertEquals ( ""Project count not match with expected project count"" , 1 , response . getProjectsCount ( ) ) ; assertEquals ( ""Project not match with expected project"" , project2 . getId ( ) , response . getProjectsList ( ) . get ( 0 ) . getId ( ) ) ; assertNotEquals ( ""Project not match with expected project"" , project2 , response . getProjectsList ( ) . get ( 0 ) ) ; assertEquals ( ""Total records count not matched with expected records count"" , 1 , response . getTotalRecords ( ) ) ; LOGGER . info ( ""FindProjects by multiple attribute condition test stop................................"" ) ; } +" +295,"@ POST @ Path ( ""/setExpire"" ) @ Transactional ( propagation = Propagation . REQUIRED , isolation = Isolation . DEFAULT , rollbackFor = Throwable . class ) public Response setExpire ( @ Context HttpServletRequest req , JsonNode json ) { Message message = null ; try { String userName = SecurityFilter . getLoginUsername ( req ) ; if ( StringUtils . isEmpty ( userName ) ) { throw new UDFException ( ""UserName is Empty!"" ) ; } Long udfId = json . get ( ""udfId"" ) . getLongValue ( ) ; if ( StringUtils . isEmpty ( udfId ) ) { throw new UDFException ( ""udfId is Empty!"" ) ; } String udfName = json . get ( ""udfName"" ) . getTextValue ( ) ; if ( StringUtils . isEmpty ( udfName ) ) { throw new UDFException ( ""udfName is Empty!"" ) ; } Long shareUDFId = udfService . getAllShareUDFInfoIdByUDFId ( userName , udfName ) ; udfService . setSharedUDFInfoExpire ( shareUDFId ) ; udfService . setUDFSharedInfo ( false , udfId ) ; message = Message . ok ( ) ; } catch ( Throwable e ) { message = Message . error ( e . getMessage ( ) ) ; } return Message . messageToResponse ( message ) ; } +","@ POST @ Path ( ""/setExpire"" ) @ Transactional ( propagation = Propagation . REQUIRED , isolation = Isolation . DEFAULT , rollbackFor = Throwable . class ) public Response setExpire ( @ Context HttpServletRequest req , JsonNode json ) { Message message = null ; try { String userName = SecurityFilter . getLoginUsername ( req ) ; if ( StringUtils . isEmpty ( userName ) ) { throw new UDFException ( ""UserName is Empty!"" ) ; } Long udfId = json . get ( ""udfId"" ) . getLongValue ( ) ; if ( StringUtils . isEmpty ( udfId ) ) { throw new UDFException ( ""udfId is Empty!"" ) ; } String udfName = json . get ( ""udfName"" ) . getTextValue ( ) ; if ( StringUtils . isEmpty ( udfName ) ) { throw new UDFException ( ""udfName is Empty!"" ) ; } Long shareUDFId = udfService . getAllShareUDFInfoIdByUDFId ( userName , udfName ) ; udfService . setSharedUDFInfoExpire ( shareUDFId ) ; udfService . setUDFSharedInfo ( false , udfId ) ; message = Message . ok ( ) ; } catch ( Throwable e ) { logger . error ( ""Failed to setExpire: "" , e ) ; message = Message . error ( e . getMessage ( ) ) ; } return Message . 
messageToResponse ( message ) ; } +" +296,"public Page < WikiPage > getWikiPageWikiPagesPage ( Long parentWikiPageId ) throws Exception { HttpInvoker . HttpResponse httpResponse = getWikiPageWikiPagesPageHttpResponse ( parentWikiPageId ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return Page . of ( content , WikiPageSerDes :: toDTO ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } +","public Page < WikiPage > getWikiPageWikiPagesPage ( Long parentWikiPageId ) throws Exception { HttpInvoker . HttpResponse httpResponse = getWikiPageWikiPagesPageHttpResponse ( parentWikiPageId ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return Page . of ( content , WikiPageSerDes :: toDTO ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } +" +297,"public Page < WikiPage > getWikiPageWikiPagesPage ( Long parentWikiPageId ) throws Exception { HttpInvoker . HttpResponse httpResponse = getWikiPageWikiPagesPageHttpResponse ( parentWikiPageId ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return Page . of ( content , WikiPageSerDes :: toDTO ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } +","public Page < WikiPage > getWikiPageWikiPagesPage ( Long parentWikiPageId ) throws Exception { HttpInvoker . 
HttpResponse httpResponse = getWikiPageWikiPagesPageHttpResponse ( parentWikiPageId ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return Page . of ( content , WikiPageSerDes :: toDTO ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } +" +298,"public Page < WikiPage > getWikiPageWikiPagesPage ( Long parentWikiPageId ) throws Exception { HttpInvoker . HttpResponse httpResponse = getWikiPageWikiPagesPageHttpResponse ( parentWikiPageId ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return Page . of ( content , WikiPageSerDes :: toDTO ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } +","public Page < WikiPage > getWikiPageWikiPagesPage ( Long parentWikiPageId ) throws Exception { HttpInvoker . HttpResponse httpResponse = getWikiPageWikiPagesPageHttpResponse ( parentWikiPageId ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return Page . of ( content , WikiPageSerDes :: toDTO ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } +" +299,"public Page < WikiPage > getWikiPageWikiPagesPage ( Long parentWikiPageId ) throws Exception { HttpInvoker . HttpResponse httpResponse = getWikiPageWikiPagesPageHttpResponse ( parentWikiPageId ) ; String content = httpResponse . 
getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return Page . of ( content , WikiPageSerDes :: toDTO ) ; } catch ( Exception e ) { throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } +","public Page < WikiPage > getWikiPageWikiPagesPage ( Long parentWikiPageId ) throws Exception { HttpInvoker . HttpResponse httpResponse = getWikiPageWikiPagesPageHttpResponse ( parentWikiPageId ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return Page . of ( content , WikiPageSerDes :: toDTO ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } +" +300,"public String getSummary ( long classPK , PortletRequest portletRequest , PortletResponse portletResponse ) { try { AssetRenderer < ? > assetRenderer = getAssetRenderer ( classPK ) ; if ( assetRenderer != null ) { return assetRenderer . getSummary ( portletRequest , portletResponse ) ; } } catch ( Exception exception ) { if ( _log . isWarnEnabled ( ) ) { } } return null ; } +","public String getSummary ( long classPK , PortletRequest portletRequest , PortletResponse portletResponse ) { try { AssetRenderer < ? > assetRenderer = getAssetRenderer ( classPK ) ; if ( assetRenderer != null ) { return assetRenderer . getSummary ( portletRequest , portletResponse ) ; } } catch ( Exception exception ) { if ( _log . isWarnEnabled ( ) ) { _log . warn ( exception , exception ) ; } } return null ; } +" +301,"public void updateExperimentRunName ( String experimentRunId , String experimentRunName ) { try ( Session session = modelDBHibernateUtil . getSessionFactory ( ) . openSession ( ) ) { ExperimentRunEntity experimentRunEntity = session . load ( ExperimentRunEntity . class , experimentRunId , LockMode . PESSIMISTIC_WRITE ) ; experimentRunEntity . setName ( experimentRunName ) ; long currentTimestamp = Calendar . getInstance ( ) . getTimeInMillis ( ) ; experimentRunEntity . setDate_updated ( currentTimestamp ) ; Transaction transaction = session . beginTransaction ( ) ; session . update ( experimentRunEntity ) ; transaction . commit ( ) ; } catch ( Exception ex ) { if ( ModelDBUtils . 
needToRetry ( ex ) ) { updateExperimentRunName ( experimentRunId , experimentRunName ) ; } else { throw ex ; } } } +","public void updateExperimentRunName ( String experimentRunId , String experimentRunName ) { try ( Session session = modelDBHibernateUtil . getSessionFactory ( ) . openSession ( ) ) { ExperimentRunEntity experimentRunEntity = session . load ( ExperimentRunEntity . class , experimentRunId , LockMode . PESSIMISTIC_WRITE ) ; experimentRunEntity . setName ( experimentRunName ) ; long currentTimestamp = Calendar . getInstance ( ) . getTimeInMillis ( ) ; experimentRunEntity . setDate_updated ( currentTimestamp ) ; Transaction transaction = session . beginTransaction ( ) ; session . update ( experimentRunEntity ) ; transaction . commit ( ) ; LOGGER . debug ( ""ExperimentRun name updated successfully"" ) ; } catch ( Exception ex ) { if ( ModelDBUtils . needToRetry ( ex ) ) { updateExperimentRunName ( experimentRunId , experimentRunName ) ; } else { throw ex ; } } } +"
+302,"public String getAspectSpeed ( @ Nonnull String aspect , @ Nonnull jmri . SignalSystem system ) { String property = ( String ) system . getProperty ( aspect , ""speed"" ) ; return property ; } +","public String getAspectSpeed ( @ Nonnull String aspect , @ Nonnull jmri . SignalSystem system ) { String property = ( String ) system . getProperty ( aspect , ""speed"" ) ; log . debug ( ""getAspectSpeed: aspect={}, speed={}"" , aspect , property ) ; return property ; } +"
+303,"public static com . liferay . portal . kernel . model . EmailAddressSoap getEmailAddress ( long emailAddressId ) throws RemoteException { try { com . liferay . portal . kernel . model . EmailAddress returnValue = EmailAddressServiceUtil . getEmailAddress ( emailAddressId ) ; return com . liferay . portal . kernel . model . EmailAddressSoap . toSoapModel ( returnValue ) ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } } +","public static com . liferay . portal . kernel . model . EmailAddressSoap getEmailAddress ( long emailAddressId ) throws RemoteException { try { com . liferay . portal . kernel . model . EmailAddress returnValue = EmailAddressServiceUtil . getEmailAddress ( emailAddressId ) ; return com . liferay . portal . kernel . model . EmailAddressSoap . toSoapModel ( returnValue ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } } +"
+304,"@ AfterClass public static void tearDown ( ) { File file = new File ( ""audit.log"" ) ; if ( FileUtils . deleteQuietly ( file ) ) { } else { LOG . info ( ""audit.log could not be deleted."" ) ; } } +","@ AfterClass public static void tearDown ( ) { File file = new File ( ""audit.log"" ) ; if ( FileUtils . deleteQuietly ( file ) ) { LOG . info ( ""{} has been deleted as all tests have completed."" , file . getName ( ) ) ; } else { LOG . info ( ""audit.log could not be deleted."" ) ; } } +"
+305,"@ AfterClass public static void tearDown ( ) { File file = new File ( ""audit.log"" ) ; if ( FileUtils . deleteQuietly ( file ) ) { LOG . info ( ""{} has been deleted as all tests have completed."" , file . getName ( ) ) ; } else { } } +","@ AfterClass public static void tearDown ( ) { File file = new File ( ""audit.log"" ) ; if ( FileUtils . deleteQuietly ( file ) ) { LOG . info ( ""{} has been deleted as all tests have completed."" , file . getName ( ) ) ; } else { LOG . info ( ""audit.log could not be deleted."" ) ; } } +"
+306,"private boolean downloadFernflowerJar ( PackageConfig packageConfig , Path fernflowerJar ) { String downloadURL = String . format ( ""https://jitpack.io/com/github/fesh0r/fernflower/%s/fernflower-%s.jar"" , packageConfig . fernflower . hash , packageConfig . fernflower . hash ) ; try ( BufferedInputStream in = new BufferedInputStream ( new URL ( downloadURL ) . openStream ( ) ) ; FileOutputStream fileOutputStream = new FileOutputStream ( fernflowerJar . toFile ( ) ) ) { byte [ ] dataBuffer = new byte [ 1024 ] ; int bytesRead ; while ( ( bytesRead = in . read ( dataBuffer , 0 , 1024 ) ) != - 1 ) { fileOutputStream . write ( dataBuffer , 0 , bytesRead ) ; } return true ; } catch ( IOException e ) { return false ; } } +","private boolean downloadFernflowerJar ( PackageConfig packageConfig , Path fernflowerJar ) { String downloadURL = String . format ( ""https://jitpack.io/com/github/fesh0r/fernflower/%s/fernflower-%s.jar"" , packageConfig . fernflower . hash , packageConfig . fernflower . hash ) ; try ( BufferedInputStream in = new BufferedInputStream ( new URL ( downloadURL ) . openStream ( ) ) ; FileOutputStream fileOutputStream = new FileOutputStream ( fernflowerJar . toFile ( ) ) ) { byte [ ] dataBuffer = new byte [ 1024 ] ; int bytesRead ; while ( ( bytesRead = in . read ( dataBuffer , 0 , 1024 ) ) != - 1 ) { fileOutputStream . write ( dataBuffer , 0 , bytesRead ) ; } return true ; } catch ( IOException e ) { log . error ( ""Unable to download Fernflower from "" + downloadURL , e ) ; return false ; } } +"
+307,"public SpiResponse < List < SpiTrustedBeneficiaries > > requestTrustedBeneficiariesList ( @ NotNull SpiContextData spiContextData , SpiAccountReference accountReference , @ NotNull SpiAccountConsent spiAccountConsent , @ NotNull SpiAspspConsentDataProvider spiAspspConsentDataProvider ) { SpiTrustedBeneficiaries trustedBeneficiaries = new SpiTrustedBeneficiaries ( ""mocked trusted beneficiaries id"" , SpiAccountReference . builder ( ) . iban ( ""mocked debtor iban"" ) . build ( ) , SpiAccountReference . builder ( ) . iban ( ""mocked creditor iban"" ) . build ( ) , ""mocked creditor agent"" , ""mocked creditor name"" , ""mocked creditor alias"" , ""mocked creditor id"" , new SpiAddress ( ""mocked street name"" , ""mocked building number"" , ""mocked town name"" , ""mocked post code"" , ""mocked country"" ) ) ; return SpiResponse .
< List < SpiTrustedBeneficiaries > > builder ( ) . payload ( Collections . singletonList ( trustedBeneficiaries ) ) . build ( ) ; } +" +308,"public void createOverallProcess ( ) throws ProcessGenerationException , IOException , DAOException , CommandException { final long begin = System . nanoTime ( ) ; ProcessGenerator processGenerator = new ProcessGenerator ( ) ; processGenerator . generateProcess ( templateId , projectId ) ; overallProcess = processGenerator . getGeneratedProcess ( ) ; overallProcess . setTitle ( getTitle ( ) ) ; ProcessService . checkTasks ( overallProcess , overallWorkpiece . getRootElement ( ) . getType ( ) ) ; processService . saveToDatabase ( overallProcess ) ; ServiceManager . getFileService ( ) . createProcessLocation ( overallProcess ) ; overallWorkpiece . setId ( overallProcess . getId ( ) . toString ( ) ) ; overallWorkpiece . getRootElement ( ) . getMetadata ( ) . addAll ( overallMetadata ) ; addToBatch ( overallProcess ) ; logger . info ( ""Process {} (ID {}) successfully created."" , overallProcess . getTitle ( ) , overallProcess . getId ( ) ) ; if ( logger . isTraceEnabled ( ) ) { logger . trace ( ""Creating {} took {} ms."" , overallProcess . getTitle ( ) , TimeUnit . NANOSECONDS . toMillis ( System . nanoTime ( ) - begin ) ) ; } } +","public void createOverallProcess ( ) throws ProcessGenerationException , IOException , DAOException , CommandException { final long begin = System . nanoTime ( ) ; logger . info ( ""Creating overall process {}..."" , title ) ; ProcessGenerator processGenerator = new ProcessGenerator ( ) ; processGenerator . generateProcess ( templateId , projectId ) ; overallProcess = processGenerator . getGeneratedProcess ( ) ; overallProcess . setTitle ( getTitle ( ) ) ; ProcessService . checkTasks ( overallProcess , overallWorkpiece . getRootElement ( ) . getType ( ) ) ; processService . saveToDatabase ( overallProcess ) ; ServiceManager . getFileService ( ) . createProcessLocation ( overallProcess ) ; overallWorkpiece . setId ( overallProcess . getId ( ) . toString ( ) ) ; overallWorkpiece . getRootElement ( ) . getMetadata ( ) . addAll ( overallMetadata ) ; addToBatch ( overallProcess ) ; logger . info ( ""Process {} (ID {}) successfully created."" , overallProcess . getTitle ( ) , overallProcess . getId ( ) ) ; if ( logger . isTraceEnabled ( ) ) { logger . trace ( ""Creating {} took {} ms."" , overallProcess . getTitle ( ) , TimeUnit . NANOSECONDS . toMillis ( System . nanoTime ( ) - begin ) ) ; } } +" +309,"public void createOverallProcess ( ) throws ProcessGenerationException , IOException , DAOException , CommandException { final long begin = System . nanoTime ( ) ; logger . info ( ""Creating overall process {}..."" , title ) ; ProcessGenerator processGenerator = new ProcessGenerator ( ) ; processGenerator . generateProcess ( templateId , projectId ) ; overallProcess = processGenerator . getGeneratedProcess ( ) ; overallProcess . setTitle ( getTitle ( ) ) ; ProcessService . checkTasks ( overallProcess , overallWorkpiece . getRootElement ( ) . getType ( ) ) ; processService . saveToDatabase ( overallProcess ) ; ServiceManager . getFileService ( ) . createProcessLocation ( overallProcess ) ; overallWorkpiece . setId ( overallProcess . getId ( ) . toString ( ) ) ; overallWorkpiece . getRootElement ( ) . getMetadata ( ) . addAll ( overallMetadata ) ; addToBatch ( overallProcess ) ; if ( logger . isTraceEnabled ( ) ) { logger . trace ( ""Creating {} took {} ms."" , overallProcess . getTitle ( ) , TimeUnit . NANOSECONDS . 
toMillis ( System . nanoTime ( ) - begin ) ) ; } } +","public void createOverallProcess ( ) throws ProcessGenerationException , IOException , DAOException , CommandException { final long begin = System . nanoTime ( ) ; logger . info ( ""Creating overall process {}..."" , title ) ; ProcessGenerator processGenerator = new ProcessGenerator ( ) ; processGenerator . generateProcess ( templateId , projectId ) ; overallProcess = processGenerator . getGeneratedProcess ( ) ; overallProcess . setTitle ( getTitle ( ) ) ; ProcessService . checkTasks ( overallProcess , overallWorkpiece . getRootElement ( ) . getType ( ) ) ; processService . saveToDatabase ( overallProcess ) ; ServiceManager . getFileService ( ) . createProcessLocation ( overallProcess ) ; overallWorkpiece . setId ( overallProcess . getId ( ) . toString ( ) ) ; overallWorkpiece . getRootElement ( ) . getMetadata ( ) . addAll ( overallMetadata ) ; addToBatch ( overallProcess ) ; logger . info ( ""Process {} (ID {}) successfully created."" , overallProcess . getTitle ( ) , overallProcess . getId ( ) ) ; if ( logger . isTraceEnabled ( ) ) { logger . trace ( ""Creating {} took {} ms."" , overallProcess . getTitle ( ) , TimeUnit . NANOSECONDS . toMillis ( System . nanoTime ( ) - begin ) ) ; } } +" +310,"public void createOverallProcess ( ) throws ProcessGenerationException , IOException , DAOException , CommandException { final long begin = System . nanoTime ( ) ; logger . info ( ""Creating overall process {}..."" , title ) ; ProcessGenerator processGenerator = new ProcessGenerator ( ) ; processGenerator . generateProcess ( templateId , projectId ) ; overallProcess = processGenerator . getGeneratedProcess ( ) ; overallProcess . setTitle ( getTitle ( ) ) ; ProcessService . checkTasks ( overallProcess , overallWorkpiece . getRootElement ( ) . getType ( ) ) ; processService . saveToDatabase ( overallProcess ) ; ServiceManager . getFileService ( ) . createProcessLocation ( overallProcess ) ; overallWorkpiece . setId ( overallProcess . getId ( ) . toString ( ) ) ; overallWorkpiece . getRootElement ( ) . getMetadata ( ) . addAll ( overallMetadata ) ; addToBatch ( overallProcess ) ; logger . info ( ""Process {} (ID {}) successfully created."" , overallProcess . getTitle ( ) , overallProcess . getId ( ) ) ; if ( logger . isTraceEnabled ( ) ) { } } +","public void createOverallProcess ( ) throws ProcessGenerationException , IOException , DAOException , CommandException { final long begin = System . nanoTime ( ) ; logger . info ( ""Creating overall process {}..."" , title ) ; ProcessGenerator processGenerator = new ProcessGenerator ( ) ; processGenerator . generateProcess ( templateId , projectId ) ; overallProcess = processGenerator . getGeneratedProcess ( ) ; overallProcess . setTitle ( getTitle ( ) ) ; ProcessService . checkTasks ( overallProcess , overallWorkpiece . getRootElement ( ) . getType ( ) ) ; processService . saveToDatabase ( overallProcess ) ; ServiceManager . getFileService ( ) . createProcessLocation ( overallProcess ) ; overallWorkpiece . setId ( overallProcess . getId ( ) . toString ( ) ) ; overallWorkpiece . getRootElement ( ) . getMetadata ( ) . addAll ( overallMetadata ) ; addToBatch ( overallProcess ) ; logger . info ( ""Process {} (ID {}) successfully created."" , overallProcess . getTitle ( ) , overallProcess . getId ( ) ) ; if ( logger . isTraceEnabled ( ) ) { logger . trace ( ""Creating {} took {} ms."" , overallProcess . getTitle ( ) , TimeUnit . NANOSECONDS . toMillis ( System . 
nanoTime ( ) - begin ) ) ; } } +" +311,"private boolean validateDefNode ( CssDef def , String atRule ) { if ( insideMediaAtRule ) { if ( lenient ) { wrongDefNodes . add ( def ) ; return false ; } else { treeLogger . log ( Type . ERROR , ""A "" + atRule + "" is not allowed inside a @media at-rule. ["" + def + ""]"" ) ; throw new Css2GssConversionException ( ""A "" + atRule + "" is not allowed inside a @media "" + ""at-rule."" ) ; } } return true ; } +","private boolean validateDefNode ( CssDef def , String atRule ) { if ( insideMediaAtRule ) { if ( lenient ) { treeLogger . log ( Type . WARN , ""A "" + atRule + "" is not allowed inside a @media at-rule."" + ""The following "" + atRule + "" ["" + def + ""] will be moved in the upper scope"" ) ; wrongDefNodes . add ( def ) ; return false ; } else { treeLogger . log ( Type . ERROR , ""A "" + atRule + "" is not allowed inside a @media at-rule. ["" + def + ""]"" ) ; throw new Css2GssConversionException ( ""A "" + atRule + "" is not allowed inside a @media "" + ""at-rule."" ) ; } } return true ; } +" +312,"private boolean validateDefNode ( CssDef def , String atRule ) { if ( insideMediaAtRule ) { if ( lenient ) { treeLogger . log ( Type . WARN , ""A "" + atRule + "" is not allowed inside a @media at-rule."" + ""The following "" + atRule + "" ["" + def + ""] will be moved in the upper scope"" ) ; wrongDefNodes . add ( def ) ; return false ; } else { throw new Css2GssConversionException ( ""A "" + atRule + "" is not allowed inside a @media "" + ""at-rule."" ) ; } } return true ; } +","private boolean validateDefNode ( CssDef def , String atRule ) { if ( insideMediaAtRule ) { if ( lenient ) { treeLogger . log ( Type . WARN , ""A "" + atRule + "" is not allowed inside a @media at-rule."" + ""The following "" + atRule + "" ["" + def + ""] will be moved in the upper scope"" ) ; wrongDefNodes . add ( def ) ; return false ; } else { treeLogger . log ( Type . ERROR , ""A "" + atRule + "" is not allowed inside a @media at-rule. ["" + def + ""]"" ) ; throw new Css2GssConversionException ( ""A "" + atRule + "" is not allowed inside a @media "" + ""at-rule."" ) ; } } return true ; } +" +313,"public void retrieve ( String depositId , File working , Progress progress , String optFilePath ) throws Exception { String fileDir = TivoliStorageManager . TEMP_PATH_PREFIX + ""/"" + depositId ; String filePath = fileDir + ""/"" + working . getName ( ) ; if ( ! Files . exists ( Paths . get ( fileDir ) ) ) { Files . createDirectory ( Paths . get ( fileDir ) ) ; } for ( int r = 0 ; r < TivoliStorageManager . maxRetries ; r ++ ) { ProcessBuilder pb = new ProcessBuilder ( ""dsmc"" , ""retrieve"" , filePath , ""-description="" + depositId , ""-optfile="" + optFilePath , ""-replace=true"" ) ; Process p = pb . start ( ) ; p . waitFor ( ) ; if ( p . exitValue ( ) != 0 ) { logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed using "" + optFilePath + "". "" ) ; InputStream error = p . getErrorStream ( ) ; for ( int i = 0 ; i < error . available ( ) ; i ++ ) { logger . info ( """" + error . read ( ) ) ; } if ( r == ( TivoliStorageManager . maxRetries - 1 ) ) { throw new Exception ( ""Retrieval of "" + working . getName ( ) + "" failed. "" ) ; } logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed. Retrying in "" + TivoliStorageManager . retryTime + "" mins"" ) ; TimeUnit . MINUTES . sleep ( TivoliStorageManager . retryTime ) ; } else { if ( Files . exists ( Paths . get ( filePath ) ) ) { Files . move ( Paths . get ( filePath ) , Paths . 
get ( working . getAbsolutePath ( ) ) , StandardCopyOption . REPLACE_EXISTING ) ; } Files . delete ( Paths . get ( fileDir ) ) ; break ; } } } +","public void retrieve ( String depositId , File working , Progress progress , String optFilePath ) throws Exception { String fileDir = TivoliStorageManager . TEMP_PATH_PREFIX + ""/"" + depositId ; String filePath = fileDir + ""/"" + working . getName ( ) ; if ( ! Files . exists ( Paths . get ( fileDir ) ) ) { Files . createDirectory ( Paths . get ( fileDir ) ) ; } logger . info ( ""Retrieve command is "" + ""dsmc "" + "" retrieve "" + filePath + "" -description="" + depositId + "" -optfile="" + optFilePath + ""-replace=true"" ) ; for ( int r = 0 ; r < TivoliStorageManager . maxRetries ; r ++ ) { ProcessBuilder pb = new ProcessBuilder ( ""dsmc"" , ""retrieve"" , filePath , ""-description="" + depositId , ""-optfile="" + optFilePath , ""-replace=true"" ) ; Process p = pb . start ( ) ; p . waitFor ( ) ; if ( p . exitValue ( ) != 0 ) { logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed using "" + optFilePath + "". "" ) ; InputStream error = p . getErrorStream ( ) ; for ( int i = 0 ; i < error . available ( ) ; i ++ ) { logger . info ( """" + error . read ( ) ) ; } if ( r == ( TivoliStorageManager . maxRetries - 1 ) ) { throw new Exception ( ""Retrieval of "" + working . getName ( ) + "" failed. "" ) ; } logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed. Retrying in "" + TivoliStorageManager . retryTime + "" mins"" ) ; TimeUnit . MINUTES . sleep ( TivoliStorageManager . retryTime ) ; } else { if ( Files . exists ( Paths . get ( filePath ) ) ) { Files . move ( Paths . get ( filePath ) , Paths . get ( working . getAbsolutePath ( ) ) , StandardCopyOption . REPLACE_EXISTING ) ; } Files . delete ( Paths . get ( fileDir ) ) ; break ; } } } +" +314,"public void retrieve ( String depositId , File working , Progress progress , String optFilePath ) throws Exception { String fileDir = TivoliStorageManager . TEMP_PATH_PREFIX + ""/"" + depositId ; String filePath = fileDir + ""/"" + working . getName ( ) ; if ( ! Files . exists ( Paths . get ( fileDir ) ) ) { Files . createDirectory ( Paths . get ( fileDir ) ) ; } logger . info ( ""Retrieve command is "" + ""dsmc "" + "" retrieve "" + filePath + "" -description="" + depositId + "" -optfile="" + optFilePath + ""-replace=true"" ) ; for ( int r = 0 ; r < TivoliStorageManager . maxRetries ; r ++ ) { ProcessBuilder pb = new ProcessBuilder ( ""dsmc"" , ""retrieve"" , filePath , ""-description="" + depositId , ""-optfile="" + optFilePath , ""-replace=true"" ) ; Process p = pb . start ( ) ; p . waitFor ( ) ; if ( p . exitValue ( ) != 0 ) { InputStream error = p . getErrorStream ( ) ; for ( int i = 0 ; i < error . available ( ) ; i ++ ) { logger . info ( """" + error . read ( ) ) ; } if ( r == ( TivoliStorageManager . maxRetries - 1 ) ) { throw new Exception ( ""Retrieval of "" + working . getName ( ) + "" failed. "" ) ; } logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed. Retrying in "" + TivoliStorageManager . retryTime + "" mins"" ) ; TimeUnit . MINUTES . sleep ( TivoliStorageManager . retryTime ) ; } else { if ( Files . exists ( Paths . get ( filePath ) ) ) { Files . move ( Paths . get ( filePath ) , Paths . get ( working . getAbsolutePath ( ) ) , StandardCopyOption . REPLACE_EXISTING ) ; } Files . delete ( Paths . 
get ( fileDir ) ) ; break ; } } } +","public void retrieve ( String depositId , File working , Progress progress , String optFilePath ) throws Exception { String fileDir = TivoliStorageManager . TEMP_PATH_PREFIX + ""/"" + depositId ; String filePath = fileDir + ""/"" + working . getName ( ) ; if ( ! Files . exists ( Paths . get ( fileDir ) ) ) { Files . createDirectory ( Paths . get ( fileDir ) ) ; } logger . info ( ""Retrieve command is "" + ""dsmc "" + "" retrieve "" + filePath + "" -description="" + depositId + "" -optfile="" + optFilePath + ""-replace=true"" ) ; for ( int r = 0 ; r < TivoliStorageManager . maxRetries ; r ++ ) { ProcessBuilder pb = new ProcessBuilder ( ""dsmc"" , ""retrieve"" , filePath , ""-description="" + depositId , ""-optfile="" + optFilePath , ""-replace=true"" ) ; Process p = pb . start ( ) ; p . waitFor ( ) ; if ( p . exitValue ( ) != 0 ) { logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed using "" + optFilePath + "". "" ) ; InputStream error = p . getErrorStream ( ) ; for ( int i = 0 ; i < error . available ( ) ; i ++ ) { logger . info ( """" + error . read ( ) ) ; } if ( r == ( TivoliStorageManager . maxRetries - 1 ) ) { throw new Exception ( ""Retrieval of "" + working . getName ( ) + "" failed. "" ) ; } logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed. Retrying in "" + TivoliStorageManager . retryTime + "" mins"" ) ; TimeUnit . MINUTES . sleep ( TivoliStorageManager . retryTime ) ; } else { if ( Files . exists ( Paths . get ( filePath ) ) ) { Files . move ( Paths . get ( filePath ) , Paths . get ( working . getAbsolutePath ( ) ) , StandardCopyOption . REPLACE_EXISTING ) ; } Files . delete ( Paths . get ( fileDir ) ) ; break ; } } } +" +315,"public void retrieve ( String depositId , File working , Progress progress , String optFilePath ) throws Exception { String fileDir = TivoliStorageManager . TEMP_PATH_PREFIX + ""/"" + depositId ; String filePath = fileDir + ""/"" + working . getName ( ) ; if ( ! Files . exists ( Paths . get ( fileDir ) ) ) { Files . createDirectory ( Paths . get ( fileDir ) ) ; } logger . info ( ""Retrieve command is "" + ""dsmc "" + "" retrieve "" + filePath + "" -description="" + depositId + "" -optfile="" + optFilePath + ""-replace=true"" ) ; for ( int r = 0 ; r < TivoliStorageManager . maxRetries ; r ++ ) { ProcessBuilder pb = new ProcessBuilder ( ""dsmc"" , ""retrieve"" , filePath , ""-description="" + depositId , ""-optfile="" + optFilePath , ""-replace=true"" ) ; Process p = pb . start ( ) ; p . waitFor ( ) ; if ( p . exitValue ( ) != 0 ) { logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed using "" + optFilePath + "". "" ) ; InputStream error = p . getErrorStream ( ) ; for ( int i = 0 ; i < error . available ( ) ; i ++ ) { } if ( r == ( TivoliStorageManager . maxRetries - 1 ) ) { throw new Exception ( ""Retrieval of "" + working . getName ( ) + "" failed. "" ) ; } logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed. Retrying in "" + TivoliStorageManager . retryTime + "" mins"" ) ; TimeUnit . MINUTES . sleep ( TivoliStorageManager . retryTime ) ; } else { if ( Files . exists ( Paths . get ( filePath ) ) ) { Files . move ( Paths . get ( filePath ) , Paths . get ( working . getAbsolutePath ( ) ) , StandardCopyOption . REPLACE_EXISTING ) ; } Files . delete ( Paths . get ( fileDir ) ) ; break ; } } } +","public void retrieve ( String depositId , File working , Progress progress , String optFilePath ) throws Exception { String fileDir = TivoliStorageManager . 
TEMP_PATH_PREFIX + ""/"" + depositId ; String filePath = fileDir + ""/"" + working . getName ( ) ; if ( ! Files . exists ( Paths . get ( fileDir ) ) ) { Files . createDirectory ( Paths . get ( fileDir ) ) ; } logger . info ( ""Retrieve command is "" + ""dsmc "" + "" retrieve "" + filePath + "" -description="" + depositId + "" -optfile="" + optFilePath + ""-replace=true"" ) ; for ( int r = 0 ; r < TivoliStorageManager . maxRetries ; r ++ ) { ProcessBuilder pb = new ProcessBuilder ( ""dsmc"" , ""retrieve"" , filePath , ""-description="" + depositId , ""-optfile="" + optFilePath , ""-replace=true"" ) ; Process p = pb . start ( ) ; p . waitFor ( ) ; if ( p . exitValue ( ) != 0 ) { logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed using "" + optFilePath + "". "" ) ; InputStream error = p . getErrorStream ( ) ; for ( int i = 0 ; i < error . available ( ) ; i ++ ) { logger . info ( """" + error . read ( ) ) ; } if ( r == ( TivoliStorageManager . maxRetries - 1 ) ) { throw new Exception ( ""Retrieval of "" + working . getName ( ) + "" failed. "" ) ; } logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed. Retrying in "" + TivoliStorageManager . retryTime + "" mins"" ) ; TimeUnit . MINUTES . sleep ( TivoliStorageManager . retryTime ) ; } else { if ( Files . exists ( Paths . get ( filePath ) ) ) { Files . move ( Paths . get ( filePath ) , Paths . get ( working . getAbsolutePath ( ) ) , StandardCopyOption . REPLACE_EXISTING ) ; } Files . delete ( Paths . get ( fileDir ) ) ; break ; } } } +" +316,"public void retrieve ( String depositId , File working , Progress progress , String optFilePath ) throws Exception { String fileDir = TivoliStorageManager . TEMP_PATH_PREFIX + ""/"" + depositId ; String filePath = fileDir + ""/"" + working . getName ( ) ; if ( ! Files . exists ( Paths . get ( fileDir ) ) ) { Files . createDirectory ( Paths . get ( fileDir ) ) ; } logger . info ( ""Retrieve command is "" + ""dsmc "" + "" retrieve "" + filePath + "" -description="" + depositId + "" -optfile="" + optFilePath + ""-replace=true"" ) ; for ( int r = 0 ; r < TivoliStorageManager . maxRetries ; r ++ ) { ProcessBuilder pb = new ProcessBuilder ( ""dsmc"" , ""retrieve"" , filePath , ""-description="" + depositId , ""-optfile="" + optFilePath , ""-replace=true"" ) ; Process p = pb . start ( ) ; p . waitFor ( ) ; if ( p . exitValue ( ) != 0 ) { logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed using "" + optFilePath + "". "" ) ; InputStream error = p . getErrorStream ( ) ; for ( int i = 0 ; i < error . available ( ) ; i ++ ) { logger . info ( """" + error . read ( ) ) ; } if ( r == ( TivoliStorageManager . maxRetries - 1 ) ) { throw new Exception ( ""Retrieval of "" + working . getName ( ) + "" failed. "" ) ; } TimeUnit . MINUTES . sleep ( TivoliStorageManager . retryTime ) ; } else { if ( Files . exists ( Paths . get ( filePath ) ) ) { Files . move ( Paths . get ( filePath ) , Paths . get ( working . getAbsolutePath ( ) ) , StandardCopyOption . REPLACE_EXISTING ) ; } Files . delete ( Paths . get ( fileDir ) ) ; break ; } } } +","public void retrieve ( String depositId , File working , Progress progress , String optFilePath ) throws Exception { String fileDir = TivoliStorageManager . TEMP_PATH_PREFIX + ""/"" + depositId ; String filePath = fileDir + ""/"" + working . getName ( ) ; if ( ! Files . exists ( Paths . get ( fileDir ) ) ) { Files . createDirectory ( Paths . get ( fileDir ) ) ; } logger . 
info ( ""Retrieve command is "" + ""dsmc "" + "" retrieve "" + filePath + "" -description="" + depositId + "" -optfile="" + optFilePath + ""-replace=true"" ) ; for ( int r = 0 ; r < TivoliStorageManager . maxRetries ; r ++ ) { ProcessBuilder pb = new ProcessBuilder ( ""dsmc"" , ""retrieve"" , filePath , ""-description="" + depositId , ""-optfile="" + optFilePath , ""-replace=true"" ) ; Process p = pb . start ( ) ; p . waitFor ( ) ; if ( p . exitValue ( ) != 0 ) { logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed using "" + optFilePath + "". "" ) ; InputStream error = p . getErrorStream ( ) ; for ( int i = 0 ; i < error . available ( ) ; i ++ ) { logger . info ( """" + error . read ( ) ) ; } if ( r == ( TivoliStorageManager . maxRetries - 1 ) ) { throw new Exception ( ""Retrieval of "" + working . getName ( ) + "" failed. "" ) ; } logger . info ( ""Retrieval of "" + working . getName ( ) + "" failed. Retrying in "" + TivoliStorageManager . retryTime + "" mins"" ) ; TimeUnit . MINUTES . sleep ( TivoliStorageManager . retryTime ) ; } else { if ( Files . exists ( Paths . get ( filePath ) ) ) { Files . move ( Paths . get ( filePath ) , Paths . get ( working . getAbsolutePath ( ) ) , StandardCopyOption . REPLACE_EXISTING ) ; } Files . delete ( Paths . get ( fileDir ) ) ; break ; } } } +" +317,"private static void startServer ( ) throws InterruptedException { try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +","private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +" +318,"private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +","private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . 
addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +" +319,"private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +","private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +" +320,"private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +","private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +" +321,"private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +","private static void startServer ( ) throws InterruptedException { LOG . 
info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +" +322,"private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } } ) ) ; jettyWebServer . join ( ) ; } +","private static void startServer ( ) throws InterruptedException { LOG . info ( ""Starting submarine server"" ) ; try { jettyWebServer . start ( ) ; } catch ( Exception e ) { LOG . error ( ""Error while running jettyServer"" , e ) ; System . exit ( - 1 ) ; } LOG . info ( ""Done, submarine server started"" ) ; Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( ( ) -> { LOG . info ( ""Shutting down Submarine Server ... "" ) ; try { jettyWebServer . stop ( ) ; Thread . sleep ( 3000 ) ; } catch ( Exception e ) { LOG . error ( ""Error while stopping servlet container"" , e ) ; } LOG . info ( ""Bye"" ) ; } ) ) ; jettyWebServer . join ( ) ; } +" +323,"private void createFeatureTypes ( Collection < Pair < ArcGISLayer , DataGroupInfo > > layersAndParents ) { List < FeatureCreator > tasks = layersAndParents . stream ( ) . filter ( lp -> ! lp . getFirstObject ( ) . isSingleFusedMapCache ( ) ) . map ( lp -> new FeatureCreator ( lp . getFirstObject ( ) , lp . getSecondObject ( ) ) ) . collect ( Collectors . toList ( ) ) ; try { myExecutor . invokeAll ( tasks ) ; } catch ( InterruptedException e ) { } } +","private void createFeatureTypes ( Collection < Pair < ArcGISLayer , DataGroupInfo > > layersAndParents ) { List < FeatureCreator > tasks = layersAndParents . stream ( ) . filter ( lp -> ! lp . getFirstObject ( ) . isSingleFusedMapCache ( ) ) . map ( lp -> new FeatureCreator ( lp . getFirstObject ( ) , lp . getSecondObject ( ) ) ) . collect ( Collectors . toList ( ) ) ; try { myExecutor . invokeAll ( tasks ) ; } catch ( InterruptedException e ) { LOGGER . error ( e , e ) ; } } +" +324,"public static void trace ( ResourceRequest req , String src ) { if ( isTrace ) { List < String > attrNames = Collections . list ( req . getAttributeNames ( ) ) ; StringBuilder txt = new StringBuilder ( 128 ) ; txt . append ( src ) ; txt . append ( ""\nAttribute names: "" ) . append ( attrNames ) ; txt . append ( ""\nasync_request_uri: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.request_uri"" ) ) ; txt . append ( ""\nasync_context_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.context_path"" ) ) ; txt . append ( ""\nasync_servlet_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.servlet_path"" ) ) ; txt . append ( ""\nasync_path_info: "" ) . 
append ( ( String ) req . getAttribute ( ""javax.servlet.async.path_info"" ) ) ; txt . append ( ""\nasync_query_string: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.query_string"" ) ) ; txt . append ( ""\nforward_request_uri: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.request_uri"" ) ) ; txt . append ( ""\nforward_context_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.context_path"" ) ) ; txt . append ( ""\nforward_servlet_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.servlet_path"" ) ) ; txt . append ( ""\nforward_path_info: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.path_info"" ) ) ; txt . append ( ""\nforward_query_string: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.query_string"" ) ) ; txt . append ( ""\ninclude_request_uri: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.request_uri"" ) ) ; txt . append ( ""\ninclude_context_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.context_path"" ) ) ; txt . append ( ""\ninclude_servlet_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.servlet_path"" ) ) ; txt . append ( ""\ninclude_path_info: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.path_info"" ) ) ; txt . append ( ""\ninclude_query_string: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.query_string"" ) ) ; txt . append ( ""\nmethod_context_path: "" ) . append ( req . getContextPath ( ) ) ; } } +","public static void trace ( ResourceRequest req , String src ) { if ( isTrace ) { List < String > attrNames = Collections . list ( req . getAttributeNames ( ) ) ; StringBuilder txt = new StringBuilder ( 128 ) ; txt . append ( src ) ; txt . append ( ""\nAttribute names: "" ) . append ( attrNames ) ; txt . append ( ""\nasync_request_uri: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.request_uri"" ) ) ; txt . append ( ""\nasync_context_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.context_path"" ) ) ; txt . append ( ""\nasync_servlet_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.servlet_path"" ) ) ; txt . append ( ""\nasync_path_info: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.path_info"" ) ) ; txt . append ( ""\nasync_query_string: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.async.query_string"" ) ) ; txt . append ( ""\nforward_request_uri: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.request_uri"" ) ) ; txt . append ( ""\nforward_context_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.context_path"" ) ) ; txt . append ( ""\nforward_servlet_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.servlet_path"" ) ) ; txt . append ( ""\nforward_path_info: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.path_info"" ) ) ; txt . append ( ""\nforward_query_string: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.forward.query_string"" ) ) ; txt . append ( ""\ninclude_request_uri: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.request_uri"" ) ) ; txt . append ( ""\ninclude_context_path: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.context_path"" ) ) ; txt . append ( ""\ninclude_servlet_path: "" ) . 
append ( ( String ) req . getAttribute ( ""javax.servlet.include.servlet_path"" ) ) ; txt . append ( ""\ninclude_path_info: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.path_info"" ) ) ; txt . append ( ""\ninclude_query_string: "" ) . append ( ( String ) req . getAttribute ( ""javax.servlet.include.query_string"" ) ) ; txt . append ( ""\nmethod_context_path: "" ) . append ( req . getContextPath ( ) ) ; logger . debug ( txt . toString ( ) ) ; } } +" +325,"public boolean takeLead ( UUID leaderId , int ttl ) { ResultSet lwtResult = session . execute ( takeLeadPrepStmt . bind ( leaderId , reaperInstanceId , AppContext . REAPER_INSTANCE_ADDRESS , ttl ) ) ; if ( lwtResult . wasApplied ( ) ) { LOG . debug ( ""Took lead on segment {}"" , leaderId ) ; return true ; } LOG . debug ( ""Could not take lead on segment {}"" , leaderId ) ; return false ; } +","public boolean takeLead ( UUID leaderId , int ttl ) { LOG . debug ( ""Trying to take lead on segment {}"" , leaderId ) ; ResultSet lwtResult = session . execute ( takeLeadPrepStmt . bind ( leaderId , reaperInstanceId , AppContext . REAPER_INSTANCE_ADDRESS , ttl ) ) ; if ( lwtResult . wasApplied ( ) ) { LOG . debug ( ""Took lead on segment {}"" , leaderId ) ; return true ; } LOG . debug ( ""Could not take lead on segment {}"" , leaderId ) ; return false ; } +" +326,"public boolean takeLead ( UUID leaderId , int ttl ) { LOG . debug ( ""Trying to take lead on segment {}"" , leaderId ) ; ResultSet lwtResult = session . execute ( takeLeadPrepStmt . bind ( leaderId , reaperInstanceId , AppContext . REAPER_INSTANCE_ADDRESS , ttl ) ) ; if ( lwtResult . wasApplied ( ) ) { return true ; } LOG . debug ( ""Could not take lead on segment {}"" , leaderId ) ; return false ; } +","public boolean takeLead ( UUID leaderId , int ttl ) { LOG . debug ( ""Trying to take lead on segment {}"" , leaderId ) ; ResultSet lwtResult = session . execute ( takeLeadPrepStmt . bind ( leaderId , reaperInstanceId , AppContext . REAPER_INSTANCE_ADDRESS , ttl ) ) ; if ( lwtResult . wasApplied ( ) ) { LOG . debug ( ""Took lead on segment {}"" , leaderId ) ; return true ; } LOG . debug ( ""Could not take lead on segment {}"" , leaderId ) ; return false ; } +" +327,"public boolean takeLead ( UUID leaderId , int ttl ) { LOG . debug ( ""Trying to take lead on segment {}"" , leaderId ) ; ResultSet lwtResult = session . execute ( takeLeadPrepStmt . bind ( leaderId , reaperInstanceId , AppContext . REAPER_INSTANCE_ADDRESS , ttl ) ) ; if ( lwtResult . wasApplied ( ) ) { LOG . debug ( ""Took lead on segment {}"" , leaderId ) ; return true ; } return false ; } +","public boolean takeLead ( UUID leaderId , int ttl ) { LOG . debug ( ""Trying to take lead on segment {}"" , leaderId ) ; ResultSet lwtResult = session . execute ( takeLeadPrepStmt . bind ( leaderId , reaperInstanceId , AppContext . REAPER_INSTANCE_ADDRESS , ttl ) ) ; if ( lwtResult . wasApplied ( ) ) { LOG . debug ( ""Took lead on segment {}"" , leaderId ) ; return true ; } LOG . debug ( ""Could not take lead on segment {}"" , leaderId ) ; return false ; } +" +328,"protected boolean begin ( ) { _log . info ( ""+------------------------------------------+"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""+------------------------------------------+"" ) ; DfDBFluteTaskStatus . getInstance ( ) . setTaskType ( TaskType . TakeAssert ) ; return true ; } +","protected boolean begin ( ) { _log . info ( ""+------------------------------------------+"" ) ; _log . info ( ""| |"" ) ; _log . 
info ( ""| Take Assert |"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""+------------------------------------------+"" ) ; DfDBFluteTaskStatus . getInstance ( ) . setTaskType ( TaskType . TakeAssert ) ; return true ; } +" +329,"public void run ( ) { try { monkey . start ( ) ; monkey . waitForStop ( ) ; } catch ( Exception e ) { LOG . warn ( StringUtils . stringifyException ( e ) ) ; } } +","public void run ( ) { try { LOG . info ( ""Starting ChaosMonkey"" ) ; monkey . start ( ) ; monkey . waitForStop ( ) ; } catch ( Exception e ) { LOG . warn ( StringUtils . stringifyException ( e ) ) ; } } +" +330,"public void run ( ) { try { LOG . info ( ""Starting ChaosMonkey"" ) ; monkey . start ( ) ; monkey . waitForStop ( ) ; } catch ( Exception e ) { } } +","public void run ( ) { try { LOG . info ( ""Starting ChaosMonkey"" ) ; monkey . start ( ) ; monkey . waitForStop ( ) ; } catch ( Exception e ) { LOG . warn ( StringUtils . stringifyException ( e ) ) ; } } +" +331,"public void printError ( CallContext context , Exception ex , HttpServletRequest request , HttpServletResponse response ) { ErrorInfo errorInfo = extractError ( ex ) ; if ( response . isCommitted ( ) ) { return ; } String token = ( context instanceof BrowserCallContextImpl ? ( ( BrowserCallContextImpl ) context ) . getToken ( ) : null ) ; if ( token == null ) { response . resetBuffer ( ) ; CALL . setStatus ( request , response , errorInfo . statusCode ) ; String message = ex . getMessage ( ) ; if ( ! ( ex instanceof CmisBaseException ) ) { message = ""An error occurred!"" ; } JSONObject jsonResponse = new JSONObject ( ) ; jsonResponse . put ( ERROR_EXCEPTION , errorInfo . exceptionName ) ; jsonResponse . put ( ERROR_MESSAGE , errorInfo . message ) ; String st = ExceptionHelper . getStacktraceAsString ( ex ) ; if ( st != null ) { jsonResponse . put ( ERROR_STACKTRACE , st ) ; } try { CALL . writeJSON ( jsonResponse , request , response ) ; } catch ( IOException e ) { LOG . error ( e . getMessage ( ) , e ) ; try { response . sendError ( errorInfo . statusCode , message ) ; } catch ( IOException en ) { } } } else { CALL . setStatus ( request , response , SC_OK ) ; response . setContentType ( AbstractBrowserServiceCall . HTML_MIME_TYPE ) ; response . setContentLength ( 0 ) ; if ( context != null ) { CALL . setCookie ( request , response , context . getRepositoryId ( ) , token , CALL . createCookieValue ( errorInfo . statusCode , null , errorInfo . exceptionName , ex . getMessage ( ) ) ) ; } } } +","public void printError ( CallContext context , Exception ex , HttpServletRequest request , HttpServletResponse response ) { ErrorInfo errorInfo = extractError ( ex ) ; if ( response . isCommitted ( ) ) { LOG . warn ( ""Failed to send error message to client. "" + ""Response is already committed."" , ex ) ; return ; } String token = ( context instanceof BrowserCallContextImpl ? ( ( BrowserCallContextImpl ) context ) . getToken ( ) : null ) ; if ( token == null ) { response . resetBuffer ( ) ; CALL . setStatus ( request , response , errorInfo . statusCode ) ; String message = ex . getMessage ( ) ; if ( ! ( ex instanceof CmisBaseException ) ) { message = ""An error occurred!"" ; } JSONObject jsonResponse = new JSONObject ( ) ; jsonResponse . put ( ERROR_EXCEPTION , errorInfo . exceptionName ) ; jsonResponse . put ( ERROR_MESSAGE , errorInfo . message ) ; String st = ExceptionHelper . getStacktraceAsString ( ex ) ; if ( st != null ) { jsonResponse . put ( ERROR_STACKTRACE , st ) ; } try { CALL . 
writeJSON ( jsonResponse , request , response ) ; } catch ( IOException e ) { LOG . error ( e . getMessage ( ) , e ) ; try { response . sendError ( errorInfo . statusCode , message ) ; } catch ( IOException en ) { } } } else { CALL . setStatus ( request , response , SC_OK ) ; response . setContentType ( AbstractBrowserServiceCall . HTML_MIME_TYPE ) ; response . setContentLength ( 0 ) ; if ( context != null ) { CALL . setCookie ( request , response , context . getRepositoryId ( ) , token , CALL . createCookieValue ( errorInfo . statusCode , null , errorInfo . exceptionName , ex . getMessage ( ) ) ) ; } } } +" +332,"public void printError ( CallContext context , Exception ex , HttpServletRequest request , HttpServletResponse response ) { ErrorInfo errorInfo = extractError ( ex ) ; if ( response . isCommitted ( ) ) { LOG . warn ( ""Failed to send error message to client. "" + ""Response is already committed."" , ex ) ; return ; } String token = ( context instanceof BrowserCallContextImpl ? ( ( BrowserCallContextImpl ) context ) . getToken ( ) : null ) ; if ( token == null ) { response . resetBuffer ( ) ; CALL . setStatus ( request , response , errorInfo . statusCode ) ; String message = ex . getMessage ( ) ; if ( ! ( ex instanceof CmisBaseException ) ) { message = ""An error occurred!"" ; } JSONObject jsonResponse = new JSONObject ( ) ; jsonResponse . put ( ERROR_EXCEPTION , errorInfo . exceptionName ) ; jsonResponse . put ( ERROR_MESSAGE , errorInfo . message ) ; String st = ExceptionHelper . getStacktraceAsString ( ex ) ; if ( st != null ) { jsonResponse . put ( ERROR_STACKTRACE , st ) ; } try { CALL . writeJSON ( jsonResponse , request , response ) ; } catch ( IOException e ) { try { response . sendError ( errorInfo . statusCode , message ) ; } catch ( IOException en ) { } } } else { CALL . setStatus ( request , response , SC_OK ) ; response . setContentType ( AbstractBrowserServiceCall . HTML_MIME_TYPE ) ; response . setContentLength ( 0 ) ; if ( context != null ) { CALL . setCookie ( request , response , context . getRepositoryId ( ) , token , CALL . createCookieValue ( errorInfo . statusCode , null , errorInfo . exceptionName , ex . getMessage ( ) ) ) ; } } } +","public void printError ( CallContext context , Exception ex , HttpServletRequest request , HttpServletResponse response ) { ErrorInfo errorInfo = extractError ( ex ) ; if ( response . isCommitted ( ) ) { LOG . warn ( ""Failed to send error message to client. "" + ""Response is already committed."" , ex ) ; return ; } String token = ( context instanceof BrowserCallContextImpl ? ( ( BrowserCallContextImpl ) context ) . getToken ( ) : null ) ; if ( token == null ) { response . resetBuffer ( ) ; CALL . setStatus ( request , response , errorInfo . statusCode ) ; String message = ex . getMessage ( ) ; if ( ! ( ex instanceof CmisBaseException ) ) { message = ""An error occurred!"" ; } JSONObject jsonResponse = new JSONObject ( ) ; jsonResponse . put ( ERROR_EXCEPTION , errorInfo . exceptionName ) ; jsonResponse . put ( ERROR_MESSAGE , errorInfo . message ) ; String st = ExceptionHelper . getStacktraceAsString ( ex ) ; if ( st != null ) { jsonResponse . put ( ERROR_STACKTRACE , st ) ; } try { CALL . writeJSON ( jsonResponse , request , response ) ; } catch ( IOException e ) { LOG . error ( e . getMessage ( ) , e ) ; try { response . sendError ( errorInfo . statusCode , message ) ; } catch ( IOException en ) { } } } else { CALL . setStatus ( request , response , SC_OK ) ; response . setContentType ( AbstractBrowserServiceCall . 
HTML_MIME_TYPE ) ; response . setContentLength ( 0 ) ; if ( context != null ) { CALL . setCookie ( request , response , context . getRepositoryId ( ) , token , CALL . createCookieValue ( errorInfo . statusCode , null , errorInfo . exceptionName , ex . getMessage ( ) ) ) ; } } } +" +333,"public String print ( String xmlDeclaration , String indent ) { Transformer serializer ; try { serializer = tf . newTransformer ( ) ; serializer . setOutputProperty ( OutputKeys . OMIT_XML_DECLARATION , xmlDeclaration ) ; serializer . setOutputProperty ( OutputKeys . INDENT , indent ) ; serializer . setOutputProperty ( OutputPropertiesFactory . S_KEY_CONTENT_HANDLER , org . apache . xml . serializer . ToXMLStream . class . getName ( ) ) ; StringWriter writer = new StringWriter ( ) ; serializer . transform ( new DOMSource ( document ) , new StreamResult ( writer ) ) ; return writer . toString ( ) ; } catch ( TransformerFactoryConfigurationError | TransformerException e ) { } return null ; } +","public String print ( String xmlDeclaration , String indent ) { Transformer serializer ; try { serializer = tf . newTransformer ( ) ; serializer . setOutputProperty ( OutputKeys . OMIT_XML_DECLARATION , xmlDeclaration ) ; serializer . setOutputProperty ( OutputKeys . INDENT , indent ) ; serializer . setOutputProperty ( OutputPropertiesFactory . S_KEY_CONTENT_HANDLER , org . apache . xml . serializer . ToXMLStream . class . getName ( ) ) ; StringWriter writer = new StringWriter ( ) ; serializer . transform ( new DOMSource ( document ) , new StreamResult ( writer ) ) ; return writer . toString ( ) ; } catch ( TransformerFactoryConfigurationError | TransformerException e ) { LOGGER . debug ( e . getMessage ( ) , e ) ; } return null ; } +" +334,"public static boolean isReachable ( String url ) { try { return isReachable ( new URL ( url ) ) ; } catch ( MalformedURLException e ) { return false ; } } +","public static boolean isReachable ( String url ) { try { return isReachable ( new URL ( url ) ) ; } catch ( MalformedURLException e ) { LOG . error ( e . getMessage ( ) , e ) ; return false ; } } +" +335,"protected void performSystemPing ( ) { try { InetAddress destinationAddress = destination . getValue ( ) ; if ( destinationAddress == null ) { return ; } networkUtils . nativePing ( pingMethod , destinationAddress . getHostAddress ( ) , timeoutInMS ) . ifPresent ( o -> { if ( o . isSuccess ( ) ) { PresenceDetectionValue v = updateReachableValue ( PresenceDetectionType . ICMP_PING , getLatency ( o , preferResponseTimeAsLatency ) ) ; updateListener . partialDetectionResult ( v ) ; } } ) ; } catch ( IOException e ) { logger . trace ( ""Failed to execute a native ping for ip {}"" , hostname , e ) ; } catch ( InterruptedException e ) { } } +","protected void performSystemPing ( ) { try { logger . trace ( ""Perform native ping presence detection for {}"" , hostname ) ; InetAddress destinationAddress = destination . getValue ( ) ; if ( destinationAddress == null ) { return ; } networkUtils . nativePing ( pingMethod , destinationAddress . getHostAddress ( ) , timeoutInMS ) . ifPresent ( o -> { if ( o . isSuccess ( ) ) { PresenceDetectionValue v = updateReachableValue ( PresenceDetectionType . ICMP_PING , getLatency ( o , preferResponseTimeAsLatency ) ) ; updateListener . partialDetectionResult ( v ) ; } } ) ; } catch ( IOException e ) { logger . trace ( ""Failed to execute a native ping for ip {}"" , hostname , e ) ; } catch ( InterruptedException e ) { } } +" +336,"protected void performSystemPing ( ) { try { logger . 
trace ( ""Perform native ping presence detection for {}"" , hostname ) ; InetAddress destinationAddress = destination . getValue ( ) ; if ( destinationAddress == null ) { return ; } networkUtils . nativePing ( pingMethod , destinationAddress . getHostAddress ( ) , timeoutInMS ) . ifPresent ( o -> { if ( o . isSuccess ( ) ) { PresenceDetectionValue v = updateReachableValue ( PresenceDetectionType . ICMP_PING , getLatency ( o , preferResponseTimeAsLatency ) ) ; updateListener . partialDetectionResult ( v ) ; } } ) ; } catch ( IOException e ) { } catch ( InterruptedException e ) { } } +","protected void performSystemPing ( ) { try { logger . trace ( ""Perform native ping presence detection for {}"" , hostname ) ; InetAddress destinationAddress = destination . getValue ( ) ; if ( destinationAddress == null ) { return ; } networkUtils . nativePing ( pingMethod , destinationAddress . getHostAddress ( ) , timeoutInMS ) . ifPresent ( o -> { if ( o . isSuccess ( ) ) { PresenceDetectionValue v = updateReachableValue ( PresenceDetectionType . ICMP_PING , getLatency ( o , preferResponseTimeAsLatency ) ) ; updateListener . partialDetectionResult ( v ) ; } } ) ; } catch ( IOException e ) { logger . trace ( ""Failed to execute a native ping for ip {}"" , hostname , e ) ; } catch ( InterruptedException e ) { } } +" +337,"public void createClientWith2Pools ( VM client , final int [ ] serverPorts1 , final int [ ] serverPorts2 , final String serverHost , final String redundancyLevel ) { client . invoke ( ( ) -> { getCache ( ) . getQueryService ( ) ; AttributesFactory regionFactory0 = new AttributesFactory ( ) ; AttributesFactory regionFactory1 = new AttributesFactory ( ) ; regionFactory0 . setScope ( Scope . LOCAL ) ; regionFactory1 . setScope ( Scope . LOCAL ) ; if ( redundancyLevel != null ) { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } else { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , - 1 , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , - 1 , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } createRegion ( regions [ 0 ] , regionFactory0 . create ( ) ) ; createRegion ( regions [ 1 ] , regionFactory1 . create ( ) ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 0 ] ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 1 ] ) ; } ) ; } +","public void createClientWith2Pools ( VM client , final int [ ] serverPorts1 , final int [ ] serverPorts2 , final String serverHost , final String redundancyLevel ) { client . invoke ( ( ) -> { logger . info ( ""### Create Client. ###"" ) ; getCache ( ) . getQueryService ( ) ; AttributesFactory regionFactory0 = new AttributesFactory ( ) ; AttributesFactory regionFactory1 = new AttributesFactory ( ) ; regionFactory0 . setScope ( Scope . 
LOCAL ) ; regionFactory1 . setScope ( Scope . LOCAL ) ; if ( redundancyLevel != null ) { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } else { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , - 1 , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , - 1 , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } createRegion ( regions [ 0 ] , regionFactory0 . create ( ) ) ; createRegion ( regions [ 1 ] , regionFactory1 . create ( ) ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 0 ] ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 1 ] ) ; } ) ; } +" +338,"public void createClientWith2Pools ( VM client , final int [ ] serverPorts1 , final int [ ] serverPorts2 , final String serverHost , final String redundancyLevel ) { client . invoke ( ( ) -> { logger . info ( ""### Create Client. ###"" ) ; getCache ( ) . getQueryService ( ) ; AttributesFactory regionFactory0 = new AttributesFactory ( ) ; AttributesFactory regionFactory1 = new AttributesFactory ( ) ; regionFactory0 . setScope ( Scope . LOCAL ) ; regionFactory1 . setScope ( Scope . LOCAL ) ; if ( redundancyLevel != null ) { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } else { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , - 1 , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , - 1 , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } createRegion ( regions [ 0 ] , regionFactory0 . create ( ) ) ; createRegion ( regions [ 1 ] , regionFactory1 . create ( ) ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 1 ] ) ; } ) ; } +","public void createClientWith2Pools ( VM client , final int [ ] serverPorts1 , final int [ ] serverPorts2 , final String serverHost , final String redundancyLevel ) { client . invoke ( ( ) -> { logger . info ( ""### Create Client. ###"" ) ; getCache ( ) . getQueryService ( ) ; AttributesFactory regionFactory0 = new AttributesFactory ( ) ; AttributesFactory regionFactory1 = new AttributesFactory ( ) ; regionFactory0 . setScope ( Scope . 
LOCAL ) ; regionFactory1 . setScope ( Scope . LOCAL ) ; if ( redundancyLevel != null ) { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } else { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , - 1 , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , - 1 , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } createRegion ( regions [ 0 ] , regionFactory0 . create ( ) ) ; createRegion ( regions [ 1 ] , regionFactory1 . create ( ) ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 0 ] ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 1 ] ) ; } ) ; } +" +339,"public void createClientWith2Pools ( VM client , final int [ ] serverPorts1 , final int [ ] serverPorts2 , final String serverHost , final String redundancyLevel ) { client . invoke ( ( ) -> { logger . info ( ""### Create Client. ###"" ) ; getCache ( ) . getQueryService ( ) ; AttributesFactory regionFactory0 = new AttributesFactory ( ) ; AttributesFactory regionFactory1 = new AttributesFactory ( ) ; regionFactory0 . setScope ( Scope . LOCAL ) ; regionFactory1 . setScope ( Scope . LOCAL ) ; if ( redundancyLevel != null ) { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } else { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , - 1 , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , - 1 , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } createRegion ( regions [ 0 ] , regionFactory0 . create ( ) ) ; createRegion ( regions [ 1 ] , regionFactory1 . create ( ) ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 0 ] ) ; } ) ; } +","public void createClientWith2Pools ( VM client , final int [ ] serverPorts1 , final int [ ] serverPorts2 , final String serverHost , final String redundancyLevel ) { client . invoke ( ( ) -> { logger . info ( ""### Create Client. ###"" ) ; getCache ( ) . getQueryService ( ) ; AttributesFactory regionFactory0 = new AttributesFactory ( ) ; AttributesFactory regionFactory1 = new AttributesFactory ( ) ; regionFactory0 . setScope ( Scope . 
LOCAL ) ; regionFactory1 . setScope ( Scope . LOCAL ) ; if ( redundancyLevel != null ) { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , Integer . parseInt ( redundancyLevel ) , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } else { ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory0 , serverHost , serverPorts1 , true , - 1 , - 1 , ( String ) null , ""testPoolA"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; ClientServerTestCase . configureConnectionPoolWithNameAndFactory ( regionFactory1 , serverHost , serverPorts2 , true , - 1 , - 1 , ( String ) null , ""testPoolB"" , PoolManager . createFactory ( ) , - 1 , - 1 , - 2 , - 1 ) ; } createRegion ( regions [ 0 ] , regionFactory0 . create ( ) ) ; createRegion ( regions [ 1 ] , regionFactory1 . create ( ) ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 0 ] ) ; logger . info ( ""### Successfully Created Region on Client :"" + regions [ 1 ] ) ; } ) ; } +" +340,"private void setLocationFromBoundingBox ( MetacardImpl metacard , String north , String south , String east , String west ) { try { Envelope envelope = new Envelope ( Double . parseDouble ( east . trim ( ) ) , Double . parseDouble ( west . trim ( ) ) , Double . parseDouble ( south . trim ( ) ) , Double . parseDouble ( north . trim ( ) ) ) ; String wkt = WKT_WRITER_THREAD_LOCAL . get ( ) . write ( factory . toGeometry ( envelope ) ) ; if ( wkt != null ) { metacard . setAttribute ( Core . LOCATION , wkt ) ; } } catch ( NumberFormatException nfe ) { } } +","private void setLocationFromBoundingBox ( MetacardImpl metacard , String north , String south , String east , String west ) { try { Envelope envelope = new Envelope ( Double . parseDouble ( east . trim ( ) ) , Double . parseDouble ( west . trim ( ) ) , Double . parseDouble ( south . trim ( ) ) , Double . parseDouble ( north . trim ( ) ) ) ; String wkt = WKT_WRITER_THREAD_LOCAL . get ( ) . write ( factory . toGeometry ( envelope ) ) ; if ( wkt != null ) { metacard . setAttribute ( Core . LOCATION , wkt ) ; } } catch ( NumberFormatException nfe ) { LOGGER . debug ( ""Unable to parse double from GMD metadata {}, {}, {}, {}"" , west , east , south , north ) ; } } +" +341,"protected void persistBlobs ( String key , long sizeOfBlobs , List < Map < String , String > > blobInfos ) { synchronized ( this ) { StorageEntry entry = getStorageEntry ( key ) ; if ( entry == null ) { if ( sizeOfBlobs > 0 ) { incrementStorageSize ( sizeOfBlobs ) ; } entry = new StorageEntry ( ) ; } else { incrementStorageSize ( sizeOfBlobs - entry . getSize ( ) ) ; } entry . setSize ( sizeOfBlobs ) ; entry . setBlobInfos ( blobInfos ) ; if ( log . isDebugEnabled ( ) ) { } putStorageEntry ( key , entry ) ; } } +","protected void persistBlobs ( String key , long sizeOfBlobs , List < Map < String , String > > blobInfos ) { synchronized ( this ) { StorageEntry entry = getStorageEntry ( key ) ; if ( entry == null ) { if ( sizeOfBlobs > 0 ) { incrementStorageSize ( sizeOfBlobs ) ; } entry = new StorageEntry ( ) ; } else { incrementStorageSize ( sizeOfBlobs - entry . getSize ( ) ) ; } entry . setSize ( sizeOfBlobs ) ; entry . 
setBlobInfos ( blobInfos ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( String . format ( ""Setting blobs %s in StorageEntry stored at key %s"" , blobInfos , key ) ) ; } putStorageEntry ( key , entry ) ; } } +" +342,"public static void main ( String [ ] args ) { LOGGER . log ( Level . INFO , ""info test"" ) ; } +","public static void main ( String [ ] args ) { LOGGER . log ( Level . ERROR , ""error test"" ) ; LOGGER . log ( Level . INFO , ""info test"" ) ; } +" +343,"public static void main ( String [ ] args ) { LOGGER . log ( Level . ERROR , ""error test"" ) ; } +","public static void main ( String [ ] args ) { LOGGER . log ( Level . ERROR , ""error test"" ) ; LOGGER . log ( Level . INFO , ""info test"" ) ; } +" +344,"private Option < DublinCoreCatalog > isNew ( DublinCoreCatalog dc ) throws SeriesServiceDatabaseException { final String id = dc . getFirst ( DublinCore . PROPERTY_IDENTIFIER ) ; if ( id != null ) { try { return equals ( persistence . getSeries ( id ) , dc ) ? Option . < DublinCoreCatalog > none ( ) : some ( dc ) ; } catch ( NotFoundException e ) { return some ( dc ) ; } } else { dc . set ( DublinCore . PROPERTY_IDENTIFIER , UUID . randomUUID ( ) . toString ( ) ) ; return some ( dc ) ; } } +","private Option < DublinCoreCatalog > isNew ( DublinCoreCatalog dc ) throws SeriesServiceDatabaseException { final String id = dc . getFirst ( DublinCore . PROPERTY_IDENTIFIER ) ; if ( id != null ) { try { return equals ( persistence . getSeries ( id ) , dc ) ? Option . < DublinCoreCatalog > none ( ) : some ( dc ) ; } catch ( NotFoundException e ) { return some ( dc ) ; } } else { logger . info ( ""Series Dublin Core does not contain identifier, generating one"" ) ; dc . set ( DublinCore . PROPERTY_IDENTIFIER , UUID . randomUUID ( ) . toString ( ) ) ; return some ( dc ) ; } } +" +345,"public IQ handle ( IQ iq ) { String orderJsonStr = unmarshalOrder ( iq ) ; String className = unmarshalClassName ( iq ) ; IQ response = IQ . createResultIQ ( iq ) ; Gson gson = new Gson ( ) ; Order order = null ; try { order = ( Order ) gson . fromJson ( orderJsonStr , Class . forName ( className ) ) ; String senderId = IntercomponentUtil . getSender ( iq . getFrom ( ) . toBareJID ( ) , SystemConstants . XMPP_SERVER_NAME_PREFIX ) ; RemoteFacade . getInstance ( ) . activateOrder ( senderId , order ) ; } catch ( Throwable e ) { XmppExceptionToErrorConditionTranslator . updateErrorCondition ( response , e ) ; } return response ; } +","public IQ handle ( IQ iq ) { LOGGER . debug ( String . format ( Messages . Log . RECEIVING_REMOTE_REQUEST_S , iq . getID ( ) ) ) ; String orderJsonStr = unmarshalOrder ( iq ) ; String className = unmarshalClassName ( iq ) ; IQ response = IQ . createResultIQ ( iq ) ; Gson gson = new Gson ( ) ; Order order = null ; try { order = ( Order ) gson . fromJson ( orderJsonStr , Class . forName ( className ) ) ; String senderId = IntercomponentUtil . getSender ( iq . getFrom ( ) . toBareJID ( ) , SystemConstants . XMPP_SERVER_NAME_PREFIX ) ; RemoteFacade . getInstance ( ) . activateOrder ( senderId , order ) ; } catch ( Throwable e ) { XmppExceptionToErrorConditionTranslator . updateErrorCondition ( response , e ) ; } return response ; } +" +346,"public Map < CacheKey , Long > loadAll ( Iterable < ? extends CacheKey > keys ) throws Exception { int size = Iterables . size ( keys ) ; if ( size == 0 ) { return ImmutableMap . of ( ) ; } CacheKey anyKey = stream ( keys ) . findAny ( ) . get ( ) ; if ( stream ( keys ) . anyMatch ( k -> ! k . getSchema ( ) . equals ( anyKey . 
getSchema ( ) ) || ! k . getTable ( ) . equals ( anyKey . getTable ( ) ) || ! k . getFamily ( ) . equals ( anyKey . getFamily ( ) ) || ! k . getQualifier ( ) . equals ( anyKey . getQualifier ( ) ) ) ) { throw new PrestoException ( FUNCTION_IMPLEMENTATION_ERROR , ""loadAll called with a non-homogeneous collection of cache keys"" ) ; } Map < Range , CacheKey > rangeToKey = stream ( keys ) . collect ( Collectors . toMap ( CacheKey :: getRange , Function . identity ( ) ) ) ; LOG . debug ( ""rangeToKey size is %s"" , rangeToKey . size ( ) ) ; String metricsTable = getMetricsTableName ( anyKey . getSchema ( ) , anyKey . getTable ( ) ) ; Text columnFamily = new Text ( getIndexColumnFamily ( anyKey . getFamily ( ) . getBytes ( UTF_8 ) , anyKey . getQualifier ( ) . getBytes ( UTF_8 ) ) . array ( ) ) ; BatchScanner scanner = connector . createBatchScanner ( metricsTable , anyKey . getAuths ( ) , 10 ) ; try { scanner . setRanges ( stream ( keys ) . map ( CacheKey :: getRange ) . collect ( Collectors . toList ( ) ) ) ; scanner . fetchColumn ( columnFamily , CARDINALITY_CQ_AS_TEXT ) ; Map < CacheKey , Long > rangeValues = new HashMap < > ( ) ; stream ( keys ) . forEach ( key -> rangeValues . put ( key , 0L ) ) ; for ( Entry < Key , Value > entry : scanner ) { rangeValues . put ( rangeToKey . get ( Range . exact ( entry . getKey ( ) . getRow ( ) ) ) , parseLong ( entry . getValue ( ) . toString ( ) ) ) ; } return rangeValues ; } finally { if ( scanner != null ) { scanner . close ( ) ; } } } +","public Map < CacheKey , Long > loadAll ( Iterable < ? extends CacheKey > keys ) throws Exception { int size = Iterables . size ( keys ) ; if ( size == 0 ) { return ImmutableMap . of ( ) ; } LOG . debug ( ""Loading %s exact ranges from Accumulo"" , size ) ; CacheKey anyKey = stream ( keys ) . findAny ( ) . get ( ) ; if ( stream ( keys ) . anyMatch ( k -> ! k . getSchema ( ) . equals ( anyKey . getSchema ( ) ) || ! k . getTable ( ) . equals ( anyKey . getTable ( ) ) || ! k . getFamily ( ) . equals ( anyKey . getFamily ( ) ) || ! k . getQualifier ( ) . equals ( anyKey . getQualifier ( ) ) ) ) { throw new PrestoException ( FUNCTION_IMPLEMENTATION_ERROR , ""loadAll called with a non-homogeneous collection of cache keys"" ) ; } Map < Range , CacheKey > rangeToKey = stream ( keys ) . collect ( Collectors . toMap ( CacheKey :: getRange , Function . identity ( ) ) ) ; LOG . debug ( ""rangeToKey size is %s"" , rangeToKey . size ( ) ) ; String metricsTable = getMetricsTableName ( anyKey . getSchema ( ) , anyKey . getTable ( ) ) ; Text columnFamily = new Text ( getIndexColumnFamily ( anyKey . getFamily ( ) . getBytes ( UTF_8 ) , anyKey . getQualifier ( ) . getBytes ( UTF_8 ) ) . array ( ) ) ; BatchScanner scanner = connector . createBatchScanner ( metricsTable , anyKey . getAuths ( ) , 10 ) ; try { scanner . setRanges ( stream ( keys ) . map ( CacheKey :: getRange ) . collect ( Collectors . toList ( ) ) ) ; scanner . fetchColumn ( columnFamily , CARDINALITY_CQ_AS_TEXT ) ; Map < CacheKey , Long > rangeValues = new HashMap < > ( ) ; stream ( keys ) . forEach ( key -> rangeValues . put ( key , 0L ) ) ; for ( Entry < Key , Value > entry : scanner ) { rangeValues . put ( rangeToKey . get ( Range . exact ( entry . getKey ( ) . getRow ( ) ) ) , parseLong ( entry . getValue ( ) . toString ( ) ) ) ; } return rangeValues ; } finally { if ( scanner != null ) { scanner . close ( ) ; } } } +" +347,"public Map < CacheKey , Long > loadAll ( Iterable < ? extends CacheKey > keys ) throws Exception { int size = Iterables . 
size ( keys ) ; if ( size == 0 ) { return ImmutableMap . of ( ) ; } LOG . debug ( ""Loading %s exact ranges from Accumulo"" , size ) ; CacheKey anyKey = stream ( keys ) . findAny ( ) . get ( ) ; if ( stream ( keys ) . anyMatch ( k -> ! k . getSchema ( ) . equals ( anyKey . getSchema ( ) ) || ! k . getTable ( ) . equals ( anyKey . getTable ( ) ) || ! k . getFamily ( ) . equals ( anyKey . getFamily ( ) ) || ! k . getQualifier ( ) . equals ( anyKey . getQualifier ( ) ) ) ) { throw new PrestoException ( FUNCTION_IMPLEMENTATION_ERROR , ""loadAll called with a non-homogeneous collection of cache keys"" ) ; } Map < Range , CacheKey > rangeToKey = stream ( keys ) . collect ( Collectors . toMap ( CacheKey :: getRange , Function . identity ( ) ) ) ; String metricsTable = getMetricsTableName ( anyKey . getSchema ( ) , anyKey . getTable ( ) ) ; Text columnFamily = new Text ( getIndexColumnFamily ( anyKey . getFamily ( ) . getBytes ( UTF_8 ) , anyKey . getQualifier ( ) . getBytes ( UTF_8 ) ) . array ( ) ) ; BatchScanner scanner = connector . createBatchScanner ( metricsTable , anyKey . getAuths ( ) , 10 ) ; try { scanner . setRanges ( stream ( keys ) . map ( CacheKey :: getRange ) . collect ( Collectors . toList ( ) ) ) ; scanner . fetchColumn ( columnFamily , CARDINALITY_CQ_AS_TEXT ) ; Map < CacheKey , Long > rangeValues = new HashMap < > ( ) ; stream ( keys ) . forEach ( key -> rangeValues . put ( key , 0L ) ) ; for ( Entry < Key , Value > entry : scanner ) { rangeValues . put ( rangeToKey . get ( Range . exact ( entry . getKey ( ) . getRow ( ) ) ) , parseLong ( entry . getValue ( ) . toString ( ) ) ) ; } return rangeValues ; } finally { if ( scanner != null ) { scanner . close ( ) ; } } } +","public Map < CacheKey , Long > loadAll ( Iterable < ? extends CacheKey > keys ) throws Exception { int size = Iterables . size ( keys ) ; if ( size == 0 ) { return ImmutableMap . of ( ) ; } LOG . debug ( ""Loading %s exact ranges from Accumulo"" , size ) ; CacheKey anyKey = stream ( keys ) . findAny ( ) . get ( ) ; if ( stream ( keys ) . anyMatch ( k -> ! k . getSchema ( ) . equals ( anyKey . getSchema ( ) ) || ! k . getTable ( ) . equals ( anyKey . getTable ( ) ) || ! k . getFamily ( ) . equals ( anyKey . getFamily ( ) ) || ! k . getQualifier ( ) . equals ( anyKey . getQualifier ( ) ) ) ) { throw new PrestoException ( FUNCTION_IMPLEMENTATION_ERROR , ""loadAll called with a non-homogeneous collection of cache keys"" ) ; } Map < Range , CacheKey > rangeToKey = stream ( keys ) . collect ( Collectors . toMap ( CacheKey :: getRange , Function . identity ( ) ) ) ; LOG . debug ( ""rangeToKey size is %s"" , rangeToKey . size ( ) ) ; String metricsTable = getMetricsTableName ( anyKey . getSchema ( ) , anyKey . getTable ( ) ) ; Text columnFamily = new Text ( getIndexColumnFamily ( anyKey . getFamily ( ) . getBytes ( UTF_8 ) , anyKey . getQualifier ( ) . getBytes ( UTF_8 ) ) . array ( ) ) ; BatchScanner scanner = connector . createBatchScanner ( metricsTable , anyKey . getAuths ( ) , 10 ) ; try { scanner . setRanges ( stream ( keys ) . map ( CacheKey :: getRange ) . collect ( Collectors . toList ( ) ) ) ; scanner . fetchColumn ( columnFamily , CARDINALITY_CQ_AS_TEXT ) ; Map < CacheKey , Long > rangeValues = new HashMap < > ( ) ; stream ( keys ) . forEach ( key -> rangeValues . put ( key , 0L ) ) ; for ( Entry < Key , Value > entry : scanner ) { rangeValues . put ( rangeToKey . get ( Range . exact ( entry . getKey ( ) . getRow ( ) ) ) , parseLong ( entry . getValue ( ) . 
toString ( ) ) ) ; } return rangeValues ; } finally { if ( scanner != null ) { scanner . close ( ) ; } } } +" +348,"public void execute ( ) { if ( ! resultInjectedFromCache ) { try { LoadPolymorphicCnAElementById command = new LoadPolymorphicCnAElementById ( new Integer [ ] { rootElmt } ) ; command = getCommandService ( ) . executeCommand ( command ) ; CnATreeElement root = command . getElements ( ) . get ( 0 ) ; List < Process > processList = new ArrayList < Process > ( 0 ) ; if ( root instanceof Organization || root instanceof Audit ) { processList . addAll ( getProcesses ( root ) ) ; } else if ( root instanceof Process ) { processList . add ( ( Process ) root ) ; } for ( Process p : processList ) { List < CnATreeElement > assets = loadLinkedProcesses ( p ) ; for ( CnATreeElement asset : assets ) { List < CnATreeElement > scenarios = loadLinkedIncidentSzenarios ( asset ) ; for ( CnATreeElement scenario : scenarios ) { if ( affectsCIA ( scenario ) ) { scenario = ( IncidentScenario ) getDaoFactory ( ) . getDAO ( IncidentScenario . TYPE_ID ) . initializeAndUnproxy ( scenario ) ; CnATreeElement parent = loadScenarioParent ( scenario ) ; if ( ! isScenarioGroupRoot ( parent , scenario ) ) { int riskColor = getRiskColour ( asset , scenario ) ; setRiskColourForParentScenarioGroup ( parent . getUuid ( ) , riskColor ) ; } } } } } generateResult ( ) ; } catch ( CommandException e ) { } } } +","public void execute ( ) { if ( ! resultInjectedFromCache ) { try { LoadPolymorphicCnAElementById command = new LoadPolymorphicCnAElementById ( new Integer [ ] { rootElmt } ) ; command = getCommandService ( ) . executeCommand ( command ) ; CnATreeElement root = command . getElements ( ) . get ( 0 ) ; List < Process > processList = new ArrayList < Process > ( 0 ) ; if ( root instanceof Organization || root instanceof Audit ) { processList . addAll ( getProcesses ( root ) ) ; } else if ( root instanceof Process ) { processList . add ( ( Process ) root ) ; } for ( Process p : processList ) { List < CnATreeElement > assets = loadLinkedProcesses ( p ) ; for ( CnATreeElement asset : assets ) { List < CnATreeElement > scenarios = loadLinkedIncidentSzenarios ( asset ) ; for ( CnATreeElement scenario : scenarios ) { if ( affectsCIA ( scenario ) ) { scenario = ( IncidentScenario ) getDaoFactory ( ) . getDAO ( IncidentScenario . TYPE_ID ) . initializeAndUnproxy ( scenario ) ; CnATreeElement parent = loadScenarioParent ( scenario ) ; if ( ! isScenarioGroupRoot ( parent , scenario ) ) { int riskColor = getRiskColour ( asset , scenario ) ; setRiskColourForParentScenarioGroup ( parent . getUuid ( ) , riskColor ) ; } } } } } generateResult ( ) ; } catch ( CommandException e ) { log . error ( ""Error while executing command"" , e ) ; } } } +" +349,"public void execute ( State state ) throws WorkflowExecutionException { TlsContext tlsContext = state . getTlsContext ( connectionAlias ) ; if ( isExecuted ( ) ) { throw new WorkflowExecutionException ( ""Action already executed!"" ) ; } messages = new LinkedList < > ( ) ; messages . add ( new WorkflowConfigurationFactory ( state . getConfig ( ) ) . createClientKeyExchangeMessage ( AlgorithmResolver . getKeyExchangeAlgorithm ( tlsContext . getChooser ( ) . getSelectedCipherSuite ( ) ) ) ) ; String sending = getReadableString ( messages ) ; if ( hasDefaultAlias ( ) ) { } else { LOGGER . info ( ""Sending DynamicKeyExchange ("" + connectionAlias + ""): "" + sending ) ; } try { MessageActionResult result = sendMessageHelper . 
sendMessages ( messages , records , tlsContext ) ; messages = new ArrayList < > ( result . getMessageList ( ) ) ; records = new ArrayList < > ( result . getRecordList ( ) ) ; setExecuted ( true ) ; } catch ( IOException E ) { tlsContext . setReceivedTransportHandlerException ( true ) ; LOGGER . debug ( E ) ; setExecuted ( false ) ; } } +","public void execute ( State state ) throws WorkflowExecutionException { TlsContext tlsContext = state . getTlsContext ( connectionAlias ) ; if ( isExecuted ( ) ) { throw new WorkflowExecutionException ( ""Action already executed!"" ) ; } messages = new LinkedList < > ( ) ; messages . add ( new WorkflowConfigurationFactory ( state . getConfig ( ) ) . createClientKeyExchangeMessage ( AlgorithmResolver . getKeyExchangeAlgorithm ( tlsContext . getChooser ( ) . getSelectedCipherSuite ( ) ) ) ) ; String sending = getReadableString ( messages ) ; if ( hasDefaultAlias ( ) ) { LOGGER . info ( ""Sending DynamicKeyExchange: "" + sending ) ; } else { LOGGER . info ( ""Sending DynamicKeyExchange ("" + connectionAlias + ""): "" + sending ) ; } try { MessageActionResult result = sendMessageHelper . sendMessages ( messages , records , tlsContext ) ; messages = new ArrayList < > ( result . getMessageList ( ) ) ; records = new ArrayList < > ( result . getRecordList ( ) ) ; setExecuted ( true ) ; } catch ( IOException E ) { tlsContext . setReceivedTransportHandlerException ( true ) ; LOGGER . debug ( E ) ; setExecuted ( false ) ; } } +" +350,"public void execute ( State state ) throws WorkflowExecutionException { TlsContext tlsContext = state . getTlsContext ( connectionAlias ) ; if ( isExecuted ( ) ) { throw new WorkflowExecutionException ( ""Action already executed!"" ) ; } messages = new LinkedList < > ( ) ; messages . add ( new WorkflowConfigurationFactory ( state . getConfig ( ) ) . createClientKeyExchangeMessage ( AlgorithmResolver . getKeyExchangeAlgorithm ( tlsContext . getChooser ( ) . getSelectedCipherSuite ( ) ) ) ) ; String sending = getReadableString ( messages ) ; if ( hasDefaultAlias ( ) ) { LOGGER . info ( ""Sending DynamicKeyExchange: "" + sending ) ; } else { } try { MessageActionResult result = sendMessageHelper . sendMessages ( messages , records , tlsContext ) ; messages = new ArrayList < > ( result . getMessageList ( ) ) ; records = new ArrayList < > ( result . getRecordList ( ) ) ; setExecuted ( true ) ; } catch ( IOException E ) { tlsContext . setReceivedTransportHandlerException ( true ) ; LOGGER . debug ( E ) ; setExecuted ( false ) ; } } +","public void execute ( State state ) throws WorkflowExecutionException { TlsContext tlsContext = state . getTlsContext ( connectionAlias ) ; if ( isExecuted ( ) ) { throw new WorkflowExecutionException ( ""Action already executed!"" ) ; } messages = new LinkedList < > ( ) ; messages . add ( new WorkflowConfigurationFactory ( state . getConfig ( ) ) . createClientKeyExchangeMessage ( AlgorithmResolver . getKeyExchangeAlgorithm ( tlsContext . getChooser ( ) . getSelectedCipherSuite ( ) ) ) ) ; String sending = getReadableString ( messages ) ; if ( hasDefaultAlias ( ) ) { LOGGER . info ( ""Sending DynamicKeyExchange: "" + sending ) ; } else { LOGGER . info ( ""Sending DynamicKeyExchange ("" + connectionAlias + ""): "" + sending ) ; } try { MessageActionResult result = sendMessageHelper . sendMessages ( messages , records , tlsContext ) ; messages = new ArrayList < > ( result . getMessageList ( ) ) ; records = new ArrayList < > ( result . 
getRecordList ( ) ) ; setExecuted ( true ) ; } catch ( IOException E ) { tlsContext . setReceivedTransportHandlerException ( true ) ; LOGGER . debug ( E ) ; setExecuted ( false ) ; } } +" +351,"public void execute ( State state ) throws WorkflowExecutionException { TlsContext tlsContext = state . getTlsContext ( connectionAlias ) ; if ( isExecuted ( ) ) { throw new WorkflowExecutionException ( ""Action already executed!"" ) ; } messages = new LinkedList < > ( ) ; messages . add ( new WorkflowConfigurationFactory ( state . getConfig ( ) ) . createClientKeyExchangeMessage ( AlgorithmResolver . getKeyExchangeAlgorithm ( tlsContext . getChooser ( ) . getSelectedCipherSuite ( ) ) ) ) ; String sending = getReadableString ( messages ) ; if ( hasDefaultAlias ( ) ) { LOGGER . info ( ""Sending DynamicKeyExchange: "" + sending ) ; } else { LOGGER . info ( ""Sending DynamicKeyExchange ("" + connectionAlias + ""): "" + sending ) ; } try { MessageActionResult result = sendMessageHelper . sendMessages ( messages , records , tlsContext ) ; messages = new ArrayList < > ( result . getMessageList ( ) ) ; records = new ArrayList < > ( result . getRecordList ( ) ) ; setExecuted ( true ) ; } catch ( IOException E ) { tlsContext . setReceivedTransportHandlerException ( true ) ; setExecuted ( false ) ; } } +","public void execute ( State state ) throws WorkflowExecutionException { TlsContext tlsContext = state . getTlsContext ( connectionAlias ) ; if ( isExecuted ( ) ) { throw new WorkflowExecutionException ( ""Action already executed!"" ) ; } messages = new LinkedList < > ( ) ; messages . add ( new WorkflowConfigurationFactory ( state . getConfig ( ) ) . createClientKeyExchangeMessage ( AlgorithmResolver . getKeyExchangeAlgorithm ( tlsContext . getChooser ( ) . getSelectedCipherSuite ( ) ) ) ) ; String sending = getReadableString ( messages ) ; if ( hasDefaultAlias ( ) ) { LOGGER . info ( ""Sending DynamicKeyExchange: "" + sending ) ; } else { LOGGER . info ( ""Sending DynamicKeyExchange ("" + connectionAlias + ""): "" + sending ) ; } try { MessageActionResult result = sendMessageHelper . sendMessages ( messages , records , tlsContext ) ; messages = new ArrayList < > ( result . getMessageList ( ) ) ; records = new ArrayList < > ( result . getRecordList ( ) ) ; setExecuted ( true ) ; } catch ( IOException E ) { tlsContext . setReceivedTransportHandlerException ( true ) ; LOGGER . debug ( E ) ; setExecuted ( false ) ; } } +" +352,"protected void rollback ( ) { try { connection . rollback ( ) ; } catch ( Exception e ) { } } +","protected void rollback ( ) { try { connection . rollback ( ) ; } catch ( Exception e ) { log . warn ( ""An error occurred while trying to rollback a connection"" , e ) ; } } +" +353,"public void run ( ) { try { switch ( request . getCommandCode ( ) ) { case RxAARequest . code : handleEvent ( new Event ( true , factory . createAARequest ( request ) , null ) ) ; break ; case RxSessionTermRequest . code : handleEvent ( new Event ( true , factory . createSessionTermRequest ( request ) , null ) ) ; break ; default : listener . doOtherEvent ( session , new AppRequestEventImpl ( request ) , null ) ; break ; } } catch ( Exception e ) { } } +","public void run ( ) { try { switch ( request . getCommandCode ( ) ) { case RxAARequest . code : handleEvent ( new Event ( true , factory . createAARequest ( request ) , null ) ) ; break ; case RxSessionTermRequest . code : handleEvent ( new Event ( true , factory . createSessionTermRequest ( request ) , null ) ) ; break ; default : listener . 
doOtherEvent ( session , new AppRequestEventImpl ( request ) , null ) ; break ; } } catch ( Exception e ) { logger . debug ( ""Failed to process request message"" , e ) ; } } +"
354,"private void addToNode ( Node node , List < WMSLayerState > layerStates ) { for ( WMSLayerState layerState : layerStates ) { try { XMLUtilities . marshalJAXBObjectToElement ( layerState , node ) ; } catch ( JAXBException e ) { } } } +","private void addToNode ( Node node , List < WMSLayerState > layerStates ) { for ( WMSLayerState layerState : layerStates ) { try { XMLUtilities . marshalJAXBObjectToElement ( layerState , node ) ; } catch ( JAXBException e ) { LOGGER . error ( e . getMessage ( ) , e ) ; } } } +"
355,"public void abort ( String why , Throwable e ) { this . aborted = true ; stop ( why ) ; } +","public void abort ( String why , Throwable e ) { LOG . warn ( ""Aborting ReplicationHFileCleaner because "" + why , e ) ; this . aborted = true ; stop ( why ) ; } +"
356,"private void logCorruptedInput ( ) { if ( currentFile != null ) { try { logManager . logRecord ( currentFile . getAbsolutePath ( ) , ""Corrupted input file"" ) ; } catch ( IOException e ) { } LOGGER . log ( Level . WARN , ""Corrupted input file: "" + currentFile . getAbsolutePath ( ) ) ; } } +","private void logCorruptedInput ( ) { if ( currentFile != null ) { try { logManager . logRecord ( currentFile . getAbsolutePath ( ) , ""Corrupted input file"" ) ; } catch ( IOException e ) { LOGGER . log ( Level . WARN , ""Failed to write to feed log file"" , e ) ; } LOGGER . log ( Level . WARN , ""Corrupted input file: "" + currentFile . getAbsolutePath ( ) ) ; } } +"
357,"private void logCorruptedInput ( ) { if ( currentFile != null ) { try { logManager . logRecord ( currentFile . getAbsolutePath ( ) , ""Corrupted input file"" ) ; } catch ( IOException e ) { LOGGER . log ( Level . WARN , ""Failed to write to feed log file"" , e ) ; } } } +","private void logCorruptedInput ( ) { if ( currentFile != null ) { try { logManager . logRecord ( currentFile . getAbsolutePath ( ) , ""Corrupted input file"" ) ; } catch ( IOException e ) { LOGGER . log ( Level . WARN , ""Failed to write to feed log file"" , e ) ; } LOGGER . log ( Level . WARN , ""Corrupted input file: "" + currentFile . getAbsolutePath ( ) ) ; } } +"
358,"public static void initCredentialsForCluster ( Job job , Configuration conf ) throws IOException { UserProvider userProvider = UserProvider . instantiate ( job . getConfiguration ( ) ) ; if ( userProvider . isHBaseSecurityEnabled ( ) ) { try { Connection peerConn = ConnectionFactory . createConnection ( conf ) ; try { TokenUtil . addTokenForJob ( peerConn , userProvider . getCurrent ( ) , job ) ; } finally { peerConn . close ( ) ; } } catch ( InterruptedException e ) { Thread . interrupted ( ) ; } } } +","public static void initCredentialsForCluster ( Job job , Configuration conf ) throws IOException { UserProvider userProvider = UserProvider . instantiate ( job . getConfiguration ( ) ) ; if ( userProvider . isHBaseSecurityEnabled ( ) ) { try { Connection peerConn = ConnectionFactory . createConnection ( conf ) ; try { TokenUtil . addTokenForJob ( peerConn , userProvider . getCurrent ( ) , job ) ; } finally { peerConn . close ( ) ; } } catch ( InterruptedException e ) { LOG . info ( ""Interrupted obtaining user authentication token"" ) ; Thread . 
interrupted ( ) ; } } } +" +359,"public List < Map < String , String > > getInfectedItems ( String infectedJcrNodePath ) { String infectedJcrNodeIdentifier = getJcrNodeIdentifier ( infectedJcrNodePath ) ; String jcrNodeWorkspace = getJcrNodeWorkspace ( infectedJcrNodePath ) ; List < Map < String , String > > infectedNodes = new ArrayList < Map < String , String > > ( ) ; long startTime = System . currentTimeMillis ( ) ; try { ExtendedSession session = ( ExtendedSession ) WCMCoreUtils . getSystemSessionProvider ( ) . getSession ( jcrNodeWorkspace , repositoryService . getCurrentRepository ( ) ) ; Property infectedJcrNodeProperty = session . getPropertyByIdentifier ( infectedJcrNodeIdentifier ) ; if ( infectedJcrNodeProperty . getName ( ) . equals ( NodetypeConstant . JCR_DATA ) ) { Node infectedJcrContentNode = infectedJcrNodeProperty . getParent ( ) ; Node infectedJcrNode = infectedJcrContentNode . getParent ( ) ; Map < String , String > infectedNode = new HashMap < String , String > ( ) ; infectedNode . put ( INFECTED_ITEM_ID , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) ) ; infectedNode . put ( INFECTED_JCR_NODE_WORKSPACE , jcrNodeWorkspace ) ; infectedNode . put ( INFECTED_ITEM_NAME , infectedJcrNode . getName ( ) ) ; infectedNode . put ( INFECTED_ITEM_LAST_MODIFIER , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) ) ; infectedNodes . add ( infectedNode ) ; long endTime = System . currentTimeMillis ( ) ; } } catch ( ItemNotFoundException e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . warn ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""The jcr node may be already cleaned by a previous malware detection operation:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime ) ; } catch ( Exception e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . error ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""Error when trying to get the infected jcr node information:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime , e ) ; } return infectedNodes ; } +","public List < Map < String , String > > getInfectedItems ( String infectedJcrNodePath ) { String infectedJcrNodeIdentifier = getJcrNodeIdentifier ( infectedJcrNodePath ) ; String jcrNodeWorkspace = getJcrNodeWorkspace ( infectedJcrNodePath ) ; List < Map < String , String > > infectedNodes = new ArrayList < Map < String , String > > ( ) ; long startTime = System . currentTimeMillis ( ) ; try { ExtendedSession session = ( ExtendedSession ) WCMCoreUtils . getSystemSessionProvider ( ) . getSession ( jcrNodeWorkspace , repositoryService . getCurrentRepository ( ) ) ; Property infectedJcrNodeProperty = session . getPropertyByIdentifier ( infectedJcrNodeIdentifier ) ; if ( infectedJcrNodeProperty . getName ( ) . equals ( NodetypeConstant . JCR_DATA ) ) { Node infectedJcrContentNode = infectedJcrNodeProperty . getParent ( ) ; Node infectedJcrNode = infectedJcrContentNode . getParent ( ) ; Map < String , String > infectedNode = new HashMap < String , String > ( ) ; infectedNode . put ( INFECTED_ITEM_ID , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) ) ; infectedNode . put ( INFECTED_JCR_NODE_WORKSPACE , jcrNodeWorkspace ) ; infectedNode . put ( INFECTED_ITEM_NAME , infectedJcrNode . getName ( ) ) ; infectedNode . 
put ( INFECTED_ITEM_LAST_MODIFIER , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) ) ; infectedNodes . add ( infectedNode ) ; long endTime = System . currentTimeMillis ( ) ; LOGGER . info ( ""service={} operation={} parameters=\""jcrNodeIdentifier:{}\"" \""jcrNodeName:{}\"" \""jcrNodeLastModifier:{}\"" status=ok "" + ""duration_ms={}"" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) , infectedJcrNode . getName ( ) , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) , endTime - startTime ) ; } } catch ( ItemNotFoundException e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . warn ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""The jcr node may be already cleaned by a previous malware detection operation:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime ) ; } catch ( Exception e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . error ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""Error when trying to get the infected jcr node information:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime , e ) ; } return infectedNodes ; } +" +360,"public List < Map < String , String > > getInfectedItems ( String infectedJcrNodePath ) { String infectedJcrNodeIdentifier = getJcrNodeIdentifier ( infectedJcrNodePath ) ; String jcrNodeWorkspace = getJcrNodeWorkspace ( infectedJcrNodePath ) ; List < Map < String , String > > infectedNodes = new ArrayList < Map < String , String > > ( ) ; long startTime = System . currentTimeMillis ( ) ; try { ExtendedSession session = ( ExtendedSession ) WCMCoreUtils . getSystemSessionProvider ( ) . getSession ( jcrNodeWorkspace , repositoryService . getCurrentRepository ( ) ) ; Property infectedJcrNodeProperty = session . getPropertyByIdentifier ( infectedJcrNodeIdentifier ) ; if ( infectedJcrNodeProperty . getName ( ) . equals ( NodetypeConstant . JCR_DATA ) ) { Node infectedJcrContentNode = infectedJcrNodeProperty . getParent ( ) ; Node infectedJcrNode = infectedJcrContentNode . getParent ( ) ; Map < String , String > infectedNode = new HashMap < String , String > ( ) ; infectedNode . put ( INFECTED_ITEM_ID , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) ) ; infectedNode . put ( INFECTED_JCR_NODE_WORKSPACE , jcrNodeWorkspace ) ; infectedNode . put ( INFECTED_ITEM_NAME , infectedJcrNode . getName ( ) ) ; infectedNode . put ( INFECTED_ITEM_LAST_MODIFIER , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) ) ; infectedNodes . add ( infectedNode ) ; long endTime = System . currentTimeMillis ( ) ; LOGGER . info ( ""service={} operation={} parameters=\""jcrNodeIdentifier:{}\"" \""jcrNodeName:{}\"" \""jcrNodeLastModifier:{}\"" status=ok "" + ""duration_ms={}"" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) , infectedJcrNode . getName ( ) , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) , endTime - startTime ) ; } } catch ( ItemNotFoundException e ) { long endTime = System . currentTimeMillis ( ) ; } catch ( Exception e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . 
error ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""Error when trying to get the infected jcr node information:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime , e ) ; } return infectedNodes ; } +","public List < Map < String , String > > getInfectedItems ( String infectedJcrNodePath ) { String infectedJcrNodeIdentifier = getJcrNodeIdentifier ( infectedJcrNodePath ) ; String jcrNodeWorkspace = getJcrNodeWorkspace ( infectedJcrNodePath ) ; List < Map < String , String > > infectedNodes = new ArrayList < Map < String , String > > ( ) ; long startTime = System . currentTimeMillis ( ) ; try { ExtendedSession session = ( ExtendedSession ) WCMCoreUtils . getSystemSessionProvider ( ) . getSession ( jcrNodeWorkspace , repositoryService . getCurrentRepository ( ) ) ; Property infectedJcrNodeProperty = session . getPropertyByIdentifier ( infectedJcrNodeIdentifier ) ; if ( infectedJcrNodeProperty . getName ( ) . equals ( NodetypeConstant . JCR_DATA ) ) { Node infectedJcrContentNode = infectedJcrNodeProperty . getParent ( ) ; Node infectedJcrNode = infectedJcrContentNode . getParent ( ) ; Map < String , String > infectedNode = new HashMap < String , String > ( ) ; infectedNode . put ( INFECTED_ITEM_ID , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) ) ; infectedNode . put ( INFECTED_JCR_NODE_WORKSPACE , jcrNodeWorkspace ) ; infectedNode . put ( INFECTED_ITEM_NAME , infectedJcrNode . getName ( ) ) ; infectedNode . put ( INFECTED_ITEM_LAST_MODIFIER , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) ) ; infectedNodes . add ( infectedNode ) ; long endTime = System . currentTimeMillis ( ) ; LOGGER . info ( ""service={} operation={} parameters=\""jcrNodeIdentifier:{}\"" \""jcrNodeName:{}\"" \""jcrNodeLastModifier:{}\"" status=ok "" + ""duration_ms={}"" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) , infectedJcrNode . getName ( ) , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) , endTime - startTime ) ; } } catch ( ItemNotFoundException e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . warn ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""The jcr node may be already cleaned by a previous malware detection operation:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime ) ; } catch ( Exception e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . error ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""Error when trying to get the infected jcr node information:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime , e ) ; } return infectedNodes ; } +" +361,"public List < Map < String , String > > getInfectedItems ( String infectedJcrNodePath ) { String infectedJcrNodeIdentifier = getJcrNodeIdentifier ( infectedJcrNodePath ) ; String jcrNodeWorkspace = getJcrNodeWorkspace ( infectedJcrNodePath ) ; List < Map < String , String > > infectedNodes = new ArrayList < Map < String , String > > ( ) ; long startTime = System . currentTimeMillis ( ) ; try { ExtendedSession session = ( ExtendedSession ) WCMCoreUtils . getSystemSessionProvider ( ) . 
getSession ( jcrNodeWorkspace , repositoryService . getCurrentRepository ( ) ) ; Property infectedJcrNodeProperty = session . getPropertyByIdentifier ( infectedJcrNodeIdentifier ) ; if ( infectedJcrNodeProperty . getName ( ) . equals ( NodetypeConstant . JCR_DATA ) ) { Node infectedJcrContentNode = infectedJcrNodeProperty . getParent ( ) ; Node infectedJcrNode = infectedJcrContentNode . getParent ( ) ; Map < String , String > infectedNode = new HashMap < String , String > ( ) ; infectedNode . put ( INFECTED_ITEM_ID , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) ) ; infectedNode . put ( INFECTED_JCR_NODE_WORKSPACE , jcrNodeWorkspace ) ; infectedNode . put ( INFECTED_ITEM_NAME , infectedJcrNode . getName ( ) ) ; infectedNode . put ( INFECTED_ITEM_LAST_MODIFIER , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) ) ; infectedNodes . add ( infectedNode ) ; long endTime = System . currentTimeMillis ( ) ; LOGGER . info ( ""service={} operation={} parameters=\""jcrNodeIdentifier:{}\"" \""jcrNodeName:{}\"" \""jcrNodeLastModifier:{}\"" status=ok "" + ""duration_ms={}"" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) , infectedJcrNode . getName ( ) , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) , endTime - startTime ) ; } } catch ( ItemNotFoundException e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . warn ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""The jcr node may be already cleaned by a previous malware detection operation:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime ) ; } catch ( Exception e ) { long endTime = System . currentTimeMillis ( ) ; } return infectedNodes ; } +","public List < Map < String , String > > getInfectedItems ( String infectedJcrNodePath ) { String infectedJcrNodeIdentifier = getJcrNodeIdentifier ( infectedJcrNodePath ) ; String jcrNodeWorkspace = getJcrNodeWorkspace ( infectedJcrNodePath ) ; List < Map < String , String > > infectedNodes = new ArrayList < Map < String , String > > ( ) ; long startTime = System . currentTimeMillis ( ) ; try { ExtendedSession session = ( ExtendedSession ) WCMCoreUtils . getSystemSessionProvider ( ) . getSession ( jcrNodeWorkspace , repositoryService . getCurrentRepository ( ) ) ; Property infectedJcrNodeProperty = session . getPropertyByIdentifier ( infectedJcrNodeIdentifier ) ; if ( infectedJcrNodeProperty . getName ( ) . equals ( NodetypeConstant . JCR_DATA ) ) { Node infectedJcrContentNode = infectedJcrNodeProperty . getParent ( ) ; Node infectedJcrNode = infectedJcrContentNode . getParent ( ) ; Map < String , String > infectedNode = new HashMap < String , String > ( ) ; infectedNode . put ( INFECTED_ITEM_ID , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) ) ; infectedNode . put ( INFECTED_JCR_NODE_WORKSPACE , jcrNodeWorkspace ) ; infectedNode . put ( INFECTED_ITEM_NAME , infectedJcrNode . getName ( ) ) ; infectedNode . put ( INFECTED_ITEM_LAST_MODIFIER , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) ) ; infectedNodes . add ( infectedNode ) ; long endTime = System . currentTimeMillis ( ) ; LOGGER . 
info ( ""service={} operation={} parameters=\""jcrNodeIdentifier:{}\"" \""jcrNodeName:{}\"" \""jcrNodeLastModifier:{}\"" status=ok "" + ""duration_ms={}"" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , ( ( NodeImpl ) infectedJcrNode ) . getIdentifier ( ) , infectedJcrNode . getName ( ) , infectedJcrNode . getProperty ( NodetypeConstant . EXO_LAST_MODIFIER ) . getString ( ) , endTime - startTime ) ; } } catch ( ItemNotFoundException e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . warn ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""The jcr node may be already cleaned by a previous malware detection operation:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime ) ; } catch ( Exception e ) { long endTime = System . currentTimeMillis ( ) ; LOGGER . error ( ""service={} operation={} parameters=\""infectedJcrNodePath:{}\"" status=ko duration_ms={} error_msg=\""Error when trying to get the infected jcr node information:{}\"""" , MALWARE_DETECTION_FEATURE , MALWARE_INFECTED_JCR_NODE_DETECTION , infectedJcrNodePath , endTime - startTime , e ) ; } return infectedNodes ; } +" +362,"public String getUrlLink ( final Map < String , Object > document ) { final FessConfig fessConfig = ComponentUtil . getFessConfig ( ) ; String url = DocumentUtil . getValue ( document , fessConfig . getIndexFieldUrl ( ) , String . class ) ; if ( StringUtil . isBlank ( url ) ) { return ""#not-found-"" + DocumentUtil . getValue ( document , fessConfig . getIndexFieldDocId ( ) , String . class ) ; } final boolean isSmbUrl = url . startsWith ( ""smb:"" ) || url . startsWith ( ""smb1:"" ) ; final boolean isFtpUrl = url . startsWith ( ""ftp:"" ) ; final boolean isSmbOrFtpUrl = isSmbUrl || isFtpUrl ; url = ComponentUtil . getPathMappingHelper ( ) . replaceUrl ( url ) ; final boolean isHttpUrl = url . startsWith ( ""http:"" ) || url . startsWith ( ""https:"" ) ; if ( isSmbUrl ) { url = url . replace ( ""smb:"" , ""file:"" ) ; url = url . replace ( ""smb1:"" , ""file:"" ) ; } if ( isHttpUrl && isSmbOrFtpUrl ) { final StringBuilder buf = new StringBuilder ( url . length ( ) + 100 ) ; for ( final char c : url . toCharArray ( ) ) { if ( CharUtil . isUrlChar ( c ) ) { buf . append ( c ) ; } else { try { buf . append ( URLEncoder . encode ( String . valueOf ( c ) , urlLinkEncoding ) ) ; } catch ( final UnsupportedEncodingException e ) { buf . append ( c ) ; } } } url = buf . toString ( ) ; } else if ( url . startsWith ( ""file:"" ) ) { url = updateFileProtocol ( url ) ; if ( encodeUrlLink ) { return appendQueryParameter ( document , url ) ; } if ( ! isSmbOrFtpUrl ) { try { url = URLDecoder . decode ( url . replace ( ""+"" , ""%2B"" ) , urlLinkEncoding ) ; } catch ( final Exception e ) { if ( logger . isDebugEnabled ( ) ) { } } } } return appendQueryParameter ( document , url ) ; } +","public String getUrlLink ( final Map < String , Object > document ) { final FessConfig fessConfig = ComponentUtil . getFessConfig ( ) ; String url = DocumentUtil . getValue ( document , fessConfig . getIndexFieldUrl ( ) , String . class ) ; if ( StringUtil . isBlank ( url ) ) { return ""#not-found-"" + DocumentUtil . getValue ( document , fessConfig . getIndexFieldDocId ( ) , String . class ) ; } final boolean isSmbUrl = url . startsWith ( ""smb:"" ) || url . startsWith ( ""smb1:"" ) ; final boolean isFtpUrl = url . 
startsWith ( ""ftp:"" ) ; final boolean isSmbOrFtpUrl = isSmbUrl || isFtpUrl ; url = ComponentUtil . getPathMappingHelper ( ) . replaceUrl ( url ) ; final boolean isHttpUrl = url . startsWith ( ""http:"" ) || url . startsWith ( ""https:"" ) ; if ( isSmbUrl ) { url = url . replace ( ""smb:"" , ""file:"" ) ; url = url . replace ( ""smb1:"" , ""file:"" ) ; } if ( isHttpUrl && isSmbOrFtpUrl ) { final StringBuilder buf = new StringBuilder ( url . length ( ) + 100 ) ; for ( final char c : url . toCharArray ( ) ) { if ( CharUtil . isUrlChar ( c ) ) { buf . append ( c ) ; } else { try { buf . append ( URLEncoder . encode ( String . valueOf ( c ) , urlLinkEncoding ) ) ; } catch ( final UnsupportedEncodingException e ) { buf . append ( c ) ; } } } url = buf . toString ( ) ; } else if ( url . startsWith ( ""file:"" ) ) { url = updateFileProtocol ( url ) ; if ( encodeUrlLink ) { return appendQueryParameter ( document , url ) ; } if ( ! isSmbOrFtpUrl ) { try { url = URLDecoder . decode ( url . replace ( ""+"" , ""%2B"" ) , urlLinkEncoding ) ; } catch ( final Exception e ) { if ( logger . isDebugEnabled ( ) ) { logger . warn ( ""Failed to decode {}"" , url , e ) ; } } } } return appendQueryParameter ( document , url ) ; } +" +363,"public void setEnabled ( boolean enabled ) throws Exception { if ( enabled ) { List < Driver > enabledDrivers = new ArrayList < > ( ) ; try { for ( Driver driver : getDrivers ( ) ) { driver . setEnabled ( true ) ; enabledDrivers . add ( driver ) ; } this . enabled = true ; } catch ( Exception e ) { Collections . reverse ( enabledDrivers ) ; for ( Driver driver : enabledDrivers ) { try { driver . setEnabled ( false ) ; } catch ( Exception e1 ) { Logger . warn ( e1 ) ; } } fireMachineEnableFailed ( e . getMessage ( ) ) ; throw e ; } fireMachineEnabled ( ) ; } else { this . setHomed ( false ) ; fireMachineAboutToBeDisabled ( ""User requested stop."" ) ; Exception e = null ; List < Driver > enabledDrivers = new ArrayList < > ( ) ; enabledDrivers . addAll ( getDrivers ( ) ) ; Collections . reverse ( enabledDrivers ) ; for ( Driver driver : enabledDrivers ) { try { driver . setEnabled ( false ) ; } catch ( Exception e1 ) { Logger . warn ( e1 ) ; e = e1 ; } } this . enabled = false ; if ( e != null ) { fireMachineDisableFailed ( e . getMessage ( ) ) ; throw e ; } fireMachineDisabled ( ""User requested stop."" ) ; } } +","public void setEnabled ( boolean enabled ) throws Exception { Logger . debug ( ""setEnabled({})"" , enabled ) ; if ( enabled ) { List < Driver > enabledDrivers = new ArrayList < > ( ) ; try { for ( Driver driver : getDrivers ( ) ) { driver . setEnabled ( true ) ; enabledDrivers . add ( driver ) ; } this . enabled = true ; } catch ( Exception e ) { Collections . reverse ( enabledDrivers ) ; for ( Driver driver : enabledDrivers ) { try { driver . setEnabled ( false ) ; } catch ( Exception e1 ) { Logger . warn ( e1 ) ; } } fireMachineEnableFailed ( e . getMessage ( ) ) ; throw e ; } fireMachineEnabled ( ) ; } else { this . setHomed ( false ) ; fireMachineAboutToBeDisabled ( ""User requested stop."" ) ; Exception e = null ; List < Driver > enabledDrivers = new ArrayList < > ( ) ; enabledDrivers . addAll ( getDrivers ( ) ) ; Collections . reverse ( enabledDrivers ) ; for ( Driver driver : enabledDrivers ) { try { driver . setEnabled ( false ) ; } catch ( Exception e1 ) { Logger . warn ( e1 ) ; e = e1 ; } } this . enabled = false ; if ( e != null ) { fireMachineDisableFailed ( e . 
getMessage ( ) ) ; throw e ; } fireMachineDisabled ( ""User requested stop."" ) ; } } +" +364,"public void onLogoutSuccess ( HttpServletRequest request , HttpServletResponse response , Authentication authentication ) throws IOException , ServletException { if ( authentication != null ) { userDirectoryService . invalidate ( authentication . getName ( ) ) ; } else { logger . trace ( ""Logout after session expiration"" ) ; } super . onLogoutSuccess ( request , response , authentication ) ; } +","public void onLogoutSuccess ( HttpServletRequest request , HttpServletResponse response , Authentication authentication ) throws IOException , ServletException { if ( authentication != null ) { userDirectoryService . invalidate ( authentication . getName ( ) ) ; logger . trace ( ""Logging out user {} ..."" , authentication . getName ( ) ) ; } else { logger . trace ( ""Logout after session expiration"" ) ; } super . onLogoutSuccess ( request , response , authentication ) ; } +" +365,"public void onLogoutSuccess ( HttpServletRequest request , HttpServletResponse response , Authentication authentication ) throws IOException , ServletException { if ( authentication != null ) { userDirectoryService . invalidate ( authentication . getName ( ) ) ; logger . trace ( ""Logging out user {} ..."" , authentication . getName ( ) ) ; } else { } super . onLogoutSuccess ( request , response , authentication ) ; } +","public void onLogoutSuccess ( HttpServletRequest request , HttpServletResponse response , Authentication authentication ) throws IOException , ServletException { if ( authentication != null ) { userDirectoryService . invalidate ( authentication . getName ( ) ) ; logger . trace ( ""Logging out user {} ..."" , authentication . getName ( ) ) ; } else { logger . trace ( ""Logout after session expiration"" ) ; } super . onLogoutSuccess ( request , response , authentication ) ; } +" +366,"public String getValueFromServer ( String url ) throws Exception { RemoteUrl remoteUrl = new RemoteUrl ( url , hostList ) ; ValueVo confItemVo = restfulMgr . getJsonData ( ValueVo . class , remoteUrl , retryTime , retrySleepSeconds ) ; if ( confItemVo . getStatus ( ) . equals ( Constants . NOTOK ) ) { throw new Exception ( ""status is not ok."" ) ; } return confItemVo . getValue ( ) ; } +","public String getValueFromServer ( String url ) throws Exception { RemoteUrl remoteUrl = new RemoteUrl ( url , hostList ) ; ValueVo confItemVo = restfulMgr . getJsonData ( ValueVo . class , remoteUrl , retryTime , retrySleepSeconds ) ; LOGGER . debug ( ""remote server return: "" + confItemVo . toString ( ) ) ; if ( confItemVo . getStatus ( ) . equals ( Constants . NOTOK ) ) { throw new Exception ( ""status is not ok."" ) ; } return confItemVo . getValue ( ) ; } +" +367,"private void findConfigurationsInsideVfModule ( DelegateExecution execution , String vnfId , String vfModuleId , List < Resource > resourceList , List < Pair < WorkflowType , String > > aaiResourceIds ) { try { org . onap . aai . domain . yang . VfModule aaiVfModule = bbInputSetupUtils . getAAIVfModule ( vnfId , vfModuleId ) ; AAIResultWrapper vfModuleWrapper = new AAIResultWrapper ( new AAICommonObjectMapperProvider ( ) . getMapper ( ) . writeValueAsString ( aaiVfModule ) ) ; Optional < Relationships > relationshipsOp ; relationshipsOp = vfModuleWrapper . getRelationships ( ) ; if ( relationshipsOp . isPresent ( ) ) { relationshipsOp = workflowActionUtils . extractRelationshipsVnfc ( relationshipsOp . 
get ( ) ) ; addConfigToResources ( relationshipsOp , resourceList , aaiResourceIds ) ; } } catch ( Exception ex ) { buildAndThrowException ( execution , ""Failed to find Configuration object from the vfModule."" ) ; } } +","private void findConfigurationsInsideVfModule ( DelegateExecution execution , String vnfId , String vfModuleId , List < Resource > resourceList , List < Pair < WorkflowType , String > > aaiResourceIds ) { try { org . onap . aai . domain . yang . VfModule aaiVfModule = bbInputSetupUtils . getAAIVfModule ( vnfId , vfModuleId ) ; AAIResultWrapper vfModuleWrapper = new AAIResultWrapper ( new AAICommonObjectMapperProvider ( ) . getMapper ( ) . writeValueAsString ( aaiVfModule ) ) ; Optional < Relationships > relationshipsOp ; relationshipsOp = vfModuleWrapper . getRelationships ( ) ; if ( relationshipsOp . isPresent ( ) ) { relationshipsOp = workflowActionUtils . extractRelationshipsVnfc ( relationshipsOp . get ( ) ) ; addConfigToResources ( relationshipsOp , resourceList , aaiResourceIds ) ; } } catch ( Exception ex ) { logger . error ( ""Exception in findConfigurationsInsideVfModule"" , ex ) ; buildAndThrowException ( execution , ""Failed to find Configuration object from the vfModule."" ) ; } } +" +368,"public List < Long > getRunningSecStorageVmListByMsid ( SecondaryStorageVm . Role role , long msid ) { List < Long > l = new ArrayList < Long > ( ) ; TransactionLegacy txn = TransactionLegacy . currentTxn ( ) ; ; PreparedStatement pstmt = null ; try { String sql ; if ( role == null ) { sql = ""SELECT s.id FROM secondary_storage_vm s, vm_instance v, host h "" + ""WHERE s.id=v.id AND v.state='Running' AND v.host_id=h.id AND h.mgmt_server_id=?"" ; } else { sql = ""SELECT s.id FROM secondary_storage_vm s, vm_instance v, host h "" + ""WHERE s.id=v.id AND v.state='Running' AND s.role=? AND v.host_id=h.id AND h.mgmt_server_id=?"" ; } pstmt = txn . prepareAutoCloseStatement ( sql ) ; if ( role == null ) { pstmt . setLong ( 1 , msid ) ; } else { pstmt . setString ( 1 , role . toString ( ) ) ; pstmt . setLong ( 2 , msid ) ; } ResultSet rs = pstmt . executeQuery ( ) ; while ( rs . next ( ) ) { l . add ( rs . getLong ( 1 ) ) ; } } catch ( SQLException e ) { } return l ; } +","public List < Long > getRunningSecStorageVmListByMsid ( SecondaryStorageVm . Role role , long msid ) { List < Long > l = new ArrayList < Long > ( ) ; TransactionLegacy txn = TransactionLegacy . currentTxn ( ) ; ; PreparedStatement pstmt = null ; try { String sql ; if ( role == null ) { sql = ""SELECT s.id FROM secondary_storage_vm s, vm_instance v, host h "" + ""WHERE s.id=v.id AND v.state='Running' AND v.host_id=h.id AND h.mgmt_server_id=?"" ; } else { sql = ""SELECT s.id FROM secondary_storage_vm s, vm_instance v, host h "" + ""WHERE s.id=v.id AND v.state='Running' AND s.role=? AND v.host_id=h.id AND h.mgmt_server_id=?"" ; } pstmt = txn . prepareAutoCloseStatement ( sql ) ; if ( role == null ) { pstmt . setLong ( 1 , msid ) ; } else { pstmt . setString ( 1 , role . toString ( ) ) ; pstmt . setLong ( 2 , msid ) ; } ResultSet rs = pstmt . executeQuery ( ) ; while ( rs . next ( ) ) { l . add ( rs . getLong ( 1 ) ) ; } } catch ( SQLException e ) { s_logger . debug ( ""Caught SQLException: "" , e ) ; } return l ; } +" +369,"public void shutDown ( ) { if ( initialized . compareAndSet ( true , false ) ) { stop ( ) ; configuration . removeChangeListener ( this ) ; instrumentation . removeTransformer ( this ) ; Thread t = transformThread ; transformThread = null ; if ( t != null && ! t . isInterrupted ( ) ) { t . 
interrupt ( ) ; } cleanUp . set ( true ) ; try { instrumentApplication ( ) ; } catch ( Throwable e ) { } } } +","public void shutDown ( ) { if ( initialized . compareAndSet ( true , false ) ) { stop ( ) ; configuration . removeChangeListener ( this ) ; instrumentation . removeTransformer ( this ) ; Thread t = transformThread ; transformThread = null ; if ( t != null && ! t . isInterrupted ( ) ) { t . interrupt ( ) ; } cleanUp . set ( true ) ; try { instrumentApplication ( ) ; } catch ( Throwable e ) { LOG . warn ( ""Failed to shutdown due "" + e . getMessage ( ) + "". This exception is ignored."" , e ) ; } } } +" +370,"protected < T extends Comparable < T > > void assertSorted ( String message , Iterable < T > list , boolean reverse ) { if ( ! reverse ) { assertTrue ( Ordering . natural ( ) . isOrdered ( list ) , message ) ; } else { assertTrue ( Ordering . natural ( ) . reverse ( ) . isOrdered ( list ) , message ) ; } } +","protected < T extends Comparable < T > > void assertSorted ( String message , Iterable < T > list , boolean reverse ) { LOGGER . info ( ""Assert sort reverse: "" + reverse ) ; if ( ! reverse ) { assertTrue ( Ordering . natural ( ) . isOrdered ( list ) , message ) ; } else { assertTrue ( Ordering . natural ( ) . reverse ( ) . isOrdered ( list ) , message ) ; } } +" +371,"private static SSLContext createEasySSLContext ( ) { try { SSLContext context = SSLContext . getInstance ( ""TLS"" ) ; context . init ( null , new TrustManager [ ] { new DefaultX509TrustManager ( null ) } , null ) ; return context ; } catch ( Exception e ) { throw new HttpClientError ( e . toString ( ) ) ; } } +","private static SSLContext createEasySSLContext ( ) { try { SSLContext context = SSLContext . getInstance ( ""TLS"" ) ; context . init ( null , new TrustManager [ ] { new DefaultX509TrustManager ( null ) } , null ) ; return context ; } catch ( Exception e ) { LOG . error ( e . getMessage ( ) , e ) ; throw new HttpClientError ( e . toString ( ) ) ; } } +" +372,"public long length ( ) { try { return fs . getFileStatus ( hdfsPath ) . getLen ( ) ; } catch ( IOException e ) { return 0 ; } } +","public long length ( ) { try { return fs . getFileStatus ( hdfsPath ) . getLen ( ) ; } catch ( IOException e ) { logger . error ( ""Fail to get length of the file {}, "" , hdfsPath . toUri ( ) , e ) ; return 0 ; } } +" +373,"public String execute ( ) { myValidate ( ) ; if ( ! hasActionErrors ( ) ) { try { getBean ( ) . copyTo ( pingTarget ) ; PingTargetManager pingTargetMgr = WebloggerFactory . getWeblogger ( ) . getPingTargetManager ( ) ; pingTargetMgr . savePingTarget ( pingTarget ) ; WebloggerFactory . getWeblogger ( ) . flush ( ) ; addMessage ( isAdd ( ) ? ""pingTarget.created"" : ""pingTarget.updated"" , pingTarget . getName ( ) ) ; return SUCCESS ; } catch ( WebloggerException ex ) { addError ( ""generic.error.check.logs"" ) ; } } return INPUT ; } +","public String execute ( ) { myValidate ( ) ; if ( ! hasActionErrors ( ) ) { try { getBean ( ) . copyTo ( pingTarget ) ; PingTargetManager pingTargetMgr = WebloggerFactory . getWeblogger ( ) . getPingTargetManager ( ) ; pingTargetMgr . savePingTarget ( pingTarget ) ; WebloggerFactory . getWeblogger ( ) . flush ( ) ; addMessage ( isAdd ( ) ? ""pingTarget.created"" : ""pingTarget.updated"" , pingTarget . getName ( ) ) ; return SUCCESS ; } catch ( WebloggerException ex ) { log . error ( ""Error adding/editing ping target"" , ex ) ; addError ( ""generic.error.check.logs"" ) ; } } return INPUT ; } +" +374,"@ Aspect ( advice = org . support . project . 
ormapping . transaction . Transaction . class ) private List < PointUserHistoriesEntity > reCalcPointUserHistoryOnUser ( Integer userId , int offset2 ) { List < PointUserHistoriesEntity > list ; list = PointUserHistoriesDao . get ( ) . selectOnUser ( userId , limit , offset2 , Order . ASC ) ; for ( PointUserHistoriesEntity item : list ) { item . setBeforeTotal ( calcTotal ) ; calcTotal += item . getPoint ( ) ; item . setTotal ( calcTotal ) ; PointUserHistoriesDao . get ( ) . physicalUpdate ( item ) ; } return list ; } 
+","@ Aspect ( advice = org . support . project . ormapping . transaction . Transaction . class ) private List < PointUserHistoriesEntity > reCalcPointUserHistoryOnUser ( Integer userId , int offset2 ) { List < PointUserHistoriesEntity > list ; list = PointUserHistoriesDao . get ( ) . selectOnUser ( userId , limit , offset2 , Order . ASC ) ; for ( PointUserHistoriesEntity item : list ) { item . setBeforeTotal ( calcTotal ) ; calcTotal += item . getPoint ( ) ; item . setTotal ( calcTotal ) ; LOG . debug ( ""\t"" + DateUtils . getSimpleFormat ( ) . format ( item . getInsertDatetime ( ) ) + "" [total]"" + calcTotal ) ; PointUserHistoriesDao . get ( ) . physicalUpdate ( item ) ; } return list ; } 
+"
+375,"private ExitCodes processStdIn ( ) { try { if ( System . in . available ( ) == 0 ) { for ( String messageLine : CliConstants . MESS_PROC_STDIN ) { System . out . println ( messageLine ) ; } } } catch ( IOException e ) { } ItemDetails item = ItemDetails . fromValues ( CliConstants . NAME_STDIN ) ; return processStream ( item , System . in ) ; } 
+","private ExitCodes processStdIn ( ) { try { if ( System . in . available ( ) == 0 ) { for ( String messageLine : CliConstants . MESS_PROC_STDIN ) { System . out . println ( messageLine ) ; } } } catch ( IOException e ) { logger . log ( Level . SEVERE , ""STDIN is not available"" , e ) ; } ItemDetails item = ItemDetails . fromValues ( CliConstants . NAME_STDIN ) ; return processStream ( item , System . in ) ; } 
+"
+376,"public MBStatsUser findByPrimaryKey ( Serializable primaryKey ) throws NoSuchStatsUserException { MBStatsUser mbStatsUser = fetchByPrimaryKey ( primaryKey ) ; if ( mbStatsUser == null ) { if ( _log . isDebugEnabled ( ) ) { } throw new NoSuchStatsUserException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return mbStatsUser ; } 
+","public MBStatsUser findByPrimaryKey ( Serializable primaryKey ) throws NoSuchStatsUserException { MBStatsUser mbStatsUser = fetchByPrimaryKey ( primaryKey ) ; if ( mbStatsUser == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchStatsUserException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return mbStatsUser ; } 
+"
+377,"protected void refreshMe ( ) { if ( logger . isDebugEnabled ( ) ) { } for ( Iterator widgetIter = widgets . keySet ( ) . iterator ( ) ; widgetIter . hasNext ( ) ; ) { IPropertyEditorWidget control = ( IPropertyEditorWidget ) widgets . get ( widgetIter . next ( ) ) ; control . cleanup ( ) ; ( ( Control ) control ) . dispose ( ) ; } widgets . clear ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""groupNameWidgets.keySet()="" + groupNameWidgets . keySet ( ) ) ; } for ( Iterator groupNameWidgetIter = groupNameWidgets . keySet ( ) . iterator ( ) ; groupNameWidgetIter . hasNext ( ) ; ) { Control control = ( Control ) groupNameWidgets . get ( groupNameWidgetIter . next ( ) ) ; control . dispose ( ) ; } groupNameWidgets . clear ( ) ; if ( conceptModel != null ) { List usedGroups = PropertyGroupHelper . getUsedGroups ( conceptModel ) ; for ( Iterator groupIter = usedGroups . iterator ( ) ; groupIter . hasNext ( ) ; ) { String groupName = ( String ) groupIter . next ( ) ; Control groupNameWidget = new GroupNameWidget ( widgetArea , SWT . NONE , groupName ) ; addWidget ( groupNameWidget ) ; groupNameWidgets . put ( groupName , groupNameWidget ) ; List usedPropertiesForGroup = PropertyGroupHelper . getUsedPropertiesForGroup ( groupName , conceptModel ) ; for ( Iterator propIter = usedPropertiesForGroup . iterator ( ) ; propIter . hasNext ( ) ; ) { String propertyId = ( String ) propIter . next ( ) ; ConceptPropertyInterface prop = ( ConceptPropertyInterface ) conceptModel . getEffectiveProperty ( propertyId ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""creating widget for property with id \"""" + propertyId + ""\"""" ) ; } IPropertyEditorWidget widget = PropertyEditorWidgetFactory . getWidget ( prop . getType ( ) , widgetArea , SWT . NONE , conceptModel , prop . getId ( ) , context , securityReference ) ; if ( widget != null ) { addWidget ( ( Control ) widget ) ; widgets . put ( prop . getId ( ) , widget ) ; focusWidget ( prop . getId ( ) ) ; } else { logger . error ( ""failed to get widget "" + propertyId ) ; } resizeWidgetArea ( ) ; } } } } 
+","protected void refreshMe ( ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""widgets.keySet()="" + widgets . keySet ( ) ) ; } for ( Iterator widgetIter = widgets . keySet ( ) . iterator ( ) ; widgetIter . hasNext ( ) ; ) { IPropertyEditorWidget control = ( IPropertyEditorWidget ) widgets . get ( widgetIter . next ( ) ) ; control . cleanup ( ) ; ( ( Control ) control ) . dispose ( ) ; } widgets . clear ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""groupNameWidgets.keySet()="" + groupNameWidgets . keySet ( ) ) ; } for ( Iterator groupNameWidgetIter = groupNameWidgets . keySet ( ) . iterator ( ) ; groupNameWidgetIter . hasNext ( ) ; ) { Control control = ( Control ) groupNameWidgets . get ( groupNameWidgetIter . next ( ) ) ; control . dispose ( ) ; } groupNameWidgets . clear ( ) ; if ( conceptModel != null ) { List usedGroups = PropertyGroupHelper . getUsedGroups ( conceptModel ) ; for ( Iterator groupIter = usedGroups . iterator ( ) ; groupIter . hasNext ( ) ; ) { String groupName = ( String ) groupIter . next ( ) ; Control groupNameWidget = new GroupNameWidget ( widgetArea , SWT . NONE , groupName ) ; addWidget ( groupNameWidget ) ; groupNameWidgets . put ( groupName , groupNameWidget ) ; List usedPropertiesForGroup = PropertyGroupHelper . getUsedPropertiesForGroup ( groupName , conceptModel ) ; for ( Iterator propIter = usedPropertiesForGroup . iterator ( ) ; propIter . hasNext ( ) ; ) { String propertyId = ( String ) propIter . next ( ) ; ConceptPropertyInterface prop = ( ConceptPropertyInterface ) conceptModel . getEffectiveProperty ( propertyId ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""creating widget for property with id \"""" + propertyId + ""\"""" ) ; } IPropertyEditorWidget widget = PropertyEditorWidgetFactory . getWidget ( prop . getType ( ) , widgetArea , SWT . NONE , conceptModel , prop . getId ( ) , context , securityReference ) ; if ( widget != null ) { addWidget ( ( Control ) widget ) ; widgets . put ( prop . getId ( ) , widget ) ; focusWidget ( prop . getId ( ) ) ; } else { logger . error ( ""failed to get widget "" + propertyId ) ; } resizeWidgetArea ( ) ; } } } } 
+"
+378,"protected void refreshMe ( ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""widgets.keySet()="" + widgets . keySet ( ) ) ; } for ( Iterator widgetIter = widgets . keySet ( ) . iterator ( ) ; widgetIter . hasNext ( ) ; ) { IPropertyEditorWidget control = ( IPropertyEditorWidget ) widgets . get ( widgetIter . next ( ) ) ; control . cleanup ( ) ; ( ( Control ) control ) . dispose ( ) ; } widgets . clear ( ) ; if ( logger . isDebugEnabled ( ) ) { } for ( Iterator groupNameWidgetIter = groupNameWidgets . keySet ( ) . iterator ( ) ; groupNameWidgetIter . hasNext ( ) ; ) { Control control = ( Control ) groupNameWidgets . get ( groupNameWidgetIter . next ( ) ) ; control . dispose ( ) ; } groupNameWidgets . clear ( ) ; if ( conceptModel != null ) { List usedGroups = PropertyGroupHelper . getUsedGroups ( conceptModel ) ; for ( Iterator groupIter = usedGroups . iterator ( ) ; groupIter . hasNext ( ) ; ) { String groupName = ( String ) groupIter . next ( ) ; Control groupNameWidget = new GroupNameWidget ( widgetArea , SWT . NONE , groupName ) ; addWidget ( groupNameWidget ) ; groupNameWidgets . put ( groupName , groupNameWidget ) ; List usedPropertiesForGroup = PropertyGroupHelper . getUsedPropertiesForGroup ( groupName , conceptModel ) ; for ( Iterator propIter = usedPropertiesForGroup . iterator ( ) ; propIter . hasNext ( ) ; ) { String propertyId = ( String ) propIter . next ( ) ; ConceptPropertyInterface prop = ( ConceptPropertyInterface ) conceptModel . getEffectiveProperty ( propertyId ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""creating widget for property with id \"""" + propertyId + ""\"""" ) ; } IPropertyEditorWidget widget = PropertyEditorWidgetFactory . getWidget ( prop . getType ( ) , widgetArea , SWT . NONE , conceptModel , prop . getId ( ) , context , securityReference ) ; if ( widget != null ) { addWidget ( ( Control ) widget ) ; widgets . put ( prop . getId ( ) , widget ) ; focusWidget ( prop . getId ( ) ) ; } else { logger . error ( ""failed to get widget "" + propertyId ) ; } resizeWidgetArea ( ) ; } } } } 
+","protected void refreshMe ( ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""widgets.keySet()="" + widgets . keySet ( ) ) ; } for ( Iterator widgetIter = widgets . keySet ( ) . iterator ( ) ; widgetIter . hasNext ( ) ; ) { IPropertyEditorWidget control = ( IPropertyEditorWidget ) widgets . get ( widgetIter . next ( ) ) ; control . cleanup ( ) ; ( ( Control ) control ) . dispose ( ) ; } widgets . clear ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""groupNameWidgets.keySet()="" + groupNameWidgets . keySet ( ) ) ; } for ( Iterator groupNameWidgetIter = groupNameWidgets . keySet ( ) . iterator ( ) ; groupNameWidgetIter . hasNext ( ) ; ) { Control control = ( Control ) groupNameWidgets . get ( groupNameWidgetIter . next ( ) ) ; control . dispose ( ) ; } groupNameWidgets . clear ( ) ; if ( conceptModel != null ) { List usedGroups = PropertyGroupHelper . getUsedGroups ( conceptModel ) ; for ( Iterator groupIter = usedGroups . iterator ( ) ; groupIter . hasNext ( ) ; ) { String groupName = ( String ) groupIter . next ( ) ; Control groupNameWidget = new GroupNameWidget ( widgetArea , SWT . NONE , groupName ) ; addWidget ( groupNameWidget ) ; groupNameWidgets . put ( groupName , groupNameWidget ) ; List usedPropertiesForGroup = PropertyGroupHelper . getUsedPropertiesForGroup ( groupName , conceptModel ) ; for ( Iterator propIter = usedPropertiesForGroup . iterator ( ) ; propIter . hasNext ( ) ; ) { String propertyId = ( String ) propIter . next ( ) ; ConceptPropertyInterface prop = ( ConceptPropertyInterface ) conceptModel . getEffectiveProperty ( propertyId ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""creating widget for property with id \"""" + propertyId + ""\"""" ) ; } IPropertyEditorWidget widget = PropertyEditorWidgetFactory . getWidget ( prop . getType ( ) , widgetArea , SWT . NONE , conceptModel , prop . getId ( ) , context , securityReference ) ; if ( widget != null ) { addWidget ( ( Control ) widget ) ; widgets . put ( prop . getId ( ) , widget ) ; focusWidget ( prop . getId ( ) ) ; } else { logger . error ( ""failed to get widget "" + propertyId ) ; } resizeWidgetArea ( ) ; } } } } 
+"
+379,"protected void refreshMe ( ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""widgets.keySet()="" + widgets . keySet ( ) ) ; } for ( Iterator widgetIter = widgets . keySet ( ) . iterator ( ) ; widgetIter . hasNext ( ) ; ) { IPropertyEditorWidget control = ( IPropertyEditorWidget ) widgets . get ( widgetIter . next ( ) ) ; control . cleanup ( ) ; ( ( Control ) control ) . dispose ( ) ; } widgets . clear ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""groupNameWidgets.keySet()="" + groupNameWidgets . keySet ( ) ) ; } for ( Iterator groupNameWidgetIter = groupNameWidgets . keySet ( ) . iterator ( ) ; groupNameWidgetIter . hasNext ( ) ; ) { Control control = ( Control ) groupNameWidgets . get ( groupNameWidgetIter . next ( ) ) ; control . dispose ( ) ; } groupNameWidgets . clear ( ) ; if ( conceptModel != null ) { List usedGroups = PropertyGroupHelper . getUsedGroups ( conceptModel ) ; for ( Iterator groupIter = usedGroups . iterator ( ) ; groupIter . hasNext ( ) ; ) { String groupName = ( String ) groupIter . next ( ) ; Control groupNameWidget = new GroupNameWidget ( widgetArea , SWT . NONE , groupName ) ; addWidget ( groupNameWidget ) ; groupNameWidgets . put ( groupName , groupNameWidget ) ; List usedPropertiesForGroup = PropertyGroupHelper . getUsedPropertiesForGroup ( groupName , conceptModel ) ; for ( Iterator propIter = usedPropertiesForGroup . iterator ( ) ; propIter . hasNext ( ) ; ) { String propertyId = ( String ) propIter . next ( ) ; ConceptPropertyInterface prop = ( ConceptPropertyInterface ) conceptModel . getEffectiveProperty ( propertyId ) ; if ( logger . isDebugEnabled ( ) ) { } IPropertyEditorWidget widget = PropertyEditorWidgetFactory . getWidget ( prop . getType ( ) , widgetArea , SWT . NONE , conceptModel , prop . getId ( ) , context , securityReference ) ; if ( widget != null ) { addWidget ( ( Control ) widget ) ; widgets . put ( prop . getId ( ) , widget ) ; focusWidget ( prop . getId ( ) ) ; } else { logger . error ( ""failed to get widget "" + propertyId ) ; } resizeWidgetArea ( ) ; } } } } 
+","protected void refreshMe ( ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""widgets.keySet()="" + widgets . keySet ( ) ) ; } for ( Iterator widgetIter = widgets . keySet ( ) . iterator ( ) ; widgetIter . hasNext ( ) ; ) { IPropertyEditorWidget control = ( IPropertyEditorWidget ) widgets . get ( widgetIter . next ( ) ) ; control . cleanup ( ) ; ( ( Control ) control ) . dispose ( ) ; } widgets . clear ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""groupNameWidgets.keySet()="" + groupNameWidgets . keySet ( ) ) ; } for ( Iterator groupNameWidgetIter = groupNameWidgets . keySet ( ) . iterator ( ) ; groupNameWidgetIter . hasNext ( ) ; ) { Control control = ( Control ) groupNameWidgets . get ( groupNameWidgetIter . next ( ) ) ; control . dispose ( ) ; } groupNameWidgets . clear ( ) ; if ( conceptModel != null ) { List usedGroups = PropertyGroupHelper . getUsedGroups ( conceptModel ) ; for ( Iterator groupIter = usedGroups . iterator ( ) ; groupIter . hasNext ( ) ; ) { String groupName = ( String ) groupIter . next ( ) ; Control groupNameWidget = new GroupNameWidget ( widgetArea , SWT . NONE , groupName ) ; addWidget ( groupNameWidget ) ; groupNameWidgets . put ( groupName , groupNameWidget ) ; List usedPropertiesForGroup = PropertyGroupHelper . getUsedPropertiesForGroup ( groupName , conceptModel ) ; for ( Iterator propIter = usedPropertiesForGroup . iterator ( ) ; propIter . hasNext ( ) ; ) { String propertyId = ( String ) propIter . next ( ) ; ConceptPropertyInterface prop = ( ConceptPropertyInterface ) conceptModel . getEffectiveProperty ( propertyId ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""creating widget for property with id \"""" + propertyId + ""\"""" ) ; } IPropertyEditorWidget widget = PropertyEditorWidgetFactory . getWidget ( prop . getType ( ) , widgetArea , SWT . NONE , conceptModel , prop . getId ( ) , context , securityReference ) ; if ( widget != null ) { addWidget ( ( Control ) widget ) ; widgets . put ( prop . getId ( ) , widget ) ; focusWidget ( prop . getId ( ) ) ; } else { logger . error ( ""failed to get widget "" + propertyId ) ; } resizeWidgetArea ( ) ; } } } } 
+"
+380,"protected void refreshMe ( ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""widgets.keySet()="" + widgets . keySet ( ) ) ; } for ( Iterator widgetIter = widgets . keySet ( ) . iterator ( ) ; widgetIter . hasNext ( ) ; ) { IPropertyEditorWidget control = ( IPropertyEditorWidget ) widgets . get ( widgetIter . next ( ) ) ; control . cleanup ( ) ; ( ( Control ) control ) . dispose ( ) ; } widgets . clear ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""groupNameWidgets.keySet()="" + groupNameWidgets . keySet ( ) ) ; } for ( Iterator groupNameWidgetIter = groupNameWidgets . keySet ( ) . iterator ( ) ; groupNameWidgetIter . hasNext ( ) ; ) { Control control = ( Control ) groupNameWidgets . get ( groupNameWidgetIter . next ( ) ) ; control . dispose ( ) ; } groupNameWidgets . clear ( ) ; if ( conceptModel != null ) { List usedGroups = PropertyGroupHelper . getUsedGroups ( conceptModel ) ; for ( Iterator groupIter = usedGroups . iterator ( ) ; groupIter . hasNext ( ) ; ) { String groupName = ( String ) groupIter . next ( ) ; Control groupNameWidget = new GroupNameWidget ( widgetArea , SWT . NONE , groupName ) ; addWidget ( groupNameWidget ) ; groupNameWidgets . put ( groupName , groupNameWidget ) ; List usedPropertiesForGroup = PropertyGroupHelper . getUsedPropertiesForGroup ( groupName , conceptModel ) ; for ( Iterator propIter = usedPropertiesForGroup . iterator ( ) ; propIter . hasNext ( ) ; ) { String propertyId = ( String ) propIter . next ( ) ; ConceptPropertyInterface prop = ( ConceptPropertyInterface ) conceptModel . getEffectiveProperty ( propertyId ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""creating widget for property with id \"""" + propertyId + ""\"""" ) ; } IPropertyEditorWidget widget = PropertyEditorWidgetFactory . getWidget ( prop . getType ( ) , widgetArea , SWT . NONE , conceptModel , prop . getId ( ) , context , securityReference ) ; if ( widget != null ) { addWidget ( ( Control ) widget ) ; widgets . put ( prop . getId ( ) , widget ) ; focusWidget ( prop . getId ( ) ) ; } else { } resizeWidgetArea ( ) ; } } } } 
+","protected void refreshMe ( ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""widgets.keySet()="" + widgets . keySet ( ) ) ; } for ( Iterator widgetIter = widgets . keySet ( ) . iterator ( ) ; widgetIter . hasNext ( ) ; ) { IPropertyEditorWidget control = ( IPropertyEditorWidget ) widgets . get ( widgetIter . next ( ) ) ; control . cleanup ( ) ; ( ( Control ) control ) . dispose ( ) ; } widgets . clear ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""groupNameWidgets.keySet()="" + groupNameWidgets . keySet ( ) ) ; } for ( Iterator groupNameWidgetIter = groupNameWidgets . keySet ( ) . iterator ( ) ; groupNameWidgetIter . hasNext ( ) ; ) { Control control = ( Control ) groupNameWidgets . get ( groupNameWidgetIter . next ( ) ) ; control . dispose ( ) ; } groupNameWidgets . clear ( ) ; if ( conceptModel != null ) { List usedGroups = PropertyGroupHelper . getUsedGroups ( conceptModel ) ; for ( Iterator groupIter = usedGroups . iterator ( ) ; groupIter . hasNext ( ) ; ) { String groupName = ( String ) groupIter . next ( ) ; Control groupNameWidget = new GroupNameWidget ( widgetArea , SWT . NONE , groupName ) ; addWidget ( groupNameWidget ) ; groupNameWidgets . put ( groupName , groupNameWidget ) ; List usedPropertiesForGroup = PropertyGroupHelper . getUsedPropertiesForGroup ( groupName , conceptModel ) ; for ( Iterator propIter = usedPropertiesForGroup . iterator ( ) ; propIter . hasNext ( ) ; ) { String propertyId = ( String ) propIter . next ( ) ; ConceptPropertyInterface prop = ( ConceptPropertyInterface ) conceptModel . getEffectiveProperty ( propertyId ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""creating widget for property with id \"""" + propertyId + ""\"""" ) ; } IPropertyEditorWidget widget = PropertyEditorWidgetFactory . getWidget ( prop . getType ( ) , widgetArea , SWT . NONE , conceptModel , prop . getId ( ) , context , securityReference ) ; if ( widget != null ) { addWidget ( ( Control ) widget ) ; widgets . put ( prop . getId ( ) , widget ) ; focusWidget ( prop . getId ( ) ) ; } else { logger . error ( ""failed to get widget "" + propertyId ) ; } resizeWidgetArea ( ) ; } } } } 
+"
+381,"private static void debugCommandStart ( String desc , List < String > argv ) { if ( log . isDebugEnabled ( ) ) { log . debug ( getCommand ( argv ) ) ; } } 
+","private static void debugCommandStart ( String desc , List < String > argv ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""START {} ################# "" , desc ) ; log . debug ( getCommand ( argv ) ) ; } } 
+"
+382,"private static void debugCommandStart ( String desc , List < String > argv ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""START {} ################# "" , desc ) ; } } 
+","private static void debugCommandStart ( String desc , List < String > argv ) { if ( log . isDebugEnabled ( ) ) { log . debug ( ""START {} ################# "" , desc ) ; log . debug ( getCommand ( argv ) ) ; } } 
+"
+383,"protected void postBusEventFromTransaction ( final AccountModelDao account , final AccountModelDao savedAccount , final ChangeType changeType , final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory , final InternalCallContext context ) throws BillingExceptionBase { switch ( changeType ) { case INSERT : break ; default : return ; } final Long recordId = savedAccount . getRecordId ( ) ; final InternalCallContext rehydratedContext = internalCallContextFactory . createInternalCallContext ( savedAccount , recordId , context ) ; final AccountCreationInternalEvent creationEvent = new DefaultAccountCreationEvent ( new DefaultAccountData ( savedAccount ) , savedAccount . getId ( ) , rehydratedContext . getAccountRecordId ( ) , rehydratedContext . getTenantRecordId ( ) , rehydratedContext . getUserToken ( ) ) ; try { eventBus . postFromTransaction ( creationEvent , entitySqlDaoWrapperFactory . getHandle ( ) . getConnection ( ) ) ; } catch ( final EventBusException e ) { } } 
+","protected void postBusEventFromTransaction ( final AccountModelDao account , final AccountModelDao savedAccount , final ChangeType changeType , final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory , final InternalCallContext context ) throws BillingExceptionBase { switch ( changeType ) { case INSERT : break ; default : return ; } final Long recordId = savedAccount . getRecordId ( ) ; final InternalCallContext rehydratedContext = internalCallContextFactory . createInternalCallContext ( savedAccount , recordId , context ) ; final AccountCreationInternalEvent creationEvent = new DefaultAccountCreationEvent ( new DefaultAccountData ( savedAccount ) , savedAccount . getId ( ) , rehydratedContext . getAccountRecordId ( ) , rehydratedContext . getTenantRecordId ( ) , rehydratedContext . getUserToken ( ) ) ; try { eventBus . postFromTransaction ( creationEvent , entitySqlDaoWrapperFactory . getHandle ( ) . getConnection ( ) ) ; } catch ( final EventBusException e ) { log . warn ( ""Failed to post account creation event for accountId='{}'"" , savedAccount . getId ( ) , e ) ; } } 
+"
+384,"@ Test public void createAndDestroyEnvironment ( ) throws EnvironmentDriverException , InterruptedException { final Semaphore mutex = new Semaphore ( 0 ) ; ObjectWrapper < Throwable > exceptionWrapper = new ObjectWrapper < > ( ) ; String dummyImageId = ""abcd1234"" ; String dummyRepoUrl = ""test.repo.url/repo"" ; final StartedEnvironment startedEnv = environmentDriver . startEnvironment ( dummyImageId , dummyRepoUrl , SystemImageType . DOCKER_IMAGE , DUMMY_REPOSITORY_CONFIGURATION , new DebugData ( false ) , ""put-access-token-here"" , false , Collections . emptyMap ( ) ) ; Consumer < RunningEnvironment > onEnvironmentStarted = ( runningEnvironment ) -> { boolean containerDestroyed = false ; try { assertThatContainerIsRunning ( runningEnvironment ) ; destroyEnvironment ( runningEnvironment ) ; containerDestroyed = true ; assertThatContainerIsNotRunning ( runningEnvironment ) ; mutex . release ( ) ; } catch ( Throwable e ) { exceptionWrapper . set ( e ) ; } finally { if ( ! containerDestroyed ) { destroyEnvironmentWithReport ( runningEnvironment ) ; } } mutex . release ( ) ; } ; Consumer < Exception > onError = ( e ) -> { try { startedEnv . destroyEnvironment ( ) ; mutex . release ( ) ; } catch ( EnvironmentDriverException e1 ) { logger . error ( ""Environment LEAK! The running environment was not destroyed. ID: "" + startedEnv . getId ( ) , e1 ) ; } fail ( ""Failed to init builder. "" + e . getMessage ( ) ) ; } ; startedEnv . monitorInitialization ( onEnvironmentStarted , onError ) ; boolean completed = mutex . tryAcquire ( TEST_EXECUTION_TIMEOUT , TimeUnit . SECONDS ) ; Throwable exception = exceptionWrapper . get ( ) ; if ( exception != null ) { logger . error ( """" , exception ) ; fail ( exception . getMessage ( ) ) ; } assertTrue ( ""timeout reached, test has not complete."" , completed ) ; } 
+","@ Test public void createAndDestroyEnvironment ( ) throws EnvironmentDriverException , InterruptedException { final Semaphore mutex = new Semaphore ( 0 ) ; ObjectWrapper < Throwable > exceptionWrapper = new ObjectWrapper < > ( ) ; String dummyImageId = ""abcd1234"" ; String dummyRepoUrl = ""test.repo.url/repo"" ; final StartedEnvironment startedEnv = environmentDriver . startEnvironment ( dummyImageId , dummyRepoUrl , SystemImageType . DOCKER_IMAGE , DUMMY_REPOSITORY_CONFIGURATION , new DebugData ( false ) , ""put-access-token-here"" , false , Collections . emptyMap ( ) ) ; Consumer < RunningEnvironment > onEnvironmentStarted = ( runningEnvironment ) -> { boolean containerDestroyed = false ; try { assertThatContainerIsRunning ( runningEnvironment ) ; destroyEnvironment ( runningEnvironment ) ; containerDestroyed = true ; assertThatContainerIsNotRunning ( runningEnvironment ) ; mutex . release ( ) ; } catch ( Throwable e ) { exceptionWrapper . set ( e ) ; } finally { if ( ! containerDestroyed ) { destroyEnvironmentWithReport ( runningEnvironment ) ; } } mutex . release ( ) ; } ; Consumer < Exception > onError = ( e ) -> { try { logger . info ( ""Trying to destroy environment due to an error:"" , e ) ; startedEnv . destroyEnvironment ( ) ; mutex . release ( ) ; } catch ( EnvironmentDriverException e1 ) { logger . error ( ""Environment LEAK! The running environment was not destroyed. ID: "" + startedEnv . getId ( ) , e1 ) ; } fail ( ""Failed to init builder. "" + e . getMessage ( ) ) ; } ; startedEnv . monitorInitialization ( onEnvironmentStarted , onError ) ; boolean completed = mutex . tryAcquire ( TEST_EXECUTION_TIMEOUT , TimeUnit . SECONDS ) ; Throwable exception = exceptionWrapper . get ( ) ; if ( exception != null ) { logger . error ( """" , exception ) ; fail ( exception . getMessage ( ) ) ; } assertTrue ( ""timeout reached, test has not complete."" , completed ) ; } 
+"
+385,"@ Test public void createAndDestroyEnvironment ( ) throws EnvironmentDriverException , InterruptedException { final Semaphore mutex = new Semaphore ( 0 ) ; ObjectWrapper < Throwable > exceptionWrapper = new ObjectWrapper < > ( ) ; String dummyImageId = ""abcd1234"" ; String dummyRepoUrl = ""test.repo.url/repo"" ; final StartedEnvironment startedEnv = environmentDriver . startEnvironment ( dummyImageId , dummyRepoUrl , SystemImageType . DOCKER_IMAGE , DUMMY_REPOSITORY_CONFIGURATION , new DebugData ( false ) , ""put-access-token-here"" , false , Collections . emptyMap ( ) ) ; Consumer < RunningEnvironment > onEnvironmentStarted = ( runningEnvironment ) -> { boolean containerDestroyed = false ; try { assertThatContainerIsRunning ( runningEnvironment ) ; destroyEnvironment ( runningEnvironment ) ; containerDestroyed = true ; assertThatContainerIsNotRunning ( runningEnvironment ) ; mutex . release ( ) ; } catch ( Throwable e ) { exceptionWrapper . set ( e ) ; } finally { if ( ! containerDestroyed ) { destroyEnvironmentWithReport ( runningEnvironment ) ; } } mutex . release ( ) ; } ; Consumer < Exception > onError = ( e ) -> { try { logger . info ( ""Trying to destroy environment due to an error:"" , e ) ; startedEnv . destroyEnvironment ( ) ; mutex . release ( ) ; } catch ( EnvironmentDriverException e1 ) { } fail ( ""Failed to init builder. "" + e . getMessage ( ) ) ; } ; startedEnv . monitorInitialization ( onEnvironmentStarted , onError ) ; boolean completed = mutex . tryAcquire ( TEST_EXECUTION_TIMEOUT , TimeUnit . SECONDS ) ; Throwable exception = exceptionWrapper . get ( ) ; if ( exception != null ) { logger . error ( """" , exception ) ; fail ( exception . getMessage ( ) ) ; } assertTrue ( ""timeout reached, test has not complete."" , completed ) ; } 
+","@ Test public void createAndDestroyEnvironment ( ) throws EnvironmentDriverException , InterruptedException { final Semaphore mutex = new Semaphore ( 0 ) ; ObjectWrapper < Throwable > exceptionWrapper = new ObjectWrapper < > ( ) ; String dummyImageId = ""abcd1234"" ; String dummyRepoUrl = ""test.repo.url/repo"" ; final StartedEnvironment startedEnv = environmentDriver . startEnvironment ( dummyImageId , dummyRepoUrl , SystemImageType . DOCKER_IMAGE , DUMMY_REPOSITORY_CONFIGURATION , new DebugData ( false ) , ""put-access-token-here"" , false , Collections . emptyMap ( ) ) ; Consumer < RunningEnvironment > onEnvironmentStarted = ( runningEnvironment ) -> { boolean containerDestroyed = false ; try { assertThatContainerIsRunning ( runningEnvironment ) ; destroyEnvironment ( runningEnvironment ) ; containerDestroyed = true ; assertThatContainerIsNotRunning ( runningEnvironment ) ; mutex . release ( ) ; } catch ( Throwable e ) { exceptionWrapper . set ( e ) ; } finally { if ( ! containerDestroyed ) { destroyEnvironmentWithReport ( runningEnvironment ) ; } } mutex . release ( ) ; } ; Consumer < Exception > onError = ( e ) -> { try { logger . info ( ""Trying to destroy environment due to an error:"" , e ) ; startedEnv . destroyEnvironment ( ) ; mutex . release ( ) ; } catch ( EnvironmentDriverException e1 ) { logger . error ( ""Environment LEAK! The running environment was not destroyed. ID: "" + startedEnv . getId ( ) , e1 ) ; } fail ( ""Failed to init builder. "" + e . getMessage ( ) ) ; } ; startedEnv . monitorInitialization ( onEnvironmentStarted , onError ) ; boolean completed = mutex . tryAcquire ( TEST_EXECUTION_TIMEOUT , TimeUnit . SECONDS ) ; Throwable exception = exceptionWrapper . get ( ) ; if ( exception != null ) { logger . error ( """" , exception ) ; fail ( exception . getMessage ( ) ) ; } assertTrue ( ""timeout reached, test has not complete."" , completed ) ; } 
+"
+386,"@ Test public void createAndDestroyEnvironment ( ) throws EnvironmentDriverException , InterruptedException { final Semaphore mutex = new Semaphore ( 0 ) ; ObjectWrapper < Throwable > exceptionWrapper = new ObjectWrapper < > ( ) ; String dummyImageId = ""abcd1234"" ; String dummyRepoUrl = ""test.repo.url/repo"" ; final StartedEnvironment startedEnv = environmentDriver . startEnvironment ( dummyImageId , dummyRepoUrl , SystemImageType . DOCKER_IMAGE , DUMMY_REPOSITORY_CONFIGURATION , new DebugData ( false ) , ""put-access-token-here"" , false , Collections . emptyMap ( ) ) ; Consumer < RunningEnvironment > onEnvironmentStarted = ( runningEnvironment ) -> { boolean containerDestroyed = false ; try { assertThatContainerIsRunning ( runningEnvironment ) ; destroyEnvironment ( runningEnvironment ) ; containerDestroyed = true ; assertThatContainerIsNotRunning ( runningEnvironment ) ; mutex . release ( ) ; } catch ( Throwable e ) { exceptionWrapper . set ( e ) ; } finally { if ( ! containerDestroyed ) { destroyEnvironmentWithReport ( runningEnvironment ) ; } } mutex . release ( ) ; } ; Consumer < Exception > onError = ( e ) -> { try { logger . info ( ""Trying to destroy environment due to an error:"" , e ) ; startedEnv . destroyEnvironment ( ) ; mutex . release ( ) ; } catch ( EnvironmentDriverException e1 ) { logger . error ( ""Environment LEAK! The running environment was not destroyed. ID: "" + startedEnv . getId ( ) , e1 ) ; } fail ( ""Failed to init builder. "" + e . getMessage ( ) ) ; } ; startedEnv . monitorInitialization ( onEnvironmentStarted , onError ) ; boolean completed = mutex . tryAcquire ( TEST_EXECUTION_TIMEOUT , TimeUnit . SECONDS ) ; Throwable exception = exceptionWrapper . get ( ) ; if ( exception != null ) { fail ( exception . getMessage ( ) ) ; } assertTrue ( ""timeout reached, test has not complete."" , completed ) ; } 
+","@ Test public void createAndDestroyEnvironment ( ) throws EnvironmentDriverException , InterruptedException { final Semaphore mutex = new Semaphore ( 0 ) ; ObjectWrapper < Throwable > exceptionWrapper = new ObjectWrapper < > ( ) ; String dummyImageId = ""abcd1234"" ; String dummyRepoUrl = ""test.repo.url/repo"" ; final StartedEnvironment startedEnv = environmentDriver . startEnvironment ( dummyImageId , dummyRepoUrl , SystemImageType . DOCKER_IMAGE , DUMMY_REPOSITORY_CONFIGURATION , new DebugData ( false ) , ""put-access-token-here"" , false , Collections . emptyMap ( ) ) ; Consumer < RunningEnvironment > onEnvironmentStarted = ( runningEnvironment ) -> { boolean containerDestroyed = false ; try { assertThatContainerIsRunning ( runningEnvironment ) ; destroyEnvironment ( runningEnvironment ) ; containerDestroyed = true ; assertThatContainerIsNotRunning ( runningEnvironment ) ; mutex . release ( ) ; } catch ( Throwable e ) { exceptionWrapper . set ( e ) ; } finally { if ( ! containerDestroyed ) { destroyEnvironmentWithReport ( runningEnvironment ) ; } } mutex . release ( ) ; } ; Consumer < Exception > onError = ( e ) -> { try { logger . info ( ""Trying to destroy environment due to an error:"" , e ) ; startedEnv . destroyEnvironment ( ) ; mutex . release ( ) ; } catch ( EnvironmentDriverException e1 ) { logger . error ( ""Environment LEAK! The running environment was not destroyed. ID: "" + startedEnv . getId ( ) , e1 ) ; } fail ( ""Failed to init builder. "" + e . getMessage ( ) ) ; } ; startedEnv . monitorInitialization ( onEnvironmentStarted , onError ) ; boolean completed = mutex . tryAcquire ( TEST_EXECUTION_TIMEOUT , TimeUnit . SECONDS ) ; Throwable exception = exceptionWrapper . get ( ) ; if ( exception != null ) { logger . error ( """" , exception ) ; fail ( exception . getMessage ( ) ) ; } assertTrue ( ""timeout reached, test has not complete."" , completed ) ; } 
+"
+387,"public static < T > T getFromJobExecutionContext ( ExecutionContext context , String key , Type typeOfT ) { Object valueString = context . get ( key ) ; if ( valueString != null ) { if ( valueString instanceof String ) { Gson gson = new Gson ( ) ; return ( T ) gson . fromJson ( ( String ) valueString , typeOfT ) ; } else { } } return null ; } 
+","public static < T > T getFromJobExecutionContext ( ExecutionContext context , String key , Type typeOfT ) { Object valueString = context . get ( key ) ; if ( valueString != null ) { if ( valueString instanceof String ) { Gson gson = new Gson ( ) ; return ( T ) gson . fromJson ( ( String ) valueString , typeOfT ) ; } else { logger . error ( ""invalid data type saved into execution context: "" + valueString . getClass ( ) + "", "" + valueString ) ; } } return null ; } 
+"
+388,"public DiscountProductGroup postDiscountIdDiscountProductGroup ( Long id , DiscountProductGroup discountProductGroup ) throws Exception { HttpInvoker . HttpResponse httpResponse = postDiscountIdDiscountProductGroupHttpResponse ( id , discountProductGroup ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return DiscountProductGroupSerDes . toDTO ( content ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } 
+","public DiscountProductGroup postDiscountIdDiscountProductGroup ( Long id , DiscountProductGroup discountProductGroup ) throws Exception { HttpInvoker . HttpResponse httpResponse = postDiscountIdDiscountProductGroupHttpResponse ( id , discountProductGroup ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return DiscountProductGroupSerDes . toDTO ( content ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } 
+"
+389,"public DiscountProductGroup postDiscountIdDiscountProductGroup ( Long id , DiscountProductGroup discountProductGroup ) throws Exception { HttpInvoker . HttpResponse httpResponse = postDiscountIdDiscountProductGroupHttpResponse ( id , discountProductGroup ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return DiscountProductGroupSerDes . toDTO ( content ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } 
+","public DiscountProductGroup postDiscountIdDiscountProductGroup ( Long id , DiscountProductGroup discountProductGroup ) throws Exception { HttpInvoker . HttpResponse httpResponse = postDiscountIdDiscountProductGroupHttpResponse ( id , discountProductGroup ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return DiscountProductGroupSerDes . toDTO ( content ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } 
+"
+390,"public DiscountProductGroup postDiscountIdDiscountProductGroup ( Long id , DiscountProductGroup discountProductGroup ) throws Exception { HttpInvoker . HttpResponse httpResponse = postDiscountIdDiscountProductGroupHttpResponse ( id , discountProductGroup ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return DiscountProductGroupSerDes . toDTO ( content ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } 
+","public DiscountProductGroup postDiscountIdDiscountProductGroup ( Long id , DiscountProductGroup discountProductGroup ) throws Exception { HttpInvoker . HttpResponse httpResponse = postDiscountIdDiscountProductGroupHttpResponse ( id , discountProductGroup ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return DiscountProductGroupSerDes . toDTO ( content ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } 
+"
+391,"public DiscountProductGroup postDiscountIdDiscountProductGroup ( Long id , DiscountProductGroup discountProductGroup ) throws Exception { HttpInvoker . HttpResponse httpResponse = postDiscountIdDiscountProductGroupHttpResponse ( id , discountProductGroup ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return DiscountProductGroupSerDes . toDTO ( content ) ; } catch ( Exception e ) { throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } 
+","public DiscountProductGroup postDiscountIdDiscountProductGroup ( Long id , DiscountProductGroup discountProductGroup ) throws Exception { HttpInvoker . HttpResponse httpResponse = postDiscountIdDiscountProductGroupHttpResponse ( id , discountProductGroup ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response content: "" + content ) ; _logger . log ( Level . WARNING , ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( ""HTTP response content: "" + content ) ; _logger . fine ( ""HTTP response message: "" + httpResponse . getMessage ( ) ) ; _logger . fine ( ""HTTP response status code: "" + httpResponse . getStatusCode ( ) ) ; } try { return DiscountProductGroupSerDes . toDTO ( content ) ; } catch ( Exception e ) { _logger . log ( Level . WARNING , ""Unable to process HTTP response: "" + content , e ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } } 
+"
+392,"public void close ( ) { try { if ( _writer == null ) { return ; } _writer . close ( ) ; _writer = null ; } catch ( IOException ioException ) { if ( _log . isDebugEnabled ( ) ) { } _hasError = true ; } } 
+","public void close ( ) { try { if ( _writer == null ) { return ; } _writer . close ( ) ; _writer = null ; } catch ( IOException ioException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ioException , ioException ) ; } _hasError = true ; } } 
+"
+393,"public static void waitForServerToBeUp ( ) { ZooKeeper zk = null ; while ( true ) { try { Thread . sleep ( 2000 ) ; if ( zk == null ) { zk = zooKeeper ( 60000 , null ) ; } } catch ( InterruptedException e ) { log . error ( ""ZooKeeperSerivce startup failed"" ) ; } ZooKeeper . States st = zk . getState ( ) ; if ( st == ZooKeeper . States . CONNECTED ) { break ; } } } 
+","public static void waitForServerToBeUp ( ) { ZooKeeper zk = null ; while ( true ) { try { log . debug ( ""ZooKeeper server is starting..."" ) ; Thread . sleep ( 2000 ) ; if ( zk == null ) { zk = zooKeeper ( 60000 , null ) ; } } catch ( InterruptedException e ) { log . error ( ""ZooKeeperSerivce startup failed"" ) ; } ZooKeeper . States st = zk . getState ( ) ; if ( st == ZooKeeper . States . CONNECTED ) { break ; } } } 
+"
+394,"public static void waitForServerToBeUp ( ) { ZooKeeper zk = null ; while ( true ) { try { log . debug ( ""ZooKeeper server is starting..."" ) ; Thread . sleep ( 2000 ) ; if ( zk == null ) { zk = zooKeeper ( 60000 , null ) ; } } catch ( InterruptedException e ) { } ZooKeeper . States st = zk . getState ( ) ; if ( st == ZooKeeper . States . CONNECTED ) { break ; } } } 
+","public static void waitForServerToBeUp ( ) { ZooKeeper zk = null ; while ( true ) { try { log . debug ( ""ZooKeeper server is starting..."" ) ; Thread . sleep ( 2000 ) ; if ( zk == null ) { zk = zooKeeper ( 60000 , null ) ; } } catch ( InterruptedException e ) { log . error ( ""ZooKeeperSerivce startup failed"" ) ; } ZooKeeper . States st = zk . getState ( ) ; if ( st == ZooKeeper . States . CONNECTED ) { break ; } } } 
+"
+395,"@ SuppressWarnings ( ""rawtypes"" ) @ Test ( groups = ""Live"" ) public void testJcloudsCreateBogStandard ( ) throws Exception { JcloudsSshMachineLocation m1 = obtainMachine ( ImmutableMap . of ( ) ) ; Map details = MutableMap . of ( ""id"" , m1 . getJcloudsId ( ) , ""hostname"" , m1 . getAddress ( ) . getHostAddress ( ) , ""user"" , m1 . getUser ( ) ) ; log . info ( ""got machine "" + m1 + "" at "" + jcloudsLocation + "": "" + details + ""; now trying to rebind"" ) ; String result ; result = execWithOutput ( m1 , Arrays . asList ( ""echo trying m1"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m1"" ) ) ; log . info ( ""now trying rebind "" + m1 ) ; JcloudsSshMachineLocation m2 = ( JcloudsSshMachineLocation ) jcloudsLocation . registerMachine ( details ) ; result = execWithOutput ( m2 , Arrays . asList ( ""echo trying m2"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m2"" ) ) ; } 
+","@ SuppressWarnings ( ""rawtypes"" ) @ Test ( groups = ""Live"" ) public void testJcloudsCreateBogStandard ( ) throws Exception { log . info ( ""TEST testJcloudsCreateBogStandard"" ) ; JcloudsSshMachineLocation m1 = obtainMachine ( ImmutableMap . of ( ) ) ; Map details = MutableMap . of ( ""id"" , m1 . getJcloudsId ( ) , ""hostname"" , m1 . getAddress ( ) . getHostAddress ( ) , ""user"" , m1 . getUser ( ) ) ; log . info ( ""got machine "" + m1 + "" at "" + jcloudsLocation + "": "" + details + ""; now trying to rebind"" ) ; String result ; result = execWithOutput ( m1 , Arrays . asList ( ""echo trying m1"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m1"" ) ) ; log . info ( ""now trying rebind "" + m1 ) ; JcloudsSshMachineLocation m2 = ( JcloudsSshMachineLocation ) jcloudsLocation . registerMachine ( details ) ; result = execWithOutput ( m2 , Arrays . asList ( ""echo trying m2"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m2"" ) ) ; } 
+"
+396,"@ SuppressWarnings ( ""rawtypes"" ) @ Test ( groups = ""Live"" ) public void testJcloudsCreateBogStandard ( ) throws Exception { log . info ( ""TEST testJcloudsCreateBogStandard"" ) ; JcloudsSshMachineLocation m1 = obtainMachine ( ImmutableMap . of ( ) ) ; Map details = MutableMap . of ( ""id"" , m1 . getJcloudsId ( ) , ""hostname"" , m1 . getAddress ( ) . getHostAddress ( ) , ""user"" , m1 . getUser ( ) ) ; String result ; result = execWithOutput ( m1 , Arrays . asList ( ""echo trying m1"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m1"" ) ) ; log . info ( ""now trying rebind "" + m1 ) ; JcloudsSshMachineLocation m2 = ( JcloudsSshMachineLocation ) jcloudsLocation . registerMachine ( details ) ; result = execWithOutput ( m2 , Arrays . asList ( ""echo trying m2"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m2"" ) ) ; } 
+","@ SuppressWarnings ( ""rawtypes"" ) @ Test ( groups = ""Live"" ) public void testJcloudsCreateBogStandard ( ) throws Exception { log . info ( ""TEST testJcloudsCreateBogStandard"" ) ; JcloudsSshMachineLocation m1 = obtainMachine ( ImmutableMap . of ( ) ) ; Map details = MutableMap . of ( ""id"" , m1 . getJcloudsId ( ) , ""hostname"" , m1 . getAddress ( ) . getHostAddress ( ) , ""user"" , m1 . getUser ( ) ) ; log . info ( ""got machine "" + m1 + "" at "" + jcloudsLocation + "": "" + details + ""; now trying to rebind"" ) ; String result ; result = execWithOutput ( m1 , Arrays . asList ( ""echo trying m1"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m1"" ) ) ; log . info ( ""now trying rebind "" + m1 ) ; JcloudsSshMachineLocation m2 = ( JcloudsSshMachineLocation ) jcloudsLocation . registerMachine ( details ) ; result = execWithOutput ( m2 , Arrays . asList ( ""echo trying m2"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m2"" ) ) ; } 
+"
+397,"@ SuppressWarnings ( ""rawtypes"" ) @ Test ( groups = ""Live"" ) public void testJcloudsCreateBogStandard ( ) throws Exception { log . info ( ""TEST testJcloudsCreateBogStandard"" ) ; JcloudsSshMachineLocation m1 = obtainMachine ( ImmutableMap . of ( ) ) ; Map details = MutableMap . of ( ""id"" , m1 . getJcloudsId ( ) , ""hostname"" , m1 . getAddress ( ) . getHostAddress ( ) , ""user"" , m1 . getUser ( ) ) ; log . info ( ""got machine "" + m1 + "" at "" + jcloudsLocation + "": "" + details + ""; now trying to rebind"" ) ; String result ; result = execWithOutput ( m1 , Arrays . asList ( ""echo trying m1"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m1"" ) ) ; JcloudsSshMachineLocation m2 = ( JcloudsSshMachineLocation ) jcloudsLocation . registerMachine ( details ) ; result = execWithOutput ( m2 , Arrays . asList ( ""echo trying m2"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m2"" ) ) ; } 
+","@ SuppressWarnings ( ""rawtypes"" ) @ Test ( groups = ""Live"" ) public void testJcloudsCreateBogStandard ( ) throws Exception { log . info ( ""TEST testJcloudsCreateBogStandard"" ) ; JcloudsSshMachineLocation m1 = obtainMachine ( ImmutableMap . of ( ) ) ; Map details = MutableMap . of ( ""id"" , m1 . getJcloudsId ( ) , ""hostname"" , m1 . getAddress ( ) . getHostAddress ( ) , ""user"" , m1 . getUser ( ) ) ; log . info ( ""got machine "" + m1 + "" at "" + jcloudsLocation + "": "" + details + ""; now trying to rebind"" ) ; String result ; result = execWithOutput ( m1 , Arrays . asList ( ""echo trying m1"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m1"" ) ) ; log . info ( ""now trying rebind "" + m1 ) ; JcloudsSshMachineLocation m2 = ( JcloudsSshMachineLocation ) jcloudsLocation . registerMachine ( details ) ; result = execWithOutput ( m2 , Arrays . asList ( ""echo trying m2"" , ""hostname"" , ""date"" ) ) ; Assert . assertTrue ( result . contains ( ""trying m2"" ) ) ; } 
+"
+398,"private static void count ( ) { DbConn cnx = null ; try { cnx = Helpers . getNewDbSession ( ) ; } catch ( Exception e ) { throw new JqmRuntimeException ( ""Could not fetch node count"" , e ) ; } finally { Helpers . closeQuietly ( cnx ) ; } } 
+","private static void count ( ) { DbConn cnx = null ; try { cnx = Helpers . getNewDbSession ( ) ; jqmlogger . info ( ""Existing nodes: "" + MetaService . getNodes ( cnx ) . size ( ) ) ; } catch ( Exception e ) { throw new JqmRuntimeException ( ""Could not fetch node count"" , e ) ; } finally { Helpers . closeQuietly ( cnx ) ; } } 
+"
+399,"protected byte [ ] getData ( Attributes attributes ) { if ( isRelative ( uri ) ) { File img = OmFileHelper . getUserProfilePicture ( userId , uri ) ; try ( InputStream is = new FileInputStream ( img ) ) { return IOUtils . toByteArray ( is ) ; } catch ( Exception e ) { } } return null ; } 
+","protected byte [ ] getData ( Attributes attributes ) { if ( isRelative ( uri ) ) { File img = OmFileHelper . getUserProfilePicture ( userId , uri ) ; try ( InputStream is = new FileInputStream ( img ) ) { return IOUtils . toByteArray ( is ) ; } catch ( Exception e ) { log . error ( ""failed to get bytes from image"" , e ) ; } } return null ; } 
+"
+400,"private boolean stoppingCriteriaSatisfied ( ) { if ( candidates . isEmpty ( ) ) { return true ; } boolean timeout = isTimeExpired ( ) ; if ( timeout ) { logger . info ( ""Stopping algorithm: Max. execution time was reached."" ) ; return true ; } SearchTreeNode bestNode = candidates . last ( ) ; boolean perfectDefinitionFound = bestNode . getAccuracy ( ) == 1.0 ; if ( stopOnFirstDefinition && perfectDefinitionFound ) { logger . info ( ""Stopping algorithm: Perfect definition found."" ) ; return true ; } return false ; } 
+","private boolean stoppingCriteriaSatisfied ( ) { if ( candidates . isEmpty ( ) ) { logger . info ( ""Stopping algorithm: No candidates left."" ) ; return true ; } boolean timeout = isTimeExpired ( ) ; if ( timeout ) { logger . info ( ""Stopping algorithm: Max. execution time was reached."" ) ; return true ; } SearchTreeNode bestNode = candidates . last ( ) ; boolean perfectDefinitionFound = bestNode . getAccuracy ( ) == 1.0 ; if ( stopOnFirstDefinition && perfectDefinitionFound ) { logger . info ( ""Stopping algorithm: Perfect definition found."" ) ; return true ; } return false ; } 
+"
+401,"private boolean stoppingCriteriaSatisfied ( ) { if ( candidates . isEmpty ( ) ) { logger . info ( ""Stopping algorithm: No candidates left."" ) ; return true ; } boolean timeout = isTimeExpired ( ) ; if ( timeout ) { return true ; } SearchTreeNode bestNode = candidates . last ( ) ; boolean perfectDefinitionFound = bestNode . getAccuracy ( ) == 1.0 ; if ( stopOnFirstDefinition && perfectDefinitionFound ) { logger . info ( ""Stopping algorithm: Perfect definition found."" ) ; return true ; } return false ; } 
+","private boolean stoppingCriteriaSatisfied ( ) { if ( candidates . isEmpty ( ) ) { logger . info ( ""Stopping algorithm: No candidates left."" ) ; return true ; } boolean timeout = isTimeExpired ( ) ; if ( timeout ) { logger . info ( ""Stopping algorithm: Max. execution time was reached."" ) ; return true ; } SearchTreeNode bestNode = candidates . last ( ) ; boolean perfectDefinitionFound = bestNode . getAccuracy ( ) == 1.0 ; if ( stopOnFirstDefinition && perfectDefinitionFound ) { logger . info ( ""Stopping algorithm: Perfect definition found."" ) ; return true ; } return false ; } 
+"
+402,"private boolean stoppingCriteriaSatisfied ( ) { if ( candidates . isEmpty ( ) ) { logger . info ( ""Stopping algorithm: No candidates left."" ) ; return true ; } boolean timeout = isTimeExpired ( ) ; if ( timeout ) { logger . info ( ""Stopping algorithm: Max. execution time was reached."" ) ; return true ; } SearchTreeNode bestNode = candidates . last ( ) ; boolean perfectDefinitionFound = bestNode . getAccuracy ( ) == 1.0 ; if ( stopOnFirstDefinition && perfectDefinitionFound ) { return true ; } return false ; } 
+","private boolean stoppingCriteriaSatisfied ( ) { if ( candidates . isEmpty ( ) ) { logger . info ( ""Stopping algorithm: No candidates left."" ) ; return true ; } boolean timeout = isTimeExpired ( ) ; if ( timeout ) { logger . info ( ""Stopping algorithm: Max. execution time was reached."" ) ; return true ; } SearchTreeNode bestNode = candidates . last ( ) ; boolean perfectDefinitionFound = bestNode . getAccuracy ( ) == 1.0 ; if ( stopOnFirstDefinition && perfectDefinitionFound ) { logger . info ( ""Stopping algorithm: Perfect definition found."" ) ; return true ; } return false ; } 
+"
+403,"public boolean transitionToFailed ( Throwable throwable ) { cleanupQueryQuietly ( ) ; queryStateTimer . endQuery ( ) ; requireNonNull ( throwable , ""throwable is null"" ) ; failureCause . compareAndSet ( null , toFailure ( throwable ) ) ; QueryState oldState = queryState . trySet ( FAILED ) ; if ( oldState . isDone ( ) ) { return false ; } try { QUERY_STATE_LOG . debug ( throwable , ""Query %s failed"" , queryId ) ; session . getTransactionId ( ) . ifPresent ( transactionId -> { try { if ( transactionManager . transactionExists ( transactionId ) && transactionManager . isAutoCommit ( transactionId ) ) { transactionManager . asyncAbort ( transactionId ) ; return ; } } catch ( RuntimeException e ) { QUERY_STATE_LOG . error ( e , ""Error aborting transaction for failed query. Transaction will be failed directly"" ) ; } transactionManager . fail ( transactionId ) ; } ) ; } finally { if ( oldState . ordinal ( ) <= PLANNING . ordinal ( ) ) { finalQueryInfo . compareAndSet ( Optional . empty ( ) , Optional . of ( getQueryInfo ( Optional . empty ( ) ) ) ) ; } } return true ; } 
+","public boolean transitionToFailed ( Throwable throwable ) { cleanupQueryQuietly ( ) ; queryStateTimer . endQuery ( ) ; requireNonNull ( throwable , ""throwable is null"" ) ; failureCause . compareAndSet ( null , toFailure ( throwable ) ) ; QueryState oldState = queryState . trySet ( FAILED ) ; if ( oldState . isDone ( ) ) { QUERY_STATE_LOG . debug ( throwable , ""Failure after query %s finished"" , queryId ) ; return false ; } try { QUERY_STATE_LOG . debug ( throwable , ""Query %s failed"" , queryId ) ; session . getTransactionId ( ) . ifPresent ( transactionId -> { try { if ( transactionManager . transactionExists ( transactionId ) && transactionManager . isAutoCommit ( transactionId ) ) { transactionManager . asyncAbort ( transactionId ) ; return ; } } catch ( RuntimeException e ) { QUERY_STATE_LOG . error ( e , ""Error aborting transaction for failed query. Transaction will be failed directly"" ) ; } transactionManager . fail ( transactionId ) ; } ) ; } finally { if ( oldState . ordinal ( ) <= PLANNING . ordinal ( ) ) { finalQueryInfo . compareAndSet ( Optional . empty ( ) , Optional . of ( getQueryInfo ( Optional . empty ( ) ) ) ) ; } } return true ; } 
+"
+404,"public boolean transitionToFailed ( Throwable throwable ) { cleanupQueryQuietly ( ) ; queryStateTimer . endQuery ( ) ; requireNonNull ( throwable , ""throwable is null"" ) ; failureCause . compareAndSet ( null , toFailure ( throwable ) ) ; QueryState oldState = queryState . trySet ( FAILED ) ; if ( oldState . isDone ( ) ) { QUERY_STATE_LOG . debug ( throwable , ""Failure after query %s finished"" , queryId ) ; return false ; } try { session . getTransactionId ( ) . ifPresent ( transactionId -> { try { if ( transactionManager . transactionExists ( transactionId ) && transactionManager . isAutoCommit ( transactionId ) ) { transactionManager . asyncAbort ( transactionId ) ; return ; } } catch ( RuntimeException e ) { QUERY_STATE_LOG . error ( e , ""Error aborting transaction for failed query. Transaction will be failed directly"" ) ; } transactionManager . fail ( transactionId ) ; } ) ; } finally { if ( oldState . ordinal ( ) <= PLANNING . ordinal ( ) ) { finalQueryInfo . compareAndSet ( Optional . empty ( ) , Optional . of ( getQueryInfo ( Optional . empty ( ) ) ) ) ; } } return true ; } 
+","public boolean transitionToFailed ( Throwable throwable ) { cleanupQueryQuietly ( ) ; queryStateTimer . endQuery ( ) ; requireNonNull ( throwable , ""throwable is null"" ) ; failureCause . compareAndSet ( null , toFailure ( throwable ) ) ; QueryState oldState = queryState . trySet ( FAILED ) ; if ( oldState . isDone ( ) ) { QUERY_STATE_LOG . debug ( throwable , ""Failure after query %s finished"" , queryId ) ; return false ; } try { QUERY_STATE_LOG . debug ( throwable , ""Query %s failed"" , queryId ) ; session . getTransactionId ( ) . ifPresent ( transactionId -> { try { if ( transactionManager . transactionExists ( transactionId ) && transactionManager . isAutoCommit ( transactionId ) ) { transactionManager . asyncAbort ( transactionId ) ; return ; } } catch ( RuntimeException e ) { QUERY_STATE_LOG . error ( e , ""Error aborting transaction for failed query. Transaction will be failed directly"" ) ; } transactionManager . fail ( transactionId ) ; } ) ; } finally { if ( oldState . ordinal ( ) <= PLANNING . ordinal ( ) ) { finalQueryInfo . compareAndSet ( Optional . empty ( ) , Optional . of ( getQueryInfo ( Optional . empty ( ) ) ) ) ; } } return true ; } 
+"
+405,"public boolean transitionToFailed ( Throwable throwable ) { cleanupQueryQuietly ( ) ; queryStateTimer . endQuery ( ) ; requireNonNull ( throwable , ""throwable is null"" ) ; failureCause . compareAndSet ( null , toFailure ( throwable ) ) ; QueryState oldState = queryState . trySet ( FAILED ) ; if ( oldState . isDone ( ) ) { QUERY_STATE_LOG . debug ( throwable , ""Failure after query %s finished"" , queryId ) ; return false ; } try { QUERY_STATE_LOG . debug ( throwable , ""Query %s failed"" , queryId ) ; } finally { if ( oldState . ordinal ( ) <= PLANNING . ordinal ( ) ) { finalQueryInfo . compareAndSet ( Optional . empty ( ) , Optional . of ( getQueryInfo ( Optional . empty ( ) ) ) ) ; } } return true ; } 
+","public boolean transitionToFailed ( Throwable throwable ) { cleanupQueryQuietly ( ) ; queryStateTimer . endQuery ( ) ; requireNonNull ( throwable , ""throwable is null"" ) ; failureCause . compareAndSet ( null , toFailure ( throwable ) ) ; QueryState oldState = queryState . trySet ( FAILED ) ; if ( oldState . isDone ( ) ) { QUERY_STATE_LOG . debug ( throwable , ""Failure after query %s finished"" , queryId ) ; return false ; } try { QUERY_STATE_LOG . debug ( throwable , ""Query %s failed"" , queryId ) ; session . getTransactionId ( ) . ifPresent ( transactionId -> { try { if ( transactionManager . transactionExists ( transactionId ) && transactionManager . isAutoCommit ( transactionId ) ) { transactionManager . asyncAbort ( transactionId ) ; return ; } } catch ( RuntimeException e ) { QUERY_STATE_LOG . error ( e , ""Error aborting transaction for failed query. Transaction will be failed directly"" ) ; } transactionManager . fail ( transactionId ) ; } ) ; } finally { if ( oldState . ordinal ( ) <= PLANNING . ordinal ( ) ) { finalQueryInfo . compareAndSet ( Optional . empty ( ) , Optional . of ( getQueryInfo ( Optional . empty ( ) ) ) ) ; } } return true ; } 
+"
+406,"public boolean transitionToFailed ( Throwable throwable ) { cleanupQueryQuietly ( ) ; queryStateTimer . endQuery ( ) ; requireNonNull ( throwable , ""throwable is null"" ) ; failureCause . compareAndSet ( null , toFailure ( throwable ) ) ; QueryState oldState = queryState . trySet ( FAILED ) ; if ( oldState . isDone ( ) ) { QUERY_STATE_LOG . debug ( throwable , ""Failure after query %s finished"" , queryId ) ; return false ; } try { QUERY_STATE_LOG . debug ( throwable , ""Query %s failed"" , queryId ) ; session . getTransactionId ( ) . ifPresent ( transactionId -> { try { if ( transactionManager . transactionExists ( transactionId ) && transactionManager . isAutoCommit ( transactionId ) ) { transactionManager . asyncAbort ( transactionId ) ; return ; } } catch ( RuntimeException e ) { } transactionManager . fail ( transactionId ) ; } ) ; } finally { if ( oldState . ordinal ( ) <= PLANNING . ordinal ( ) ) { finalQueryInfo . compareAndSet ( Optional . empty ( ) , Optional . of ( getQueryInfo ( Optional . empty ( ) ) ) ) ; } } return true ; } 
+","public boolean transitionToFailed ( Throwable throwable ) { cleanupQueryQuietly ( ) ; queryStateTimer . endQuery ( ) ; requireNonNull ( throwable , ""throwable is null"" ) ; failureCause . compareAndSet ( null , toFailure ( throwable ) ) ; QueryState oldState = queryState . trySet ( FAILED ) ; if ( oldState . isDone ( ) ) { QUERY_STATE_LOG . debug ( throwable , ""Failure after query %s finished"" , queryId ) ; return false ; } try { QUERY_STATE_LOG . debug ( throwable , ""Query %s failed"" , queryId ) ; session . getTransactionId ( ) . ifPresent ( transactionId -> { try { if ( transactionManager . transactionExists ( transactionId ) && transactionManager . isAutoCommit ( transactionId ) ) { transactionManager . asyncAbort ( transactionId ) ; return ; } } catch ( RuntimeException e ) { QUERY_STATE_LOG . error ( e , ""Error aborting transaction for failed query. Transaction will be failed directly"" ) ; } transactionManager . fail ( transactionId ) ; } ) ; } finally { if ( oldState . ordinal ( ) <= PLANNING . ordinal ( ) ) { finalQueryInfo . compareAndSet ( Optional . empty ( ) , Optional . of ( getQueryInfo ( Optional . empty ( ) ) ) ) ; } } return true ; } 
+"
+407,"public void logInfo ( String string ) { } 
+","public void logInfo ( String string ) { logger . info ( string ) ; } 
+"
+408,"@ Test ( groups = ""Integration"" ) public void test_localhost ( ) throws Exception { PostgreSqlNode pgsql = app . createAndManageChild ( EntitySpec . create ( PostgreSqlNode . class ) . configure ( DatastoreCommon . CREATION_SCRIPT_CONTENTS , CREATION_SCRIPT ) . configure ( PostgreSqlNode . MAX_CONNECTIONS , 10 ) . configure ( PostgreSqlNode . SHARED_MEMORY , ""512kB"" ) ) ; app . start ( ImmutableList . of ( loc ) ) ; String url = pgsql . getAttribute ( DatastoreCommon . DATASTORE_URL ) ; new VogellaExampleAccess ( ""org.postgresql.Driver"" , url ) . readModifyAndRevertDataBase ( ) ; log . info ( ""Ran vogella PostgreSql example -- SUCCESS"" ) ; } 
+","@ Test ( groups = ""Integration"" ) public void test_localhost ( ) throws Exception { PostgreSqlNode pgsql = app . createAndManageChild ( EntitySpec . create ( PostgreSqlNode . class ) . configure ( DatastoreCommon . CREATION_SCRIPT_CONTENTS , CREATION_SCRIPT ) . configure ( PostgreSqlNode . MAX_CONNECTIONS , 10 ) . configure ( PostgreSqlNode . SHARED_MEMORY , ""512kB"" ) ) ; app . start ( ImmutableList . of ( loc ) ) ; String url = pgsql . getAttribute ( DatastoreCommon . DATASTORE_URL ) ; log . info ( ""PostgreSql started on "" + url ) ; new VogellaExampleAccess ( ""org.postgresql.Driver"" , url ) . readModifyAndRevertDataBase ( ) ; log . info ( ""Ran vogella PostgreSql example -- SUCCESS"" ) ; } 
+"
+409,"@ Test ( groups = ""Integration"" ) public void test_localhost ( ) throws Exception { PostgreSqlNode pgsql = app . createAndManageChild ( EntitySpec . create ( PostgreSqlNode . class ) . configure ( DatastoreCommon . CREATION_SCRIPT_CONTENTS , CREATION_SCRIPT ) . configure ( PostgreSqlNode . MAX_CONNECTIONS , 10 ) . configure ( PostgreSqlNode . SHARED_MEMORY , ""512kB"" ) ) ; app . start ( ImmutableList . of ( loc ) ) ; String url = pgsql . getAttribute ( DatastoreCommon . DATASTORE_URL ) ; log . info ( ""PostgreSql started on "" + url ) ; new VogellaExampleAccess ( ""org.postgresql.Driver"" , url ) . readModifyAndRevertDataBase ( ) ; } 
+","@ Test ( groups = ""Integration"" ) public void test_localhost ( ) throws Exception { PostgreSqlNode pgsql = app . createAndManageChild ( EntitySpec . create ( PostgreSqlNode . class ) . configure ( DatastoreCommon . CREATION_SCRIPT_CONTENTS , CREATION_SCRIPT ) . configure ( PostgreSqlNode . MAX_CONNECTIONS , 10 ) . configure ( PostgreSqlNode . SHARED_MEMORY , ""512kB"" ) ) ; app . start ( ImmutableList . of ( loc ) ) ; String url = pgsql . getAttribute ( DatastoreCommon . DATASTORE_URL ) ; log . info ( ""PostgreSql started on "" + url ) ; new VogellaExampleAccess ( ""org.postgresql.Driver"" , url ) . readModifyAndRevertDataBase ( ) ; log . info ( ""Ran vogella PostgreSql example -- SUCCESS"" ) ; } 
+"
+410,"private void setupWsClient ( ) throws ExecutionException , IOException , InterruptedException , TimeoutException { if ( wsClient != null ) { } } 
+","private void setupWsClient ( ) throws ExecutionException , IOException , InterruptedException , TimeoutException { if ( wsClient != null ) { wsClient . addHandler ( recordingNameTopic , true , new Handler ( ) { @ Override public void onMessage ( String type , String topic , Object data ) { numSubscribers = Integer . valueOf ( ( String ) data ) ; logger . info ( ""Number of subscribers for recording started at {} is now {}"" , getStartTime ( ) , numSubscribers ) ; } @ Override public void onClose ( ) { numSubscribers = 0 ; } } ) ; } } 
+"
+411,"private void setupWsClient ( ) throws ExecutionException , IOException , InterruptedException , TimeoutException { if ( wsClient != null ) { wsClient . addHandler ( recordingNameTopic , true , new Handler ( ) { @ Override public void onMessage ( String type , String topic , Object data ) { numSubscribers = Integer . valueOf ( ( String ) data ) ; } @ Override public void onClose ( ) { numSubscribers = 0 ; } } ) ; } } 
+","private void setupWsClient ( ) throws ExecutionException , IOException , InterruptedException , TimeoutException { if ( wsClient != null ) { wsClient . addHandler ( recordingNameTopic , true , new Handler ( ) { @ Override public void onMessage ( String type , String topic , Object data ) { numSubscribers = Integer . valueOf ( ( String ) data ) ; logger . info ( ""Number of subscribers for recording started at {} is now {}"" , getStartTime ( ) , numSubscribers ) ; } @ Override public void onClose ( ) { numSubscribers = 0 ; } } ) ; } } 
+"
+412,"public String save ( ) throws Exception { try { this . scope . setDisplayName ( this . scope . getDisplayName ( ) . trim ( ) ) ; this . scope . setId ( this . scope . getDisplayName ( ) ) ; updateDynamicScripts ( ) ; updateClaims ( ) ; saveAttributesJson ( ) ; if ( update ) { try { scopeService . updateScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** UPDATED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( BasePersistenceException ex ) { facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to update scope '#{updateScopeAction.scope.displayName}'"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""Scope '#{updateScopeAction.scope.displayName}' updated successfully"" ) ; } else { this . inum = scopeService . generateInumForNewScope ( ) ; String dn = scopeService . getDnForScope ( this . inum ) ; this . scope . setDn ( dn ) ; this . scope . setInum ( this . inum ) ; try { scopeService . addScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** ADDED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( Exception ex ) { log . error ( ""Failed to add new scope {}"" , this . scope . getInum ( ) , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to add new scope"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""New scope '#{updateScopeAction.scope.displayName}' added successfully"" ) ; conversationService . endConversation ( ) ; this . update = true ; } log . debug ( "" returning success updating or saving scope"" ) ; return OxTrustConstants . RESULT_SUCCESS ; } catch ( Exception e ) { log . info ( """" , e ) ; return OxTrustConstants . RESULT_FAILURE ; } } 
+","public String save ( ) throws Exception { try { this . scope . setDisplayName ( this . scope . getDisplayName ( ) . trim ( ) ) ; this . scope . setId ( this . scope . getDisplayName ( ) ) ; updateDynamicScripts ( ) ; updateClaims ( ) ; saveAttributesJson ( ) ; if ( update ) { try { scopeService . updateScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** UPDATED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( BasePersistenceException ex ) { log . error ( ""Failed to update scope {}"" , this . inum , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to update scope '#{updateScopeAction.scope.displayName}'"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""Scope '#{updateScopeAction.scope.displayName}' updated successfully"" ) ; } else { this . inum = scopeService . generateInumForNewScope ( ) ; String dn = scopeService . getDnForScope ( this . inum ) ; this . scope . setDn ( dn ) ; this . scope . setInum ( this . inum ) ; try { scopeService . addScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** ADDED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( Exception ex ) { log .
error ( ""Failed to add new scope {}"" , this . scope . getInum ( ) , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to add new scope"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""New scope '#{updateScopeAction.scope.displayName}' added successfully"" ) ; conversationService . endConversation ( ) ; this . update = true ; } log . debug ( "" returning success updating or saving scope"" ) ; return OxTrustConstants . RESULT_SUCCESS ; } catch ( Exception e ) { log . info ( """" , e ) ; return OxTrustConstants . RESULT_FAILURE ; } } +" +413,"public String save ( ) throws Exception { try { this . scope . setDisplayName ( this . scope . getDisplayName ( ) . trim ( ) ) ; this . scope . setId ( this . scope . getDisplayName ( ) ) ; updateDynamicScripts ( ) ; updateClaims ( ) ; saveAttributesJson ( ) ; if ( update ) { try { scopeService . updateScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** UPDATED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( BasePersistenceException ex ) { log . error ( ""Failed to update scope {}"" , this . inum , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to update scope '#{updateScopeAction.scope.displayName}'"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""Scope '#{updateScopeAction.scope.displayName}' updated successfully"" ) ; } else { this . inum = scopeService . generateInumForNewScope ( ) ; String dn = scopeService . getDnForScope ( this . inum ) ; this . scope . setDn ( dn ) ; this . scope . setInum ( this . inum ) ; try { scopeService . addScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** ADDED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( Exception ex ) { facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to add new scope"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""New scope '#{updateScopeAction.scope.displayName}' added successfully"" ) ; conversationService . endConversation ( ) ; this . update = true ; } log . debug ( "" returning success updating or saving scope"" ) ; return OxTrustConstants . RESULT_SUCCESS ; } catch ( Exception e ) { log . info ( """" , e ) ; return OxTrustConstants . RESULT_FAILURE ; } } +","public String save ( ) throws Exception { try { this . scope . setDisplayName ( this . scope . getDisplayName ( ) . trim ( ) ) ; this . scope . setId ( this . scope . getDisplayName ( ) ) ; updateDynamicScripts ( ) ; updateClaims ( ) ; saveAttributesJson ( ) ; if ( update ) { try { scopeService . updateScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** UPDATED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( BasePersistenceException ex ) { log . error ( ""Failed to update scope {}"" , this . inum , ex ) ; facesMessages . add ( FacesMessage . 
SEVERITY_ERROR , ""Failed to update scope '#{updateScopeAction.scope.displayName}'"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""Scope '#{updateScopeAction.scope.displayName}' updated successfully"" ) ; } else { this . inum = scopeService . generateInumForNewScope ( ) ; String dn = scopeService . getDnForScope ( this . inum ) ; this . scope . setDn ( dn ) ; this . scope . setInum ( this . inum ) ; try { scopeService . addScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** ADDED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( Exception ex ) { log . error ( ""Failed to add new scope {}"" , this . scope . getInum ( ) , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to add new scope"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""New scope '#{updateScopeAction.scope.displayName}' added successfully"" ) ; conversationService . endConversation ( ) ; this . update = true ; } log . debug ( "" returning success updating or saving scope"" ) ; return OxTrustConstants . RESULT_SUCCESS ; } catch ( Exception e ) { log . info ( """" , e ) ; return OxTrustConstants . RESULT_FAILURE ; } } +" +414,"public String save ( ) throws Exception { try { this . scope . setDisplayName ( this . scope . getDisplayName ( ) . trim ( ) ) ; this . scope . setId ( this . scope . getDisplayName ( ) ) ; updateDynamicScripts ( ) ; updateClaims ( ) ; saveAttributesJson ( ) ; if ( update ) { try { scopeService . updateScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** UPDATED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( BasePersistenceException ex ) { log . error ( ""Failed to update scope {}"" , this . inum , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to update scope '#{updateScopeAction.scope.displayName}'"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""Scope '#{updateScopeAction.scope.displayName}' updated successfully"" ) ; } else { this . inum = scopeService . generateInumForNewScope ( ) ; String dn = scopeService . getDnForScope ( this . inum ) ; this . scope . setDn ( dn ) ; this . scope . setInum ( this . inum ) ; try { scopeService . addScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** ADDED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( Exception ex ) { log . error ( ""Failed to add new scope {}"" , this . scope . getInum ( ) , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to add new scope"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""New scope '#{updateScopeAction.scope.displayName}' added successfully"" ) ; conversationService . endConversation ( ) ; this . update = true ; } return OxTrustConstants . RESULT_SUCCESS ; } catch ( Exception e ) { log . info ( """" , e ) ; return OxTrustConstants . 
RESULT_FAILURE ; } } +","public String save ( ) throws Exception { try { this . scope . setDisplayName ( this . scope . getDisplayName ( ) . trim ( ) ) ; this . scope . setId ( this . scope . getDisplayName ( ) ) ; updateDynamicScripts ( ) ; updateClaims ( ) ; saveAttributesJson ( ) ; if ( update ) { try { scopeService . updateScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** UPDATED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( BasePersistenceException ex ) { log . error ( ""Failed to update scope {}"" , this . inum , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to update scope '#{updateScopeAction.scope.displayName}'"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""Scope '#{updateScopeAction.scope.displayName}' updated successfully"" ) ; } else { this . inum = scopeService . generateInumForNewScope ( ) ; String dn = scopeService . getDnForScope ( this . inum ) ; this . scope . setDn ( dn ) ; this . scope . setInum ( this . inum ) ; try { scopeService . addScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** ADDED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( Exception ex ) { log . error ( ""Failed to add new scope {}"" , this . scope . getInum ( ) , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to add new scope"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""New scope '#{updateScopeAction.scope.displayName}' added successfully"" ) ; conversationService . endConversation ( ) ; this . update = true ; } log . debug ( "" returning success updating or saving scope"" ) ; return OxTrustConstants . RESULT_SUCCESS ; } catch ( Exception e ) { log . info ( """" , e ) ; return OxTrustConstants . RESULT_FAILURE ; } } +" +415,"public String save ( ) throws Exception { try { this . scope . setDisplayName ( this . scope . getDisplayName ( ) . trim ( ) ) ; this . scope . setId ( this . scope . getDisplayName ( ) ) ; updateDynamicScripts ( ) ; updateClaims ( ) ; saveAttributesJson ( ) ; if ( update ) { try { scopeService . updateScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** UPDATED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( BasePersistenceException ex ) { log . error ( ""Failed to update scope {}"" , this . inum , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to update scope '#{updateScopeAction.scope.displayName}'"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""Scope '#{updateScopeAction.scope.displayName}' updated successfully"" ) ; } else { this . inum = scopeService . generateInumForNewScope ( ) ; String dn = scopeService . getDnForScope ( this . inum ) ; this . scope . setDn ( dn ) ; this . scope . setInum ( this . inum ) ; try { scopeService . addScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . 
getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** ADDED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( Exception ex ) { log . error ( ""Failed to add new scope {}"" , this . scope . getInum ( ) , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to add new scope"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""New scope '#{updateScopeAction.scope.displayName}' added successfully"" ) ; conversationService . endConversation ( ) ; this . update = true ; } log . debug ( "" returning success updating or saving scope"" ) ; return OxTrustConstants . RESULT_SUCCESS ; } catch ( Exception e ) { return OxTrustConstants . RESULT_FAILURE ; } } +","public String save ( ) throws Exception { try { this . scope . setDisplayName ( this . scope . getDisplayName ( ) . trim ( ) ) ; this . scope . setId ( this . scope . getDisplayName ( ) ) ; updateDynamicScripts ( ) ; updateClaims ( ) ; saveAttributesJson ( ) ; if ( update ) { try { scopeService . updateScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** UPDATED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( BasePersistenceException ex ) { log . error ( ""Failed to update scope {}"" , this . inum , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to update scope '#{updateScopeAction.scope.displayName}'"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""Scope '#{updateScopeAction.scope.displayName}' updated successfully"" ) ; } else { this . inum = scopeService . generateInumForNewScope ( ) ; String dn = scopeService . getDnForScope ( this . inum ) ; this . scope . setDn ( dn ) ; this . scope . setInum ( this . inum ) ; try { scopeService . addScope ( this . scope ) ; oxTrustAuditService . audit ( ""OPENID SCOPE "" + this . scope . getInum ( ) + "" **"" + this . scope . getDisplayName ( ) + ""** ADDED"" , identity . getUser ( ) , ( HttpServletRequest ) FacesContext . getCurrentInstance ( ) . getExternalContext ( ) . getRequest ( ) ) ; } catch ( Exception ex ) { log . error ( ""Failed to add new scope {}"" , this . scope . getInum ( ) , ex ) ; facesMessages . add ( FacesMessage . SEVERITY_ERROR , ""Failed to add new scope"" ) ; return OxTrustConstants . RESULT_FAILURE ; } facesMessages . add ( FacesMessage . SEVERITY_INFO , ""New scope '#{updateScopeAction.scope.displayName}' added successfully"" ) ; conversationService . endConversation ( ) ; this . update = true ; } log . debug ( "" returning success updating or saving scope"" ) ; return OxTrustConstants . RESULT_SUCCESS ; } catch ( Exception e ) { log . info ( """" , e ) ; return OxTrustConstants . RESULT_FAILURE ; } } +" +416,"protected void doDeletePersistentVolumeClaim ( Exchange exchange , String operation ) throws Exception { String pvcName = exchange . getIn ( ) . getHeader ( KubernetesConstants . KUBERNETES_PERSISTENT_VOLUME_CLAIM_NAME , String . class ) ; String namespaceName = exchange . getIn ( ) . getHeader ( KubernetesConstants . KUBERNETES_NAMESPACE_NAME , String . class ) ; if ( ObjectHelper . 
isEmpty ( pvcName ) ) { throw new IllegalArgumentException ( ""Delete a specific Persistent Volume Claim require specify a Persistent Volume Claim name"" ) ; } if ( ObjectHelper . isEmpty ( namespaceName ) ) { LOG . error ( ""Delete a specific Persistent Volume Claim require specify a namespace name"" ) ; throw new IllegalArgumentException ( ""Delete a specific Persistent Volume Claim require specify a namespace name"" ) ; } boolean pvcDeleted = getEndpoint ( ) . getKubernetesClient ( ) . persistentVolumeClaims ( ) . inNamespace ( namespaceName ) . withName ( pvcName ) . delete ( ) ; MessageHelper . copyHeaders ( exchange . getIn ( ) , exchange . getOut ( ) , true ) ; exchange . getOut ( ) . setBody ( pvcDeleted ) ; } +","protected void doDeletePersistentVolumeClaim ( Exchange exchange , String operation ) throws Exception { String pvcName = exchange . getIn ( ) . getHeader ( KubernetesConstants . KUBERNETES_PERSISTENT_VOLUME_CLAIM_NAME , String . class ) ; String namespaceName = exchange . getIn ( ) . getHeader ( KubernetesConstants . KUBERNETES_NAMESPACE_NAME , String . class ) ; if ( ObjectHelper . isEmpty ( pvcName ) ) { LOG . error ( ""Delete a specific Persistent Volume Claim require specify a Persistent Volume Claim name"" ) ; throw new IllegalArgumentException ( ""Delete a specific Persistent Volume Claim require specify a Persistent Volume Claim name"" ) ; } if ( ObjectHelper . isEmpty ( namespaceName ) ) { LOG . error ( ""Delete a specific Persistent Volume Claim require specify a namespace name"" ) ; throw new IllegalArgumentException ( ""Delete a specific Persistent Volume Claim require specify a namespace name"" ) ; } boolean pvcDeleted = getEndpoint ( ) . getKubernetesClient ( ) . persistentVolumeClaims ( ) . inNamespace ( namespaceName ) . withName ( pvcName ) . delete ( ) ; MessageHelper . copyHeaders ( exchange . getIn ( ) , exchange . getOut ( ) , true ) ; exchange . getOut ( ) . setBody ( pvcDeleted ) ; } +" +417,"protected void doDeletePersistentVolumeClaim ( Exchange exchange , String operation ) throws Exception { String pvcName = exchange . getIn ( ) . getHeader ( KubernetesConstants . KUBERNETES_PERSISTENT_VOLUME_CLAIM_NAME , String . class ) ; String namespaceName = exchange . getIn ( ) . getHeader ( KubernetesConstants . KUBERNETES_NAMESPACE_NAME , String . class ) ; if ( ObjectHelper . isEmpty ( pvcName ) ) { LOG . error ( ""Delete a specific Persistent Volume Claim require specify a Persistent Volume Claim name"" ) ; throw new IllegalArgumentException ( ""Delete a specific Persistent Volume Claim require specify a Persistent Volume Claim name"" ) ; } if ( ObjectHelper . isEmpty ( namespaceName ) ) { throw new IllegalArgumentException ( ""Delete a specific Persistent Volume Claim require specify a namespace name"" ) ; } boolean pvcDeleted = getEndpoint ( ) . getKubernetesClient ( ) . persistentVolumeClaims ( ) . inNamespace ( namespaceName ) . withName ( pvcName ) . delete ( ) ; MessageHelper . copyHeaders ( exchange . getIn ( ) , exchange . getOut ( ) , true ) ; exchange . getOut ( ) . setBody ( pvcDeleted ) ; } +","protected void doDeletePersistentVolumeClaim ( Exchange exchange , String operation ) throws Exception { String pvcName = exchange . getIn ( ) . getHeader ( KubernetesConstants . KUBERNETES_PERSISTENT_VOLUME_CLAIM_NAME , String . class ) ; String namespaceName = exchange . getIn ( ) . getHeader ( KubernetesConstants . KUBERNETES_NAMESPACE_NAME , String . class ) ; if ( ObjectHelper . isEmpty ( pvcName ) ) { LOG . 
error ( ""Delete a specific Persistent Volume Claim require specify a Persistent Volume Claim name"" ) ; throw new IllegalArgumentException ( ""Delete a specific Persistent Volume Claim require specify a Persistent Volume Claim name"" ) ; } if ( ObjectHelper . isEmpty ( namespaceName ) ) { LOG . error ( ""Delete a specific Persistent Volume Claim require specify a namespace name"" ) ; throw new IllegalArgumentException ( ""Delete a specific Persistent Volume Claim require specify a namespace name"" ) ; } boolean pvcDeleted = getEndpoint ( ) . getKubernetesClient ( ) . persistentVolumeClaims ( ) . inNamespace ( namespaceName ) . withName ( pvcName ) . delete ( ) ; MessageHelper . copyHeaders ( exchange . getIn ( ) , exchange . getOut ( ) , true ) ; exchange . getOut ( ) . setBody ( pvcDeleted ) ; } +" +418,"public void apply ( Context resources ) { RunningState runningState = resources . getRepository ( ) ; BinaryStore binaryStore = runningState . binaryStore ( ) ; try { if ( binaryStore instanceof FileSystemBinaryStore ) { ( ( FileSystemBinaryStore ) binaryStore ) . upgradeTrashContentFormat ( ) ; } } catch ( BinaryStoreException e ) { LOGGER . error ( e , JcrI18n . upgrade4_0_0_Beta3_Failed , e . getMessage ( ) ) ; } } +","public void apply ( Context resources ) { LOGGER . info ( JcrI18n . upgrade4_0_0_Beta3_Running ) ; RunningState runningState = resources . getRepository ( ) ; BinaryStore binaryStore = runningState . binaryStore ( ) ; try { if ( binaryStore instanceof FileSystemBinaryStore ) { ( ( FileSystemBinaryStore ) binaryStore ) . upgradeTrashContentFormat ( ) ; } } catch ( BinaryStoreException e ) { LOGGER . error ( e , JcrI18n . upgrade4_0_0_Beta3_Failed , e . getMessage ( ) ) ; } } +" +419,"public void apply ( Context resources ) { LOGGER . info ( JcrI18n . upgrade4_0_0_Beta3_Running ) ; RunningState runningState = resources . getRepository ( ) ; BinaryStore binaryStore = runningState . binaryStore ( ) ; try { if ( binaryStore instanceof FileSystemBinaryStore ) { ( ( FileSystemBinaryStore ) binaryStore ) . upgradeTrashContentFormat ( ) ; } } catch ( BinaryStoreException e ) { } } +","public void apply ( Context resources ) { LOGGER . info ( JcrI18n . upgrade4_0_0_Beta3_Running ) ; RunningState runningState = resources . getRepository ( ) ; BinaryStore binaryStore = runningState . binaryStore ( ) ; try { if ( binaryStore instanceof FileSystemBinaryStore ) { ( ( FileSystemBinaryStore ) binaryStore ) . upgradeTrashContentFormat ( ) ; } } catch ( BinaryStoreException e ) { LOGGER . error ( e , JcrI18n . upgrade4_0_0_Beta3_Failed , e . getMessage ( ) ) ; } } +" +420,"protected void processTXOriginatorRecoveryMessage ( final ClusterDistributionManager dm , final TXOriginatorRecoveryMessage msg ) { ReplyException replyException = null ; try { TXCommitMessage . getTracker ( ) . waitToProcess ( msg . txLockId , dm ) ; } catch ( RuntimeException t ) { logger . warn ( ""[processTXOriginatorRecoveryMessage] throwable:"" , t ) ; { replyException = new ReplyException ( t ) ; } } finally { TXOriginatorRecoveryReplyMessage replyMsg = new TXOriginatorRecoveryReplyMessage ( ) ; replyMsg . txLockId = txLockId ; replyMsg . setProcessorId ( getProcessorId ( ) ) ; replyMsg . setRecipient ( getSender ( ) ) ; replyMsg . setException ( replyException ) ; if ( getSender ( ) . equals ( dm . getId ( ) ) ) { logger . info ( ""[processTXOriginatorRecoveryMessage] locally process reply"" ) ; replyMsg . setSender ( dm . getId ( ) ) ; replyMsg . dmProcess ( dm ) ; } else { logger . 
info ( ""[processTXOriginatorRecoveryMessage] send reply"" ) ; dm . putOutgoing ( replyMsg ) ; } } } +","protected void processTXOriginatorRecoveryMessage ( final ClusterDistributionManager dm , final TXOriginatorRecoveryMessage msg ) { ReplyException replyException = null ; logger . info ( ""[processTXOriginatorRecoveryMessage]"" ) ; try { TXCommitMessage . getTracker ( ) . waitToProcess ( msg . txLockId , dm ) ; } catch ( RuntimeException t ) { logger . warn ( ""[processTXOriginatorRecoveryMessage] throwable:"" , t ) ; { replyException = new ReplyException ( t ) ; } } finally { TXOriginatorRecoveryReplyMessage replyMsg = new TXOriginatorRecoveryReplyMessage ( ) ; replyMsg . txLockId = txLockId ; replyMsg . setProcessorId ( getProcessorId ( ) ) ; replyMsg . setRecipient ( getSender ( ) ) ; replyMsg . setException ( replyException ) ; if ( getSender ( ) . equals ( dm . getId ( ) ) ) { logger . info ( ""[processTXOriginatorRecoveryMessage] locally process reply"" ) ; replyMsg . setSender ( dm . getId ( ) ) ; replyMsg . dmProcess ( dm ) ; } else { logger . info ( ""[processTXOriginatorRecoveryMessage] send reply"" ) ; dm . putOutgoing ( replyMsg ) ; } } } +" +421,"protected void processTXOriginatorRecoveryMessage ( final ClusterDistributionManager dm , final TXOriginatorRecoveryMessage msg ) { ReplyException replyException = null ; logger . info ( ""[processTXOriginatorRecoveryMessage]"" ) ; try { TXCommitMessage . getTracker ( ) . waitToProcess ( msg . txLockId , dm ) ; } catch ( RuntimeException t ) { { replyException = new ReplyException ( t ) ; } } finally { TXOriginatorRecoveryReplyMessage replyMsg = new TXOriginatorRecoveryReplyMessage ( ) ; replyMsg . txLockId = txLockId ; replyMsg . setProcessorId ( getProcessorId ( ) ) ; replyMsg . setRecipient ( getSender ( ) ) ; replyMsg . setException ( replyException ) ; if ( getSender ( ) . equals ( dm . getId ( ) ) ) { logger . info ( ""[processTXOriginatorRecoveryMessage] locally process reply"" ) ; replyMsg . setSender ( dm . getId ( ) ) ; replyMsg . dmProcess ( dm ) ; } else { logger . info ( ""[processTXOriginatorRecoveryMessage] send reply"" ) ; dm . putOutgoing ( replyMsg ) ; } } } +","protected void processTXOriginatorRecoveryMessage ( final ClusterDistributionManager dm , final TXOriginatorRecoveryMessage msg ) { ReplyException replyException = null ; logger . info ( ""[processTXOriginatorRecoveryMessage]"" ) ; try { TXCommitMessage . getTracker ( ) . waitToProcess ( msg . txLockId , dm ) ; } catch ( RuntimeException t ) { logger . warn ( ""[processTXOriginatorRecoveryMessage] throwable:"" , t ) ; { replyException = new ReplyException ( t ) ; } } finally { TXOriginatorRecoveryReplyMessage replyMsg = new TXOriginatorRecoveryReplyMessage ( ) ; replyMsg . txLockId = txLockId ; replyMsg . setProcessorId ( getProcessorId ( ) ) ; replyMsg . setRecipient ( getSender ( ) ) ; replyMsg . setException ( replyException ) ; if ( getSender ( ) . equals ( dm . getId ( ) ) ) { logger . info ( ""[processTXOriginatorRecoveryMessage] locally process reply"" ) ; replyMsg . setSender ( dm . getId ( ) ) ; replyMsg . dmProcess ( dm ) ; } else { logger . info ( ""[processTXOriginatorRecoveryMessage] send reply"" ) ; dm . putOutgoing ( replyMsg ) ; } } } +" +422,"protected void processTXOriginatorRecoveryMessage ( final ClusterDistributionManager dm , final TXOriginatorRecoveryMessage msg ) { ReplyException replyException = null ; logger . info ( ""[processTXOriginatorRecoveryMessage]"" ) ; try { TXCommitMessage . getTracker ( ) . waitToProcess ( msg . 
txLockId , dm ) ; } catch ( RuntimeException t ) { logger . warn ( ""[processTXOriginatorRecoveryMessage] throwable:"" , t ) ; { replyException = new ReplyException ( t ) ; } } finally { TXOriginatorRecoveryReplyMessage replyMsg = new TXOriginatorRecoveryReplyMessage ( ) ; replyMsg . txLockId = txLockId ; replyMsg . setProcessorId ( getProcessorId ( ) ) ; replyMsg . setRecipient ( getSender ( ) ) ; replyMsg . setException ( replyException ) ; if ( getSender ( ) . equals ( dm . getId ( ) ) ) { replyMsg . setSender ( dm . getId ( ) ) ; replyMsg . dmProcess ( dm ) ; } else { logger . info ( ""[processTXOriginatorRecoveryMessage] send reply"" ) ; dm . putOutgoing ( replyMsg ) ; } } } +","protected void processTXOriginatorRecoveryMessage ( final ClusterDistributionManager dm , final TXOriginatorRecoveryMessage msg ) { ReplyException replyException = null ; logger . info ( ""[processTXOriginatorRecoveryMessage]"" ) ; try { TXCommitMessage . getTracker ( ) . waitToProcess ( msg . txLockId , dm ) ; } catch ( RuntimeException t ) { logger . warn ( ""[processTXOriginatorRecoveryMessage] throwable:"" , t ) ; { replyException = new ReplyException ( t ) ; } } finally { TXOriginatorRecoveryReplyMessage replyMsg = new TXOriginatorRecoveryReplyMessage ( ) ; replyMsg . txLockId = txLockId ; replyMsg . setProcessorId ( getProcessorId ( ) ) ; replyMsg . setRecipient ( getSender ( ) ) ; replyMsg . setException ( replyException ) ; if ( getSender ( ) . equals ( dm . getId ( ) ) ) { logger . info ( ""[processTXOriginatorRecoveryMessage] locally process reply"" ) ; replyMsg . setSender ( dm . getId ( ) ) ; replyMsg . dmProcess ( dm ) ; } else { logger . info ( ""[processTXOriginatorRecoveryMessage] send reply"" ) ; dm . putOutgoing ( replyMsg ) ; } } } +" +423,"protected void processTXOriginatorRecoveryMessage ( final ClusterDistributionManager dm , final TXOriginatorRecoveryMessage msg ) { ReplyException replyException = null ; logger . info ( ""[processTXOriginatorRecoveryMessage]"" ) ; try { TXCommitMessage . getTracker ( ) . waitToProcess ( msg . txLockId , dm ) ; } catch ( RuntimeException t ) { logger . warn ( ""[processTXOriginatorRecoveryMessage] throwable:"" , t ) ; { replyException = new ReplyException ( t ) ; } } finally { TXOriginatorRecoveryReplyMessage replyMsg = new TXOriginatorRecoveryReplyMessage ( ) ; replyMsg . txLockId = txLockId ; replyMsg . setProcessorId ( getProcessorId ( ) ) ; replyMsg . setRecipient ( getSender ( ) ) ; replyMsg . setException ( replyException ) ; if ( getSender ( ) . equals ( dm . getId ( ) ) ) { logger . info ( ""[processTXOriginatorRecoveryMessage] locally process reply"" ) ; replyMsg . setSender ( dm . getId ( ) ) ; replyMsg . dmProcess ( dm ) ; } else { dm . putOutgoing ( replyMsg ) ; } } } +","protected void processTXOriginatorRecoveryMessage ( final ClusterDistributionManager dm , final TXOriginatorRecoveryMessage msg ) { ReplyException replyException = null ; logger . info ( ""[processTXOriginatorRecoveryMessage]"" ) ; try { TXCommitMessage . getTracker ( ) . waitToProcess ( msg . txLockId , dm ) ; } catch ( RuntimeException t ) { logger . warn ( ""[processTXOriginatorRecoveryMessage] throwable:"" , t ) ; { replyException = new ReplyException ( t ) ; } } finally { TXOriginatorRecoveryReplyMessage replyMsg = new TXOriginatorRecoveryReplyMessage ( ) ; replyMsg . txLockId = txLockId ; replyMsg . setProcessorId ( getProcessorId ( ) ) ; replyMsg . setRecipient ( getSender ( ) ) ; replyMsg . setException ( replyException ) ; if ( getSender ( ) . equals ( dm . 
getId ( ) ) ) { logger . info ( ""[processTXOriginatorRecoveryMessage] locally process reply"" ) ; replyMsg . setSender ( dm . getId ( ) ) ; replyMsg . dmProcess ( dm ) ; } else { logger . info ( ""[processTXOriginatorRecoveryMessage] send reply"" ) ; dm . putOutgoing ( replyMsg ) ; } } } +" +424,"public static void delete ( File file ) throws HyracksDataException { try { if ( file . isDirectory ( ) ) { if ( ! file . exists ( ) ) { return ; } else if ( ! FileUtils . isSymlink ( file ) ) { cleanDirectory ( file ) ; } } Files . delete ( file . toPath ( ) ) ; } catch ( NoSuchFileException | FileNotFoundException e ) { } catch ( IOException e ) { throw HyracksDataException . create ( ErrorCode . CANNOT_DELETE_FILE , e , file . getAbsolutePath ( ) ) ; } } +","public static void delete ( File file ) throws HyracksDataException { try { if ( file . isDirectory ( ) ) { if ( ! file . exists ( ) ) { return ; } else if ( ! FileUtils . isSymlink ( file ) ) { cleanDirectory ( file ) ; } } Files . delete ( file . toPath ( ) ) ; } catch ( NoSuchFileException | FileNotFoundException e ) { LOGGER . warn ( ( ) -> FILE_NOT_FOUND_MSG + "": "" + e . getMessage ( ) , e ) ; } catch ( IOException e ) { throw HyracksDataException . create ( ErrorCode . CANNOT_DELETE_FILE , e , file . getAbsolutePath ( ) ) ; } } +" +425,"private void _initLayoutTemplates ( final PluginPackage pluginPackage ) { ServiceDependencyManager serviceDependencyManager = new ServiceDependencyManager ( ) ; serviceDependencyManager . addServiceDependencyListener ( new ServiceDependencyListener ( ) { @ Override public void dependenciesFulfilled ( ) { try { if ( _log . isDebugEnabled ( ) ) { } ServletContext servletContext = getServletContext ( ) ; List < LayoutTemplate > layoutTemplates = LayoutTemplateLocalServiceUtil . init ( servletContext , new String [ ] { StreamUtil . toString ( servletContext . getResourceAsStream ( ""/WEB-INF/liferay-layout-"" + ""templates.xml"" ) ) , StreamUtil . toString ( servletContext . getResourceAsStream ( ""/WEB-INF/liferay-layout-"" + ""templates-ext.xml"" ) ) } , pluginPackage ) ; servletContext . setAttribute ( WebKeys . PLUGIN_LAYOUT_TEMPLATES , layoutTemplates ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; } } @ Override public void destroy ( ) { } } ) ; Registry registry = RegistryUtil . getRegistry ( ) ; Collection < Filter > filters = new ArrayList < > ( ) ; for ( String langType : LayoutTemplateLocalServiceImpl . supportedLangTypes ) { StringBundler sb = new StringBundler ( 5 ) ; sb . append ( ""(&(language.type="" ) ; sb . append ( langType ) ; sb . append ( "")(objectClass="" ) ; sb . append ( TemplateManager . class . getName ( ) ) ; sb . append ( ""))"" ) ; filters . add ( registry . getFilter ( sb . toString ( ) ) ) ; } serviceDependencyManager . registerDependencies ( filters . toArray ( new Filter [ 0 ] ) ) ; } +","private void _initLayoutTemplates ( final PluginPackage pluginPackage ) { ServiceDependencyManager serviceDependencyManager = new ServiceDependencyManager ( ) ; serviceDependencyManager . addServiceDependencyListener ( new ServiceDependencyListener ( ) { @ Override public void dependenciesFulfilled ( ) { try { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Initialize layout templates"" ) ; } ServletContext servletContext = getServletContext ( ) ; List < LayoutTemplate > layoutTemplates = LayoutTemplateLocalServiceUtil . init ( servletContext , new String [ ] { StreamUtil . toString ( servletContext . 
getResourceAsStream ( ""/WEB-INF/liferay-layout-"" + ""templates.xml"" ) ) , StreamUtil . toString ( servletContext . getResourceAsStream ( ""/WEB-INF/liferay-layout-"" + ""templates-ext.xml"" ) ) } , pluginPackage ) ; servletContext . setAttribute ( WebKeys . PLUGIN_LAYOUT_TEMPLATES , layoutTemplates ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; } } @ Override public void destroy ( ) { } } ) ; Registry registry = RegistryUtil . getRegistry ( ) ; Collection < Filter > filters = new ArrayList < > ( ) ; for ( String langType : LayoutTemplateLocalServiceImpl . supportedLangTypes ) { StringBundler sb = new StringBundler ( 5 ) ; sb . append ( ""(&(language.type="" ) ; sb . append ( langType ) ; sb . append ( "")(objectClass="" ) ; sb . append ( TemplateManager . class . getName ( ) ) ; sb . append ( ""))"" ) ; filters . add ( registry . getFilter ( sb . toString ( ) ) ) ; } serviceDependencyManager . registerDependencies ( filters . toArray ( new Filter [ 0 ] ) ) ; } +" +426,"private void _initLayoutTemplates ( final PluginPackage pluginPackage ) { ServiceDependencyManager serviceDependencyManager = new ServiceDependencyManager ( ) ; serviceDependencyManager . addServiceDependencyListener ( new ServiceDependencyListener ( ) { @ Override public void dependenciesFulfilled ( ) { try { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Initialize layout templates"" ) ; } ServletContext servletContext = getServletContext ( ) ; List < LayoutTemplate > layoutTemplates = LayoutTemplateLocalServiceUtil . init ( servletContext , new String [ ] { StreamUtil . toString ( servletContext . getResourceAsStream ( ""/WEB-INF/liferay-layout-"" + ""templates.xml"" ) ) , StreamUtil . toString ( servletContext . getResourceAsStream ( ""/WEB-INF/liferay-layout-"" + ""templates-ext.xml"" ) ) } , pluginPackage ) ; servletContext . setAttribute ( WebKeys . PLUGIN_LAYOUT_TEMPLATES , layoutTemplates ) ; } catch ( Exception exception ) { } } @ Override public void destroy ( ) { } } ) ; Registry registry = RegistryUtil . getRegistry ( ) ; Collection < Filter > filters = new ArrayList < > ( ) ; for ( String langType : LayoutTemplateLocalServiceImpl . supportedLangTypes ) { StringBundler sb = new StringBundler ( 5 ) ; sb . append ( ""(&(language.type="" ) ; sb . append ( langType ) ; sb . append ( "")(objectClass="" ) ; sb . append ( TemplateManager . class . getName ( ) ) ; sb . append ( ""))"" ) ; filters . add ( registry . getFilter ( sb . toString ( ) ) ) ; } serviceDependencyManager . registerDependencies ( filters . toArray ( new Filter [ 0 ] ) ) ; } +","private void _initLayoutTemplates ( final PluginPackage pluginPackage ) { ServiceDependencyManager serviceDependencyManager = new ServiceDependencyManager ( ) ; serviceDependencyManager . addServiceDependencyListener ( new ServiceDependencyListener ( ) { @ Override public void dependenciesFulfilled ( ) { try { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Initialize layout templates"" ) ; } ServletContext servletContext = getServletContext ( ) ; List < LayoutTemplate > layoutTemplates = LayoutTemplateLocalServiceUtil . init ( servletContext , new String [ ] { StreamUtil . toString ( servletContext . getResourceAsStream ( ""/WEB-INF/liferay-layout-"" + ""templates.xml"" ) ) , StreamUtil . toString ( servletContext . getResourceAsStream ( ""/WEB-INF/liferay-layout-"" + ""templates-ext.xml"" ) ) } , pluginPackage ) ; servletContext . setAttribute ( WebKeys . 
PLUGIN_LAYOUT_TEMPLATES , layoutTemplates ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; } } @ Override public void destroy ( ) { } } ) ; Registry registry = RegistryUtil . getRegistry ( ) ; Collection < Filter > filters = new ArrayList < > ( ) ; for ( String langType : LayoutTemplateLocalServiceImpl . supportedLangTypes ) { StringBundler sb = new StringBundler ( 5 ) ; sb . append ( ""(&(language.type="" ) ; sb . append ( langType ) ; sb . append ( "")(objectClass="" ) ; sb . append ( TemplateManager . class . getName ( ) ) ; sb . append ( ""))"" ) ; filters . add ( registry . getFilter ( sb . toString ( ) ) ) ; } serviceDependencyManager . registerDependencies ( filters . toArray ( new Filter [ 0 ] ) ) ; } +" +427,"public void logDebug ( String message ) { String msgWithId = addMsgIdToMsg ( LogMessageIdentifier . DEBUG , message ) ; } +","public void logDebug ( String message ) { String msgWithId = addMsgIdToMsg ( LogMessageIdentifier . DEBUG , message ) ; systemLogger . debug ( msgWithId ) ; } +" +428,"public static boolean canPublishEndpoint ( String listenAddress ) { if ( StringUtils . isEmpty ( listenAddress ) ) { return false ; } IpPort ipPort = NetUtils . parseIpPortFromURI ( ""http://"" + listenAddress ) ; if ( ipPort == null ) { LOGGER . info ( ""invalid listenAddress {}, can not publish, format should be ip:port."" , listenAddress ) ; return false ; } if ( NetUtils . canTcpListen ( ipPort . getSocketAddress ( ) . getAddress ( ) , ipPort . getPort ( ) ) ) { LOGGER . info ( ""{} is not listened, can not publish."" , ipPort . getSocketAddress ( ) ) ; return false ; } return true ; } +","public static boolean canPublishEndpoint ( String listenAddress ) { if ( StringUtils . isEmpty ( listenAddress ) ) { LOGGER . info ( ""listenAddress is null, can not publish."" ) ; return false ; } IpPort ipPort = NetUtils . parseIpPortFromURI ( ""http://"" + listenAddress ) ; if ( ipPort == null ) { LOGGER . info ( ""invalid listenAddress {}, can not publish, format should be ip:port."" , listenAddress ) ; return false ; } if ( NetUtils . canTcpListen ( ipPort . getSocketAddress ( ) . getAddress ( ) , ipPort . getPort ( ) ) ) { LOGGER . info ( ""{} is not listened, can not publish."" , ipPort . getSocketAddress ( ) ) ; return false ; } return true ; } +" +429,"public static boolean canPublishEndpoint ( String listenAddress ) { if ( StringUtils . isEmpty ( listenAddress ) ) { LOGGER . info ( ""listenAddress is null, can not publish."" ) ; return false ; } IpPort ipPort = NetUtils . parseIpPortFromURI ( ""http://"" + listenAddress ) ; if ( ipPort == null ) { return false ; } if ( NetUtils . canTcpListen ( ipPort . getSocketAddress ( ) . getAddress ( ) , ipPort . getPort ( ) ) ) { LOGGER . info ( ""{} is not listened, can not publish."" , ipPort . getSocketAddress ( ) ) ; return false ; } return true ; } +","public static boolean canPublishEndpoint ( String listenAddress ) { if ( StringUtils . isEmpty ( listenAddress ) ) { LOGGER . info ( ""listenAddress is null, can not publish."" ) ; return false ; } IpPort ipPort = NetUtils . parseIpPortFromURI ( ""http://"" + listenAddress ) ; if ( ipPort == null ) { LOGGER . info ( ""invalid listenAddress {}, can not publish, format should be ip:port."" , listenAddress ) ; return false ; } if ( NetUtils . canTcpListen ( ipPort . getSocketAddress ( ) . getAddress ( ) , ipPort . getPort ( ) ) ) { LOGGER . info ( ""{} is not listened, can not publish."" , ipPort . 
getSocketAddress ( ) ) ; return false ; } return true ; } +" +430,"public static boolean canPublishEndpoint ( String listenAddress ) { if ( StringUtils . isEmpty ( listenAddress ) ) { LOGGER . info ( ""listenAddress is null, can not publish."" ) ; return false ; } IpPort ipPort = NetUtils . parseIpPortFromURI ( ""http://"" + listenAddress ) ; if ( ipPort == null ) { LOGGER . info ( ""invalid listenAddress {}, can not publish, format should be ip:port."" , listenAddress ) ; return false ; } if ( NetUtils . canTcpListen ( ipPort . getSocketAddress ( ) . getAddress ( ) , ipPort . getPort ( ) ) ) { return false ; } return true ; } +","public static boolean canPublishEndpoint ( String listenAddress ) { if ( StringUtils . isEmpty ( listenAddress ) ) { LOGGER . info ( ""listenAddress is null, can not publish."" ) ; return false ; } IpPort ipPort = NetUtils . parseIpPortFromURI ( ""http://"" + listenAddress ) ; if ( ipPort == null ) { LOGGER . info ( ""invalid listenAddress {}, can not publish, format should be ip:port."" , listenAddress ) ; return false ; } if ( NetUtils . canTcpListen ( ipPort . getSocketAddress ( ) . getAddress ( ) , ipPort . getPort ( ) ) ) { LOGGER . info ( ""{} is not listened, can not publish."" , ipPort . getSocketAddress ( ) ) ; return false ; } return true ; } +" +431,"public static Process startProcess ( List < String > command ) { String commandString = Joiner . on ( "" "" ) . join ( command ) ; ProcessBuilder processBuilder = new ProcessBuilder ( command ) ; Process process = startProcess ( processBuilder ) ; closeInput ( process ) ; pipeStdout ( commandString , process ) ; pipeStderr ( commandString , process ) ; return process ; } +","public static Process startProcess ( List < String > command ) { String commandString = Joiner . on ( "" "" ) . join ( command ) ; log . info ( ""Starting: %s"" , commandString ) ; ProcessBuilder processBuilder = new ProcessBuilder ( command ) ; Process process = startProcess ( processBuilder ) ; closeInput ( process ) ; pipeStdout ( commandString , process ) ; pipeStderr ( commandString , process ) ; return process ; } +" +432,"public String decrypt ( String encryptedData ) { if ( encryptedData != null && encryptedData . length ( ) != 0 ) { try { byte [ ] cryptobytes = DatatypeConverter . parseBase64Binary ( encryptedData ) ; byte [ ] clearbytes = decryptCipher . doFinal ( cryptobytes ) ; return new String ( clearbytes , Charset . defaultCharset ( ) ) ; } catch ( IllegalBlockSizeException | BadPaddingException e ) { return encryptedData ; } } else { LOG . warn ( ""encryptedData is empty or null."" ) ; return encryptedData ; } } +","public String decrypt ( String encryptedData ) { if ( encryptedData != null && encryptedData . length ( ) != 0 ) { try { byte [ ] cryptobytes = DatatypeConverter . parseBase64Binary ( encryptedData ) ; byte [ ] clearbytes = decryptCipher . doFinal ( cryptobytes ) ; return new String ( clearbytes , Charset . defaultCharset ( ) ) ; } catch ( IllegalBlockSizeException | BadPaddingException e ) { LOG . error ( ""Failed to decrypt encoded data"" , e ) ; return encryptedData ; } } else { LOG . warn ( ""encryptedData is empty or null."" ) ; return encryptedData ; } } +" +433,"public String decrypt ( String encryptedData ) { if ( encryptedData != null && encryptedData . length ( ) != 0 ) { try { byte [ ] cryptobytes = DatatypeConverter . parseBase64Binary ( encryptedData ) ; byte [ ] clearbytes = decryptCipher . doFinal ( cryptobytes ) ; return new String ( clearbytes , Charset . 
defaultCharset ( ) ) ; } catch ( IllegalBlockSizeException | BadPaddingException e ) { LOG . error ( ""Failed to decrypt encoded data"" , e ) ; return encryptedData ; } } else { return encryptedData ; } } +","public String decrypt ( String encryptedData ) { if ( encryptedData != null && encryptedData . length ( ) != 0 ) { try { byte [ ] cryptobytes = DatatypeConverter . parseBase64Binary ( encryptedData ) ; byte [ ] clearbytes = decryptCipher . doFinal ( cryptobytes ) ; return new String ( clearbytes , Charset . defaultCharset ( ) ) ; } catch ( IllegalBlockSizeException | BadPaddingException e ) { LOG . error ( ""Failed to decrypt encoded data"" , e ) ; return encryptedData ; } } else { LOG . warn ( ""encryptedData is empty or null."" ) ; return encryptedData ; } } +" +434,"@ POST @ Path ( ""/{cmdletId}/stop"" ) public Response stop ( @ PathParam ( ""cmdletId"" ) String cmdletId ) { Long longNumber = Long . parseLong ( cmdletId ) ; try { smartEngine . getCmdletManager ( ) . disableCmdlet ( longNumber ) ; return new JsonResponse < > ( Response . Status . OK ) . build ( ) ; } catch ( Exception e ) { return new JsonResponse < > ( Response . Status . INTERNAL_SERVER_ERROR , e . getMessage ( ) , ExceptionUtils . getStackTrace ( e ) ) . build ( ) ; } } +","@ POST @ Path ( ""/{cmdletId}/stop"" ) public Response stop ( @ PathParam ( ""cmdletId"" ) String cmdletId ) { Long longNumber = Long . parseLong ( cmdletId ) ; try { smartEngine . getCmdletManager ( ) . disableCmdlet ( longNumber ) ; return new JsonResponse < > ( Response . Status . OK ) . build ( ) ; } catch ( Exception e ) { logger . error ( ""Exception in CmdletRestApi while stop cmdlet "" + longNumber , e ) ; return new JsonResponse < > ( Response . Status . INTERNAL_SERVER_ERROR , e . getMessage ( ) , ExceptionUtils . getStackTrace ( e ) ) . build ( ) ; } } +" +435,"@ PreDestroy public void destroy ( ) { if ( _taskScheduler != null ) { _taskScheduler . shutdown ( ) ; } } +","@ PreDestroy public void destroy ( ) { _log . info ( ""destroy"" ) ; if ( _taskScheduler != null ) { _taskScheduler . shutdown ( ) ; } } +" +436,"private SuperheroNamedEntityFound getAllSuperheroNamesFromDBpediaMatchingPositions ( String question ) { String serviceUrl = ""http://dbpedia.org/sparql"" ; String query = """" + ""PREFIX dbr: \n"" + ""PREFIX rdf: \n"" + ""PREFIX rdfs: \n"" + ""PREFIX pt: \n"" + ""SELECT ?hero (str(?herolabel) as ?herolabelString) (lang(?herolabel) as ?herolabelLang)\n"" + ""WHERE {\n"" + "" ?hero pt:subject dbr:Category:Superhero_film_characters .\n"" + "" ?hero rdfs:label ?herolabel.\n"" + ""}\n"" + ""LIMIT 10000"" ; QueryExecution qe = QueryExecutionFactory . sparqlService ( serviceUrl , query ) ; ResultSet rs = qe . execSelect ( ) ; while ( rs . hasNext ( ) ) { QuerySolution s = rs . nextSolution ( ) ; String characterName = this . getCharacterName ( s ) ; if ( nameFound ( question , characterName ) ) { logger . info ( ""found Super Hero name: {}"" , characterName ) ; String resource = this . getResource ( s ) ; int [ ] index = this . getIndexOfName ( question , characterName ) ; return new SuperheroNamedEntityFound ( characterName , resource , index [ 0 ] , index [ 1 ] ) ; } } logger . 
warn ( ""no matching names could be found"" ) ; return null ; } +","private SuperheroNamedEntityFound getAllSuperheroNamesFromDBpediaMatchingPositions ( String question ) { String serviceUrl = ""http://dbpedia.org/sparql"" ; String query = """" + ""PREFIX dbr: \n"" + ""PREFIX rdf: \n"" + ""PREFIX rdfs: \n"" + ""PREFIX pt: \n"" + ""SELECT ?hero (str(?herolabel) as ?herolabelString) (lang(?herolabel) as ?herolabelLang)\n"" + ""WHERE {\n"" + "" ?hero pt:subject dbr:Category:Superhero_film_characters .\n"" + "" ?hero rdfs:label ?herolabel.\n"" + ""}\n"" + ""LIMIT 10000"" ; logger . info ( ""searching for character names on DBpedia ...\nDBpedia query: \n{}"" , query ) ; QueryExecution qe = QueryExecutionFactory . sparqlService ( serviceUrl , query ) ; ResultSet rs = qe . execSelect ( ) ; while ( rs . hasNext ( ) ) { QuerySolution s = rs . nextSolution ( ) ; String characterName = this . getCharacterName ( s ) ; if ( nameFound ( question , characterName ) ) { logger . info ( ""found Super Hero name: {}"" , characterName ) ; String resource = this . getResource ( s ) ; int [ ] index = this . getIndexOfName ( question , characterName ) ; return new SuperheroNamedEntityFound ( characterName , resource , index [ 0 ] , index [ 1 ] ) ; } } logger . warn ( ""no matching names could be found"" ) ; return null ; } +" +437,"private SuperheroNamedEntityFound getAllSuperheroNamesFromDBpediaMatchingPositions ( String question ) { String serviceUrl = ""http://dbpedia.org/sparql"" ; String query = """" + ""PREFIX dbr: \n"" + ""PREFIX rdf: \n"" + ""PREFIX rdfs: \n"" + ""PREFIX pt: \n"" + ""SELECT ?hero (str(?herolabel) as ?herolabelString) (lang(?herolabel) as ?herolabelLang)\n"" + ""WHERE {\n"" + "" ?hero pt:subject dbr:Category:Superhero_film_characters .\n"" + "" ?hero rdfs:label ?herolabel.\n"" + ""}\n"" + ""LIMIT 10000"" ; logger . info ( ""searching for character names on DBpedia ...\nDBpedia query: \n{}"" , query ) ; QueryExecution qe = QueryExecutionFactory . sparqlService ( serviceUrl , query ) ; ResultSet rs = qe . execSelect ( ) ; while ( rs . hasNext ( ) ) { QuerySolution s = rs . nextSolution ( ) ; String characterName = this . getCharacterName ( s ) ; if ( nameFound ( question , characterName ) ) { String resource = this . getResource ( s ) ; int [ ] index = this . getIndexOfName ( question , characterName ) ; return new SuperheroNamedEntityFound ( characterName , resource , index [ 0 ] , index [ 1 ] ) ; } } logger . warn ( ""no matching names could be found"" ) ; return null ; } +","private SuperheroNamedEntityFound getAllSuperheroNamesFromDBpediaMatchingPositions ( String question ) { String serviceUrl = ""http://dbpedia.org/sparql"" ; String query = """" + ""PREFIX dbr: \n"" + ""PREFIX rdf: \n"" + ""PREFIX rdfs: \n"" + ""PREFIX pt: \n"" + ""SELECT ?hero (str(?herolabel) as ?herolabelString) (lang(?herolabel) as ?herolabelLang)\n"" + ""WHERE {\n"" + "" ?hero pt:subject dbr:Category:Superhero_film_characters .\n"" + "" ?hero rdfs:label ?herolabel.\n"" + ""}\n"" + ""LIMIT 10000"" ; logger . info ( ""searching for character names on DBpedia ...\nDBpedia query: \n{}"" , query ) ; QueryExecution qe = QueryExecutionFactory . sparqlService ( serviceUrl , query ) ; ResultSet rs = qe . execSelect ( ) ; while ( rs . hasNext ( ) ) { QuerySolution s = rs . nextSolution ( ) ; String characterName = this . getCharacterName ( s ) ; if ( nameFound ( question , characterName ) ) { logger . info ( ""found Super Hero name: {}"" , characterName ) ; String resource = this . getResource ( s ) ; int [ ] index = this . 
getIndexOfName ( question , characterName ) ; return new SuperheroNamedEntityFound ( characterName , resource , index [ 0 ] , index [ 1 ] ) ; } } logger . warn ( ""no matching names could be found"" ) ; return null ; } +" +438,"private SuperheroNamedEntityFound getAllSuperheroNamesFromDBpediaMatchingPositions ( String question ) { String serviceUrl = ""http://dbpedia.org/sparql"" ; String query = """" + ""PREFIX dbr: \n"" + ""PREFIX rdf: \n"" + ""PREFIX rdfs: \n"" + ""PREFIX pt: \n"" + ""SELECT ?hero (str(?herolabel) as ?herolabelString) (lang(?herolabel) as ?herolabelLang)\n"" + ""WHERE {\n"" + "" ?hero pt:subject dbr:Category:Superhero_film_characters .\n"" + "" ?hero rdfs:label ?herolabel.\n"" + ""}\n"" + ""LIMIT 10000"" ; logger . info ( ""searching for character names on DBpedia ...\nDBpedia query: \n{}"" , query ) ; QueryExecution qe = QueryExecutionFactory . sparqlService ( serviceUrl , query ) ; ResultSet rs = qe . execSelect ( ) ; while ( rs . hasNext ( ) ) { QuerySolution s = rs . nextSolution ( ) ; String characterName = this . getCharacterName ( s ) ; if ( nameFound ( question , characterName ) ) { logger . info ( ""found Super Hero name: {}"" , characterName ) ; String resource = this . getResource ( s ) ; int [ ] index = this . getIndexOfName ( question , characterName ) ; return new SuperheroNamedEntityFound ( characterName , resource , index [ 0 ] , index [ 1 ] ) ; } } return null ; } +","private SuperheroNamedEntityFound getAllSuperheroNamesFromDBpediaMatchingPositions ( String question ) { String serviceUrl = ""http://dbpedia.org/sparql"" ; String query = """" + ""PREFIX dbr: \n"" + ""PREFIX rdf: \n"" + ""PREFIX rdfs: \n"" + ""PREFIX pt: \n"" + ""SELECT ?hero (str(?herolabel) as ?herolabelString) (lang(?herolabel) as ?herolabelLang)\n"" + ""WHERE {\n"" + "" ?hero pt:subject dbr:Category:Superhero_film_characters .\n"" + "" ?hero rdfs:label ?herolabel.\n"" + ""}\n"" + ""LIMIT 10000"" ; logger . info ( ""searching for character names on DBpedia ...\nDBpedia query: \n{}"" , query ) ; QueryExecution qe = QueryExecutionFactory . sparqlService ( serviceUrl , query ) ; ResultSet rs = qe . execSelect ( ) ; while ( rs . hasNext ( ) ) { QuerySolution s = rs . nextSolution ( ) ; String characterName = this . getCharacterName ( s ) ; if ( nameFound ( question , characterName ) ) { logger . info ( ""found Super Hero name: {}"" , characterName ) ; String resource = this . getResource ( s ) ; int [ ] index = this . getIndexOfName ( question , characterName ) ; return new SuperheroNamedEntityFound ( characterName , resource , index [ 0 ] , index [ 1 ] ) ; } } logger . warn ( ""no matching names could be found"" ) ; return null ; } +" +439,"public List < Study > findByTitle ( String title ) { List < Study > study = null ; if ( title != null ) { try { study = studyDAO . findByTitle ( title . trim ( ) ) ; } catch ( Exception exception ) { } } return study ; } +","public List < Study > findByTitle ( String title ) { List < Study > study = null ; if ( title != null ) { try { study = studyDAO . findByTitle ( title . trim ( ) ) ; } catch ( Exception exception ) { LOG . debug ( ""Cannot find Study by title "" + title ) ; } } return study ; } +" +440,"@ Test public void testProcessInstanceSignalEvent ( ) throws Exception { String processId = ""org.drools.core.event"" ; String cmd = """" ; cmd += ""\n"" ; cmd += "" \n"" ; cmd += "" \n"" ; cmd += ""\n"" ; String outXml = new String ( ( byte [ ] ) template . 
requestBody ( ""direct:test-with-session"" , cmd ) ) ; assertNotNull ( outXml ) ; int processInstanceId = 1 ; cmd = """" ; cmd += ""\n"" ; cmd += "" "" ; cmd += "" MyValue"" ; cmd += "" "" ; cmd += ""\n"" ; outXml = new String ( ( byte [ ] ) template . requestBody ( ""direct:test-with-session"" , cmd ) ) ; logger . debug ( outXml ) ; } +","@ Test public void testProcessInstanceSignalEvent ( ) throws Exception { String processId = ""org.drools.core.event"" ; String cmd = """" ; cmd += ""\n"" ; cmd += "" \n"" ; cmd += "" \n"" ; cmd += ""\n"" ; logger . debug ( cmd ) ; String outXml = new String ( ( byte [ ] ) template . requestBody ( ""direct:test-with-session"" , cmd ) ) ; assertNotNull ( outXml ) ; int processInstanceId = 1 ; cmd = """" ; cmd += ""\n"" ; cmd += "" "" ; cmd += "" MyValue"" ; cmd += "" "" ; cmd += ""\n"" ; outXml = new String ( ( byte [ ] ) template . requestBody ( ""direct:test-with-session"" , cmd ) ) ; logger . debug ( outXml ) ; } +" +441,"@ Test public void testProcessInstanceSignalEvent ( ) throws Exception { String processId = ""org.drools.core.event"" ; String cmd = """" ; cmd += ""\n"" ; cmd += "" \n"" ; cmd += "" \n"" ; cmd += ""\n"" ; logger . debug ( cmd ) ; String outXml = new String ( ( byte [ ] ) template . requestBody ( ""direct:test-with-session"" , cmd ) ) ; assertNotNull ( outXml ) ; int processInstanceId = 1 ; cmd = """" ; cmd += ""\n"" ; cmd += "" "" ; cmd += "" MyValue"" ; cmd += "" "" ; cmd += ""\n"" ; outXml = new String ( ( byte [ ] ) template . requestBody ( ""direct:test-with-session"" , cmd ) ) ; } +","@ Test public void testProcessInstanceSignalEvent ( ) throws Exception { String processId = ""org.drools.core.event"" ; String cmd = """" ; cmd += ""\n"" ; cmd += "" \n"" ; cmd += "" \n"" ; cmd += ""\n"" ; logger . debug ( cmd ) ; String outXml = new String ( ( byte [ ] ) template . requestBody ( ""direct:test-with-session"" , cmd ) ) ; assertNotNull ( outXml ) ; int processInstanceId = 1 ; cmd = """" ; cmd += ""\n"" ; cmd += "" "" ; cmd += "" MyValue"" ; cmd += "" "" ; cmd += ""\n"" ; outXml = new String ( ( byte [ ] ) template . requestBody ( ""direct:test-with-session"" , cmd ) ) ; logger . debug ( outXml ) ; } +" +442,"private void tagFrontLoadLastPeriods ( ) { List < Period > periodList = examination . getPeriodList ( ) ; int frontLoadLastPeriodSize = examination . getConstraintConfiguration ( ) . getFrontLoadLastPeriodSize ( ) ; if ( frontLoadLastPeriodSize == 0 ) { return ; } int minimumPeriodId = periodList . size ( ) - frontLoadLastPeriodSize ; if ( minimumPeriodId < 0 ) { minimumPeriodId = 0 ; } for ( Period period : periodList . subList ( minimumPeriodId , periodList . size ( ) ) ) { period . setFrontLoadLast ( true ) ; } } +","private void tagFrontLoadLastPeriods ( ) { List < Period > periodList = examination . getPeriodList ( ) ; int frontLoadLastPeriodSize = examination . getConstraintConfiguration ( ) . getFrontLoadLastPeriodSize ( ) ; if ( frontLoadLastPeriodSize == 0 ) { return ; } int minimumPeriodId = periodList . size ( ) - frontLoadLastPeriodSize ; if ( minimumPeriodId < 0 ) { logger . warn ( ""The frontLoadLastPeriodSize ("" + frontLoadLastPeriodSize + "") is bigger than periodListSize ("" + periodList . size ( ) + ""). Tagging all periods as frontLoadLast..."" ) ; minimumPeriodId = 0 ; } for ( Period period : periodList . subList ( minimumPeriodId , periodList . size ( ) ) ) { period . 
setFrontLoadLast ( true ) ; } } +" +443,"public Response getResourceMetaWithDocId ( String docId , Set < String > extensions ) { if ( StringUtils . isBlank ( docId ) ) { return Response . status ( Response . Status . BAD_REQUEST ) . entity ( ""missing id"" ) . build ( ) ; } HProjectIteration hProjectIteration = retrieveAndCheckIteration ( false ) ; EntityTag etag = eTagUtils . generateETagForDocument ( hProjectIteration , docId , extensions ) ; Response . ResponseBuilder response = request . evaluatePreconditions ( etag ) ; if ( response != null ) { return response . build ( ) ; } HDocument doc = documentDAO . getByDocIdAndIteration ( hProjectIteration , docId ) ; if ( doc == null ) { return Response . status ( Response . Status . NOT_FOUND ) . entity ( ""document not found"" ) . build ( ) ; } ResourceMeta entity = new ResourceMeta ( doc . getDocId ( ) ) ; resourceUtils . transferToAbstractResourceMeta ( doc , entity ) ; resourceUtils . transferToResourceExtensions ( doc , entity . getExtensions ( true ) , extensions ) ; log . debug ( ""successfuly get resource meta: {}"" , entity ) ; return Response . ok ( ) . entity ( entity ) . tag ( etag ) . build ( ) ; } +","public Response getResourceMetaWithDocId ( String docId , Set < String > extensions ) { log . debug ( ""start to get resource meta"" ) ; if ( StringUtils . isBlank ( docId ) ) { return Response . status ( Response . Status . BAD_REQUEST ) . entity ( ""missing id"" ) . build ( ) ; } HProjectIteration hProjectIteration = retrieveAndCheckIteration ( false ) ; EntityTag etag = eTagUtils . generateETagForDocument ( hProjectIteration , docId , extensions ) ; Response . ResponseBuilder response = request . evaluatePreconditions ( etag ) ; if ( response != null ) { return response . build ( ) ; } HDocument doc = documentDAO . getByDocIdAndIteration ( hProjectIteration , docId ) ; if ( doc == null ) { return Response . status ( Response . Status . NOT_FOUND ) . entity ( ""document not found"" ) . build ( ) ; } ResourceMeta entity = new ResourceMeta ( doc . getDocId ( ) ) ; resourceUtils . transferToAbstractResourceMeta ( doc , entity ) ; resourceUtils . transferToResourceExtensions ( doc , entity . getExtensions ( true ) , extensions ) ; log . debug ( ""successfuly get resource meta: {}"" , entity ) ; return Response . ok ( ) . entity ( entity ) . tag ( etag ) . build ( ) ; } +" +444,"public Response getResourceMetaWithDocId ( String docId , Set < String > extensions ) { log . debug ( ""start to get resource meta"" ) ; if ( StringUtils . isBlank ( docId ) ) { return Response . status ( Response . Status . BAD_REQUEST ) . entity ( ""missing id"" ) . build ( ) ; } HProjectIteration hProjectIteration = retrieveAndCheckIteration ( false ) ; EntityTag etag = eTagUtils . generateETagForDocument ( hProjectIteration , docId , extensions ) ; Response . ResponseBuilder response = request . evaluatePreconditions ( etag ) ; if ( response != null ) { return response . build ( ) ; } HDocument doc = documentDAO . getByDocIdAndIteration ( hProjectIteration , docId ) ; if ( doc == null ) { return Response . status ( Response . Status . NOT_FOUND ) . entity ( ""document not found"" ) . build ( ) ; } ResourceMeta entity = new ResourceMeta ( doc . getDocId ( ) ) ; resourceUtils . transferToAbstractResourceMeta ( doc , entity ) ; resourceUtils . transferToResourceExtensions ( doc , entity . getExtensions ( true ) , extensions ) ; return Response . ok ( ) . entity ( entity ) . tag ( etag ) . 
build ( ) ; } +","public Response getResourceMetaWithDocId ( String docId , Set < String > extensions ) { log . debug ( ""start to get resource meta"" ) ; if ( StringUtils . isBlank ( docId ) ) { return Response . status ( Response . Status . BAD_REQUEST ) . entity ( ""missing id"" ) . build ( ) ; } HProjectIteration hProjectIteration = retrieveAndCheckIteration ( false ) ; EntityTag etag = eTagUtils . generateETagForDocument ( hProjectIteration , docId , extensions ) ; Response . ResponseBuilder response = request . evaluatePreconditions ( etag ) ; if ( response != null ) { return response . build ( ) ; } HDocument doc = documentDAO . getByDocIdAndIteration ( hProjectIteration , docId ) ; if ( doc == null ) { return Response . status ( Response . Status . NOT_FOUND ) . entity ( ""document not found"" ) . build ( ) ; } ResourceMeta entity = new ResourceMeta ( doc . getDocId ( ) ) ; resourceUtils . transferToAbstractResourceMeta ( doc , entity ) ; resourceUtils . transferToResourceExtensions ( doc , entity . getExtensions ( true ) , extensions ) ; log . debug ( ""successfuly get resource meta: {}"" , entity ) ; return Response . ok ( ) . entity ( entity ) . tag ( etag ) . build ( ) ; } +" +445,"private void notifyAsynchronousCommand ( final ZToolPacket packet ) { final AsynchronousCommandListener [ ] listeners ; synchronized ( asynchrounsCommandListeners ) { listeners = asynchrounsCommandListeners . toArray ( new AsynchronousCommandListener [ ] { } ) ; } for ( final AsynchronousCommandListener listener : listeners ) { try { listener . receivedAsynchronousCommand ( packet ) ; } catch ( Throwable e ) { } } } +","private void notifyAsynchronousCommand ( final ZToolPacket packet ) { final AsynchronousCommandListener [ ] listeners ; synchronized ( asynchrounsCommandListeners ) { listeners = asynchrounsCommandListeners . toArray ( new AsynchronousCommandListener [ ] { } ) ; } for ( final AsynchronousCommandListener listener : listeners ) { try { listener . receivedAsynchronousCommand ( packet ) ; } catch ( Throwable e ) { LOGGER . error ( ""Error in incoming asynchronous message processing."" , e ) ; } } } +" +446,"public static String resolveImageFile ( SymbolWidget widget , String imageFileName ) { try { String expandedFileName = MacroHandler . replace ( widget . getMacrosOrProperties ( ) , imageFileName ) ; final DisplayModel widgetModel = widget . getDisplayModel ( ) ; return ModelResourceUtil . resolveResource ( widgetModel , expandedFileName ) ; } catch ( Exception ex ) { return null ; } } +","public static String resolveImageFile ( SymbolWidget widget , String imageFileName ) { try { String expandedFileName = MacroHandler . replace ( widget . getMacrosOrProperties ( ) , imageFileName ) ; final DisplayModel widgetModel = widget . getDisplayModel ( ) ; return ModelResourceUtil . resolveResource ( widgetModel , expandedFileName ) ; } catch ( Exception ex ) { logger . log ( Level . WARNING , String . format ( ""Failure resolving image path: %s"" , imageFileName ) , ex ) ; return null ; } } +" +447,"private boolean needReloadContainerProviders ( List < ContainerProvider > providers ) { for ( ContainerProvider provider : providers ) { if ( provider . needsReload ( ) ) { return true ; } } return false ; } +","private boolean needReloadContainerProviders ( List < ContainerProvider > providers ) { for ( ContainerProvider provider : providers ) { if ( provider . needsReload ( ) ) { LOG . info ( ""Detected container provider [{}] needs to be reloaded. 
Reloading all providers."" , provider ) ; return true ; } } return false ; } +" +448,"public Optional < ActionResultOutput > get ( ActionResultOutputKey actionResultOutputKey ) { try { String query = ""select RUN_ID, PRC_ID, ACTION_ID, OUT_NM, OUT_VAL from "" + getMetadataRepository ( ) . getTableNameByLabel ( ""ActionResultOutputs"" ) + "" where RUN_ID = "" + SQLTools . getStringForSQL ( actionResultOutputKey . getRunId ( ) ) + "" and ACTION_ID = "" + SQLTools . getStringForSQL ( actionResultOutputKey . getActionId ( ) ) + "" and OUT_NM = "" + SQLTools . getStringForSQL ( actionResultOutputKey . getOutputName ( ) ) + "" and PRC_ID = "" + actionResultOutputKey . getProcessId ( ) + "";"" ; CachedRowSet cachedRowSet = getMetadataRepository ( ) . executeQuery ( query , ""reader"" ) ; if ( cachedRowSet . size ( ) == 0 ) { return Optional . empty ( ) ; } else if ( cachedRowSet . size ( ) > 1 ) { } cachedRowSet . next ( ) ; return Optional . of ( new ActionResultOutput ( actionResultOutputKey , cachedRowSet . getString ( ""OUT_VAL"" ) ) ) ; } catch ( SQLException e ) { throw new RuntimeException ( e ) ; } } +","public Optional < ActionResultOutput > get ( ActionResultOutputKey actionResultOutputKey ) { try { String query = ""select RUN_ID, PRC_ID, ACTION_ID, OUT_NM, OUT_VAL from "" + getMetadataRepository ( ) . getTableNameByLabel ( ""ActionResultOutputs"" ) + "" where RUN_ID = "" + SQLTools . getStringForSQL ( actionResultOutputKey . getRunId ( ) ) + "" and ACTION_ID = "" + SQLTools . getStringForSQL ( actionResultOutputKey . getActionId ( ) ) + "" and OUT_NM = "" + SQLTools . getStringForSQL ( actionResultOutputKey . getOutputName ( ) ) + "" and PRC_ID = "" + actionResultOutputKey . getProcessId ( ) + "";"" ; CachedRowSet cachedRowSet = getMetadataRepository ( ) . executeQuery ( query , ""reader"" ) ; if ( cachedRowSet . size ( ) == 0 ) { return Optional . empty ( ) ; } else if ( cachedRowSet . size ( ) > 1 ) { LOGGER . warn ( MessageFormat . format ( ""Found multiple implementations for ActionResultOutput {0}. Returning first implementation"" , actionResultOutputKey . toString ( ) ) ) ; } cachedRowSet . next ( ) ; return Optional . of ( new ActionResultOutput ( actionResultOutputKey , cachedRowSet . getString ( ""OUT_VAL"" ) ) ) ; } catch ( SQLException e ) { throw new RuntimeException ( e ) ; } } +" +449,"public TimerInstance configureSLATimer ( String slaDueDateExpression ) { slaDueDateExpression = resolveVariable ( slaDueDateExpression ) ; if ( slaDueDateExpression == null || slaDueDateExpression . trim ( ) . isEmpty ( ) ) { return null ; } logger . debug ( ""SLA due date is set to {}"" , slaDueDateExpression ) ; InternalKnowledgeRuntime kruntime = getKnowledgeRuntime ( ) ; long duration ; if ( kruntime . getEnvironment ( ) . get ( ""jbpm.business.calendar"" ) != null ) { BusinessCalendar businessCalendar = ( BusinessCalendar ) kruntime . getEnvironment ( ) . get ( ""jbpm.business.calendar"" ) ; duration = businessCalendar . calculateBusinessTimeAsDuration ( slaDueDateExpression ) ; } else { duration = DateTimeUtils . parseDuration ( slaDueDateExpression ) ; } TimerInstance timerInstance = new TimerInstance ( ) ; timerInstance . setTimerId ( - 1 ) ; timerInstance . setDelay ( duration ) ; timerInstance . setPeriod ( 0 ) ; if ( useTimerSLATracking ( ) ) { ProcessInstanceJobDescription description = ProcessInstanceJobDescription . of ( - 1L , DurationExpirationTime . after ( duration ) , getStringId ( ) , getProcessId ( ) ) ; timerInstance . setId ( ( KogitoProcessRuntime . 
asKogitoProcessRuntime ( kruntime . getProcessRuntime ( ) ) . getJobsService ( ) . scheduleProcessInstanceJob ( description ) ) ) ; } return timerInstance ; } +","public TimerInstance configureSLATimer ( String slaDueDateExpression ) { slaDueDateExpression = resolveVariable ( slaDueDateExpression ) ; if ( slaDueDateExpression == null || slaDueDateExpression . trim ( ) . isEmpty ( ) ) { logger . debug ( ""Sla due date expression resolved to no value '{}'"" , slaDueDateExpression ) ; return null ; } logger . debug ( ""SLA due date is set to {}"" , slaDueDateExpression ) ; InternalKnowledgeRuntime kruntime = getKnowledgeRuntime ( ) ; long duration ; if ( kruntime . getEnvironment ( ) . get ( ""jbpm.business.calendar"" ) != null ) { BusinessCalendar businessCalendar = ( BusinessCalendar ) kruntime . getEnvironment ( ) . get ( ""jbpm.business.calendar"" ) ; duration = businessCalendar . calculateBusinessTimeAsDuration ( slaDueDateExpression ) ; } else { duration = DateTimeUtils . parseDuration ( slaDueDateExpression ) ; } TimerInstance timerInstance = new TimerInstance ( ) ; timerInstance . setTimerId ( - 1 ) ; timerInstance . setDelay ( duration ) ; timerInstance . setPeriod ( 0 ) ; if ( useTimerSLATracking ( ) ) { ProcessInstanceJobDescription description = ProcessInstanceJobDescription . of ( - 1L , DurationExpirationTime . after ( duration ) , getStringId ( ) , getProcessId ( ) ) ; timerInstance . setId ( ( KogitoProcessRuntime . asKogitoProcessRuntime ( kruntime . getProcessRuntime ( ) ) . getJobsService ( ) . scheduleProcessInstanceJob ( description ) ) ) ; } return timerInstance ; } +" +450,"public TimerInstance configureSLATimer ( String slaDueDateExpression ) { slaDueDateExpression = resolveVariable ( slaDueDateExpression ) ; if ( slaDueDateExpression == null || slaDueDateExpression . trim ( ) . isEmpty ( ) ) { logger . debug ( ""Sla due date expression resolved to no value '{}'"" , slaDueDateExpression ) ; return null ; } InternalKnowledgeRuntime kruntime = getKnowledgeRuntime ( ) ; long duration ; if ( kruntime . getEnvironment ( ) . get ( ""jbpm.business.calendar"" ) != null ) { BusinessCalendar businessCalendar = ( BusinessCalendar ) kruntime . getEnvironment ( ) . get ( ""jbpm.business.calendar"" ) ; duration = businessCalendar . calculateBusinessTimeAsDuration ( slaDueDateExpression ) ; } else { duration = DateTimeUtils . parseDuration ( slaDueDateExpression ) ; } TimerInstance timerInstance = new TimerInstance ( ) ; timerInstance . setTimerId ( - 1 ) ; timerInstance . setDelay ( duration ) ; timerInstance . setPeriod ( 0 ) ; if ( useTimerSLATracking ( ) ) { ProcessInstanceJobDescription description = ProcessInstanceJobDescription . of ( - 1L , DurationExpirationTime . after ( duration ) , getStringId ( ) , getProcessId ( ) ) ; timerInstance . setId ( ( KogitoProcessRuntime . asKogitoProcessRuntime ( kruntime . getProcessRuntime ( ) ) . getJobsService ( ) . scheduleProcessInstanceJob ( description ) ) ) ; } return timerInstance ; } +","public TimerInstance configureSLATimer ( String slaDueDateExpression ) { slaDueDateExpression = resolveVariable ( slaDueDateExpression ) ; if ( slaDueDateExpression == null || slaDueDateExpression . trim ( ) . isEmpty ( ) ) { logger . debug ( ""Sla due date expression resolved to no value '{}'"" , slaDueDateExpression ) ; return null ; } logger . debug ( ""SLA due date is set to {}"" , slaDueDateExpression ) ; InternalKnowledgeRuntime kruntime = getKnowledgeRuntime ( ) ; long duration ; if ( kruntime . getEnvironment ( ) . 
get ( ""jbpm.business.calendar"" ) != null ) { BusinessCalendar businessCalendar = ( BusinessCalendar ) kruntime . getEnvironment ( ) . get ( ""jbpm.business.calendar"" ) ; duration = businessCalendar . calculateBusinessTimeAsDuration ( slaDueDateExpression ) ; } else { duration = DateTimeUtils . parseDuration ( slaDueDateExpression ) ; } TimerInstance timerInstance = new TimerInstance ( ) ; timerInstance . setTimerId ( - 1 ) ; timerInstance . setDelay ( duration ) ; timerInstance . setPeriod ( 0 ) ; if ( useTimerSLATracking ( ) ) { ProcessInstanceJobDescription description = ProcessInstanceJobDescription . of ( - 1L , DurationExpirationTime . after ( duration ) , getStringId ( ) , getProcessId ( ) ) ; timerInstance . setId ( ( KogitoProcessRuntime . asKogitoProcessRuntime ( kruntime . getProcessRuntime ( ) ) . getJobsService ( ) . scheduleProcessInstanceJob ( description ) ) ) ; } return timerInstance ; } +" +451,"public void processMessage ( final ObjectMessage message ) throws JMSException { String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +","public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . 
getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +" +452,"public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . 
handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +","public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +" +453,"public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . 
DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +","public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . 
name ( ) , correlationUid , notificationMessage , notificationType ) ; } +" +454,"public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +","public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . 
debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +" +455,"public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +","public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . 
getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +" +456,"public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . 
handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +","public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +" +457,"public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . 
DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . name ( ) , correlationUid , notificationMessage , notificationType ) ; } +","public void processMessage ( final ObjectMessage message ) throws JMSException { LOGGER . debug ( ""Processing smart metering response message"" ) ; String correlationUid = null ; String messageType = null ; String organisationIdentification = null ; String deviceIdentification = null ; String notificationMessage ; NotificationType notificationType ; ResponseMessageResultType resultType ; String resultDescription ; Serializable dataObject ; try { correlationUid = message . getJMSCorrelationID ( ) ; messageType = message . getJMSType ( ) ; organisationIdentification = message . getStringProperty ( Constants . ORGANISATION_IDENTIFICATION ) ; deviceIdentification = message . getStringProperty ( Constants . DEVICE_IDENTIFICATION ) ; resultType = ResponseMessageResultType . valueOf ( message . getStringProperty ( Constants . RESULT ) ) ; resultDescription = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationMessage = message . getStringProperty ( Constants . DESCRIPTION ) ; notificationType = NotificationType . valueOf ( messageType ) ; dataObject = message . getObject ( ) ; } catch ( final JMSException | IllegalArgumentException | NullPointerException e ) { LOGGER . error ( ""UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up."" , e ) ; LOGGER . debug ( ""correlationUid: {}"" , correlationUid ) ; LOGGER . debug ( ""messageType: {}"" , messageType ) ; LOGGER . debug ( ""organisationIdentification: {}"" , organisationIdentification ) ; LOGGER . debug ( ""deviceIdentification: {}"" , deviceIdentification ) ; return ; } try { LOGGER . info ( ""Calling application service function to handle response: {}"" , messageType ) ; final CorrelationIds ids = new CorrelationIds ( organisationIdentification , deviceIdentification , correlationUid ) ; this . handleMessage ( ids , messageType , resultType , resultDescription , dataObject ) ; } catch ( final Exception e ) { this . handleError ( e , correlationUid , organisationIdentification , deviceIdentification , notificationType ) ; return ; } this . sendNotification ( organisationIdentification , deviceIdentification , resultType . 
name ( ) , correlationUid , notificationMessage , notificationType ) ; } +" +458,"public static List < CartridgeInfoBean > fetchSubscriptionDataList ( ) throws ArtifactLoadingException { String cartridgeString = readUrl ( System . getProperty ( Constants . BASE_URL400 ) + Constants . URL_SUBSCRIPTION ) ; String cartridgeListString ; if ( cartridgeString != null ) { cartridgeListString = cartridgeString . substring ( cartridgeString . indexOf ( '[' ) , ( cartridgeString . lastIndexOf ( ']' ) + 1 ) ) ; } else { String msg = ""Error while fetching subscription data list"" ; throw new ArtifactLoadingException ( msg ) ; } return gson . fromJson ( cartridgeListString , new TypeToken < List < CartridgeInfoBean > > ( ) { } . getType ( ) ) ; } +","public static List < CartridgeInfoBean > fetchSubscriptionDataList ( ) throws ArtifactLoadingException { String cartridgeString = readUrl ( System . getProperty ( Constants . BASE_URL400 ) + Constants . URL_SUBSCRIPTION ) ; String cartridgeListString ; if ( cartridgeString != null ) { cartridgeListString = cartridgeString . substring ( cartridgeString . indexOf ( '[' ) , ( cartridgeString . lastIndexOf ( ']' ) + 1 ) ) ; } else { String msg = ""Error while fetching subscription data list"" ; log . error ( msg ) ; throw new ArtifactLoadingException ( msg ) ; } return gson . fromJson ( cartridgeListString , new TypeToken < List < CartridgeInfoBean > > ( ) { } . getType ( ) ) ; } +" +459,"public static void register ( final Logger LOG ) { synchronized ( SignalHandler . class ) { if ( registered ) { return ; } registered = true ; final String [ ] SIGNALS = OperatingSystem . isWindows ( ) ? new String [ ] { ""TERM"" , ""INT"" } : new String [ ] { ""TERM"" , ""HUP"" , ""INT"" } ; StringBuilder bld = new StringBuilder ( ) ; bld . append ( ""Registered UNIX signal handlers for ["" ) ; String separator = """" ; for ( String signalName : SIGNALS ) { try { new Handler ( signalName , LOG ) ; bld . append ( separator ) ; bld . append ( signalName ) ; separator = "", "" ; } catch ( Exception e ) { } } bld . append ( ""]"" ) ; LOG . info ( bld . toString ( ) ) ; } } +","public static void register ( final Logger LOG ) { synchronized ( SignalHandler . class ) { if ( registered ) { return ; } registered = true ; final String [ ] SIGNALS = OperatingSystem . isWindows ( ) ? new String [ ] { ""TERM"" , ""INT"" } : new String [ ] { ""TERM"" , ""HUP"" , ""INT"" } ; StringBuilder bld = new StringBuilder ( ) ; bld . append ( ""Registered UNIX signal handlers for ["" ) ; String separator = """" ; for ( String signalName : SIGNALS ) { try { new Handler ( signalName , LOG ) ; bld . append ( separator ) ; bld . append ( signalName ) ; separator = "", "" ; } catch ( Exception e ) { LOG . info ( ""Error while registering signal handler"" , e ) ; } } bld . append ( ""]"" ) ; LOG . info ( bld . toString ( ) ) ; } } +" +460,"public static void register ( final Logger LOG ) { synchronized ( SignalHandler . class ) { if ( registered ) { return ; } registered = true ; final String [ ] SIGNALS = OperatingSystem . isWindows ( ) ? new String [ ] { ""TERM"" , ""INT"" } : new String [ ] { ""TERM"" , ""HUP"" , ""INT"" } ; StringBuilder bld = new StringBuilder ( ) ; bld . append ( ""Registered UNIX signal handlers for ["" ) ; String separator = """" ; for ( String signalName : SIGNALS ) { try { new Handler ( signalName , LOG ) ; bld . append ( separator ) ; bld . append ( signalName ) ; separator = "", "" ; } catch ( Exception e ) { LOG . 
info ( ""Error while registering signal handler"" , e ) ; } } bld . append ( ""]"" ) ; } } +","public static void register ( final Logger LOG ) { synchronized ( SignalHandler . class ) { if ( registered ) { return ; } registered = true ; final String [ ] SIGNALS = OperatingSystem . isWindows ( ) ? new String [ ] { ""TERM"" , ""INT"" } : new String [ ] { ""TERM"" , ""HUP"" , ""INT"" } ; StringBuilder bld = new StringBuilder ( ) ; bld . append ( ""Registered UNIX signal handlers for ["" ) ; String separator = """" ; for ( String signalName : SIGNALS ) { try { new Handler ( signalName , LOG ) ; bld . append ( separator ) ; bld . append ( signalName ) ; separator = "", "" ; } catch ( Exception e ) { LOG . info ( ""Error while registering signal handler"" , e ) ; } } bld . append ( ""]"" ) ; LOG . info ( bld . toString ( ) ) ; } } +" +461,"protected void onStreamUnstalled ( IStream stream ) { Long time = streamsStalls . remove ( stream ) ; if ( time != null ) { long stallTime = System . nanoTime ( ) - time ; streamsStallTime . addAndGet ( stallTime ) ; if ( LOG . isDebugEnabled ( ) ) } } +","protected void onStreamUnstalled ( IStream stream ) { Long time = streamsStalls . remove ( stream ) ; if ( time != null ) { long stallTime = System . nanoTime ( ) - time ; streamsStallTime . addAndGet ( stallTime ) ; if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Stream unstalled after {} ms {}"" , TimeUnit . NANOSECONDS . toMillis ( stallTime ) , stream ) ; } } +" +462,"public void generateJson ( String prefix , PrintWriter pw , VWorkspace vWorkspace ) { HNodePath currentColumnPath = null ; Worksheet worksheet = workspace . getWorksheet ( worksheetId ) ; List < HNodePath > paths = worksheet . getHeaders ( ) . getAllPaths ( ) ; for ( HNodePath path : paths ) { if ( path . getLeaf ( ) . getId ( ) . equals ( hNodeId ) ) { currentColumnPath = path ; break ; } } SemanticTypeColumnModel model = new SemanticTypeUtil ( ) . predictColumnSemanticType ( workspace , worksheet , currentColumnPath , 4 , selection ) ; OntologyManager ontMgr = workspace . getOntologyManager ( ) ; Alignment alignment = AlignmentManager . Instance ( ) . getAlignment ( workspace . getId ( ) , worksheetId ) ; ColumnNode columnNode = alignment . getColumnNodeByHNodeId ( hNodeId ) ; if ( columnNode . getLearnedSemanticTypes ( ) == null ) { columnNode . setLearnedSemanticTypes ( new SemanticTypeUtil ( ) . getSuggestedTypes ( ontMgr , columnNode , model ) ) ; if ( columnNode . getLearnedSemanticTypes ( ) . isEmpty ( ) ) { logger . info ( ""no semantic type learned for the column "" + hNodeId ) ; } } JSONObject result ; if ( model != null ) { if ( classUri == null ) result = model . getAsJSONObject ( ontMgr , alignment ) ; else { model = new SemanticTypeUtil ( ) . predictColumnSemanticType ( workspace , worksheet , currentColumnPath , 4 , selection ) ; result = model . getAsJSONObject ( classUri , ontMgr , 4 ) ; } } else { result = new JSONObject ( ) ; result . put ( ""Labels"" , new JSONArray ( ) ) ; } pw . println ( result . toString ( ) ) ; } +","public void generateJson ( String prefix , PrintWriter pw , VWorkspace vWorkspace ) { HNodePath currentColumnPath = null ; Worksheet worksheet = workspace . getWorksheet ( worksheetId ) ; List < HNodePath > paths = worksheet . getHeaders ( ) . getAllPaths ( ) ; for ( HNodePath path : paths ) { if ( path . getLeaf ( ) . getId ( ) . equals ( hNodeId ) ) { currentColumnPath = path ; break ; } } SemanticTypeColumnModel model = new SemanticTypeUtil ( ) . 
predictColumnSemanticType ( workspace , worksheet , currentColumnPath , 4 , selection ) ; OntologyManager ontMgr = workspace . getOntologyManager ( ) ; Alignment alignment = AlignmentManager . Instance ( ) . getAlignment ( workspace . getId ( ) , worksheetId ) ; ColumnNode columnNode = alignment . getColumnNodeByHNodeId ( hNodeId ) ; if ( columnNode . getLearnedSemanticTypes ( ) == null ) { logger . debug ( ""adding learned semantic types to the column "" + hNodeId ) ; columnNode . setLearnedSemanticTypes ( new SemanticTypeUtil ( ) . getSuggestedTypes ( ontMgr , columnNode , model ) ) ; if ( columnNode . getLearnedSemanticTypes ( ) . isEmpty ( ) ) { logger . info ( ""no semantic type learned for the column "" + hNodeId ) ; } } JSONObject result ; if ( model != null ) { if ( classUri == null ) result = model . getAsJSONObject ( ontMgr , alignment ) ; else { model = new SemanticTypeUtil ( ) . predictColumnSemanticType ( workspace , worksheet , currentColumnPath , 4 , selection ) ; result = model . getAsJSONObject ( classUri , ontMgr , 4 ) ; } } else { result = new JSONObject ( ) ; result . put ( ""Labels"" , new JSONArray ( ) ) ; } pw . println ( result . toString ( ) ) ; } +" +463,"public void generateJson ( String prefix , PrintWriter pw , VWorkspace vWorkspace ) { HNodePath currentColumnPath = null ; Worksheet worksheet = workspace . getWorksheet ( worksheetId ) ; List < HNodePath > paths = worksheet . getHeaders ( ) . getAllPaths ( ) ; for ( HNodePath path : paths ) { if ( path . getLeaf ( ) . getId ( ) . equals ( hNodeId ) ) { currentColumnPath = path ; break ; } } SemanticTypeColumnModel model = new SemanticTypeUtil ( ) . predictColumnSemanticType ( workspace , worksheet , currentColumnPath , 4 , selection ) ; OntologyManager ontMgr = workspace . getOntologyManager ( ) ; Alignment alignment = AlignmentManager . Instance ( ) . getAlignment ( workspace . getId ( ) , worksheetId ) ; ColumnNode columnNode = alignment . getColumnNodeByHNodeId ( hNodeId ) ; if ( columnNode . getLearnedSemanticTypes ( ) == null ) { logger . debug ( ""adding learned semantic types to the column "" + hNodeId ) ; columnNode . setLearnedSemanticTypes ( new SemanticTypeUtil ( ) . getSuggestedTypes ( ontMgr , columnNode , model ) ) ; if ( columnNode . getLearnedSemanticTypes ( ) . isEmpty ( ) ) { } } JSONObject result ; if ( model != null ) { if ( classUri == null ) result = model . getAsJSONObject ( ontMgr , alignment ) ; else { model = new SemanticTypeUtil ( ) . predictColumnSemanticType ( workspace , worksheet , currentColumnPath , 4 , selection ) ; result = model . getAsJSONObject ( classUri , ontMgr , 4 ) ; } } else { result = new JSONObject ( ) ; result . put ( ""Labels"" , new JSONArray ( ) ) ; } pw . println ( result . toString ( ) ) ; } +","public void generateJson ( String prefix , PrintWriter pw , VWorkspace vWorkspace ) { HNodePath currentColumnPath = null ; Worksheet worksheet = workspace . getWorksheet ( worksheetId ) ; List < HNodePath > paths = worksheet . getHeaders ( ) . getAllPaths ( ) ; for ( HNodePath path : paths ) { if ( path . getLeaf ( ) . getId ( ) . equals ( hNodeId ) ) { currentColumnPath = path ; break ; } } SemanticTypeColumnModel model = new SemanticTypeUtil ( ) . predictColumnSemanticType ( workspace , worksheet , currentColumnPath , 4 , selection ) ; OntologyManager ontMgr = workspace . getOntologyManager ( ) ; Alignment alignment = AlignmentManager . Instance ( ) . getAlignment ( workspace . getId ( ) , worksheetId ) ; ColumnNode columnNode = alignment . 
getColumnNodeByHNodeId ( hNodeId ) ; if ( columnNode . getLearnedSemanticTypes ( ) == null ) { logger . debug ( ""adding learned semantic types to the column "" + hNodeId ) ; columnNode . setLearnedSemanticTypes ( new SemanticTypeUtil ( ) . getSuggestedTypes ( ontMgr , columnNode , model ) ) ; if ( columnNode . getLearnedSemanticTypes ( ) . isEmpty ( ) ) { logger . info ( ""no semantic type learned for the column "" + hNodeId ) ; } } JSONObject result ; if ( model != null ) { if ( classUri == null ) result = model . getAsJSONObject ( ontMgr , alignment ) ; else { model = new SemanticTypeUtil ( ) . predictColumnSemanticType ( workspace , worksheet , currentColumnPath , 4 , selection ) ; result = model . getAsJSONObject ( classUri , ontMgr , 4 ) ; } } else { result = new JSONObject ( ) ; result . put ( ""Labels"" , new JSONArray ( ) ) ; } pw . println ( result . toString ( ) ) ; } +" +464,"public void close ( ) { setClosing ( ) ; synchronized ( this ) { if ( pollingTimer != null ) { pollingTimer . cancel ( true ) ; } pollingScheduler . shutdown ( ) ; stopRetryTimer ( ) ; } synchronized ( transactionListeners ) { for ( SpiListener listener : transactionListeners ) { listener . transactionComplete ( ) ; } } timer . shutdownNow ( ) ; executor . shutdownNow ( ) ; try { parserThread . interrupt ( ) ; parserThread . join ( ) ; logger . debug ( ""SpiFrameHandler close complete."" ) ; } catch ( InterruptedException e ) { logger . warn ( ""SpiFrameHandler interrupted in packet parser thread shutdown join."" ) ; } } +","public void close ( ) { logger . debug ( ""SpiFrameHandler close."" ) ; setClosing ( ) ; synchronized ( this ) { if ( pollingTimer != null ) { pollingTimer . cancel ( true ) ; } pollingScheduler . shutdown ( ) ; stopRetryTimer ( ) ; } synchronized ( transactionListeners ) { for ( SpiListener listener : transactionListeners ) { listener . transactionComplete ( ) ; } } timer . shutdownNow ( ) ; executor . shutdownNow ( ) ; try { parserThread . interrupt ( ) ; parserThread . join ( ) ; logger . debug ( ""SpiFrameHandler close complete."" ) ; } catch ( InterruptedException e ) { logger . warn ( ""SpiFrameHandler interrupted in packet parser thread shutdown join."" ) ; } } +" +465,"public void close ( ) { logger . debug ( ""SpiFrameHandler close."" ) ; setClosing ( ) ; synchronized ( this ) { if ( pollingTimer != null ) { pollingTimer . cancel ( true ) ; } pollingScheduler . shutdown ( ) ; stopRetryTimer ( ) ; } synchronized ( transactionListeners ) { for ( SpiListener listener : transactionListeners ) { listener . transactionComplete ( ) ; } } timer . shutdownNow ( ) ; executor . shutdownNow ( ) ; try { parserThread . interrupt ( ) ; parserThread . join ( ) ; } catch ( InterruptedException e ) { logger . warn ( ""SpiFrameHandler interrupted in packet parser thread shutdown join."" ) ; } } +","public void close ( ) { logger . debug ( ""SpiFrameHandler close."" ) ; setClosing ( ) ; synchronized ( this ) { if ( pollingTimer != null ) { pollingTimer . cancel ( true ) ; } pollingScheduler . shutdown ( ) ; stopRetryTimer ( ) ; } synchronized ( transactionListeners ) { for ( SpiListener listener : transactionListeners ) { listener . transactionComplete ( ) ; } } timer . shutdownNow ( ) ; executor . shutdownNow ( ) ; try { parserThread . interrupt ( ) ; parserThread . join ( ) ; logger . debug ( ""SpiFrameHandler close complete."" ) ; } catch ( InterruptedException e ) { logger . 
warn ( ""SpiFrameHandler interrupted in packet parser thread shutdown join."" ) ; } } +" +466,"public void close ( ) { logger . debug ( ""SpiFrameHandler close."" ) ; setClosing ( ) ; synchronized ( this ) { if ( pollingTimer != null ) { pollingTimer . cancel ( true ) ; } pollingScheduler . shutdown ( ) ; stopRetryTimer ( ) ; } synchronized ( transactionListeners ) { for ( SpiListener listener : transactionListeners ) { listener . transactionComplete ( ) ; } } timer . shutdownNow ( ) ; executor . shutdownNow ( ) ; try { parserThread . interrupt ( ) ; parserThread . join ( ) ; logger . debug ( ""SpiFrameHandler close complete."" ) ; } catch ( InterruptedException e ) { } } +","public void close ( ) { logger . debug ( ""SpiFrameHandler close."" ) ; setClosing ( ) ; synchronized ( this ) { if ( pollingTimer != null ) { pollingTimer . cancel ( true ) ; } pollingScheduler . shutdown ( ) ; stopRetryTimer ( ) ; } synchronized ( transactionListeners ) { for ( SpiListener listener : transactionListeners ) { listener . transactionComplete ( ) ; } } timer . shutdownNow ( ) ; executor . shutdownNow ( ) ; try { parserThread . interrupt ( ) ; parserThread . join ( ) ; logger . debug ( ""SpiFrameHandler close complete."" ) ; } catch ( InterruptedException e ) { logger . warn ( ""SpiFrameHandler interrupted in packet parser thread shutdown join."" ) ; } } +" +467,"protected void processRequest ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { final HttpServletRequest httpRequest = request ; final HttpServletResponse httpResponse = response ; httpResponse . setContentType ( ""application/jrd+json"" ) ; PrintWriter out = httpResponse . getWriter ( ) ; String resource = httpRequest . getParameter ( RESOURCE ) ; String rel = httpRequest . getParameter ( REL ) ; try { if ( OpenIdConnectDiscoveryParamsValidator . validateParams ( resource , rel ) ) { if ( rel == null || rel . equals ( REL_VALUE ) ) { JSONObject jsonObj = new JSONObject ( ) ; jsonObj . put ( SUBJECT , resource ) ; JSONArray linksJsonArray = new JSONArray ( ) ; JSONObject linkJsonObject = new JSONObject ( ) ; linkJsonObject . put ( REL , REL_VALUE ) ; linkJsonObject . put ( HREF , appConfiguration . getIssuer ( ) ) ; linksJsonArray . put ( linkJsonObject ) ; jsonObj . put ( LINKS , linksJsonArray ) ; out . println ( jsonObj . toString ( 4 ) . replace ( ""\\/"" , ""/"" ) ) ; } } } catch ( JSONException e ) { log . error ( e . getMessage ( ) , e ) ; } out . close ( ) ; } +","protected void processRequest ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { final HttpServletRequest httpRequest = request ; final HttpServletResponse httpResponse = response ; httpResponse . setContentType ( ""application/jrd+json"" ) ; PrintWriter out = httpResponse . getWriter ( ) ; String resource = httpRequest . getParameter ( RESOURCE ) ; String rel = httpRequest . getParameter ( REL ) ; log . debug ( ""Attempting to request OpenID Connect Discovery: "" + resource + "", "" + rel + "", Is Secure = "" + httpRequest . isSecure ( ) ) ; try { if ( OpenIdConnectDiscoveryParamsValidator . validateParams ( resource , rel ) ) { if ( rel == null || rel . equals ( REL_VALUE ) ) { JSONObject jsonObj = new JSONObject ( ) ; jsonObj . put ( SUBJECT , resource ) ; JSONArray linksJsonArray = new JSONArray ( ) ; JSONObject linkJsonObject = new JSONObject ( ) ; linkJsonObject . put ( REL , REL_VALUE ) ; linkJsonObject . put ( HREF , appConfiguration . 
getIssuer ( ) ) ; linksJsonArray . put ( linkJsonObject ) ; jsonObj . put ( LINKS , linksJsonArray ) ; out . println ( jsonObj . toString ( 4 ) . replace ( ""\\/"" , ""/"" ) ) ; } } } catch ( JSONException e ) { log . error ( e . getMessage ( ) , e ) ; } out . close ( ) ; } +" +468,"protected void processRequest ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { final HttpServletRequest httpRequest = request ; final HttpServletResponse httpResponse = response ; httpResponse . setContentType ( ""application/jrd+json"" ) ; PrintWriter out = httpResponse . getWriter ( ) ; String resource = httpRequest . getParameter ( RESOURCE ) ; String rel = httpRequest . getParameter ( REL ) ; log . debug ( ""Attempting to request OpenID Connect Discovery: "" + resource + "", "" + rel + "", Is Secure = "" + httpRequest . isSecure ( ) ) ; try { if ( OpenIdConnectDiscoveryParamsValidator . validateParams ( resource , rel ) ) { if ( rel == null || rel . equals ( REL_VALUE ) ) { JSONObject jsonObj = new JSONObject ( ) ; jsonObj . put ( SUBJECT , resource ) ; JSONArray linksJsonArray = new JSONArray ( ) ; JSONObject linkJsonObject = new JSONObject ( ) ; linkJsonObject . put ( REL , REL_VALUE ) ; linkJsonObject . put ( HREF , appConfiguration . getIssuer ( ) ) ; linksJsonArray . put ( linkJsonObject ) ; jsonObj . put ( LINKS , linksJsonArray ) ; out . println ( jsonObj . toString ( 4 ) . replace ( ""\\/"" , ""/"" ) ) ; } } } catch ( JSONException e ) { } out . close ( ) ; } +","protected void processRequest ( HttpServletRequest request , HttpServletResponse response ) throws ServletException , IOException { final HttpServletRequest httpRequest = request ; final HttpServletResponse httpResponse = response ; httpResponse . setContentType ( ""application/jrd+json"" ) ; PrintWriter out = httpResponse . getWriter ( ) ; String resource = httpRequest . getParameter ( RESOURCE ) ; String rel = httpRequest . getParameter ( REL ) ; log . debug ( ""Attempting to request OpenID Connect Discovery: "" + resource + "", "" + rel + "", Is Secure = "" + httpRequest . isSecure ( ) ) ; try { if ( OpenIdConnectDiscoveryParamsValidator . validateParams ( resource , rel ) ) { if ( rel == null || rel . equals ( REL_VALUE ) ) { JSONObject jsonObj = new JSONObject ( ) ; jsonObj . put ( SUBJECT , resource ) ; JSONArray linksJsonArray = new JSONArray ( ) ; JSONObject linkJsonObject = new JSONObject ( ) ; linkJsonObject . put ( REL , REL_VALUE ) ; linkJsonObject . put ( HREF , appConfiguration . getIssuer ( ) ) ; linksJsonArray . put ( linkJsonObject ) ; jsonObj . put ( LINKS , linksJsonArray ) ; out . println ( jsonObj . toString ( 4 ) . replace ( ""\\/"" , ""/"" ) ) ; } } } catch ( JSONException e ) { log . error ( e . getMessage ( ) , e ) ; } out . close ( ) ; } +" +469,"public boolean processResponses ( NodeStateOrHostInfoChangeHandler listener ) { boolean processedAnyResponses = false ; long currentTime = timer . getCurrentTimeInMillis ( ) ; synchronized ( monitor ) { for ( GetNodeStateRequest req : replies ) { processedAnyResponses = true ; NodeInfo info = req . getNodeInfo ( ) ; if ( ! info . isPendingGetNodeStateRequest ( req ) ) { continue ; } info . removePendingGetNodeStateRequest ( req ) ; GetNodeStateRequest . Reply reply = req . getReply ( ) ; if ( reply . isError ( ) ) { if ( reply . getReturnCode ( ) != ErrorCode . ABORT ) { NodeState newState = handleError ( req , info , currentTime ) ; if ( newState != null ) { listener . handleNewNodeState ( info , newState . 
clone ( ) ) ; info . setReportedState ( newState , currentTime ) ; } else { log . log ( Level . FINE , ( ) -> ""Ignoring get node state error. Need to resend"" ) ; } } else { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as it was aborted by client"" ) ; } continue ; } try { NodeState state = NodeState . deserialize ( info . getNode ( ) . getType ( ) , reply . getStateString ( ) ) ; if ( ! state . equals ( info . getReportedState ( ) ) ) listener . handleNewNodeState ( info , state . clone ( ) ) ; info . setReportedState ( state , currentTime ) ; } catch ( Exception e ) { log . log ( Level . WARNING , ""Failed to process get node state response"" , e ) ; info . setReportedState ( new NodeState ( info . getNode ( ) . getType ( ) , State . DOWN ) , currentTime ) ; } HostInfo hostInfo = HostInfo . createHostInfo ( reply . getHostInfo ( ) ) ; listener . handleUpdatedHostInfo ( info , hostInfo ) ; info . setHostInfo ( hostInfo ) ; } replies . clear ( ) ; } return processedAnyResponses ; } +","public boolean processResponses ( NodeStateOrHostInfoChangeHandler listener ) { boolean processedAnyResponses = false ; long currentTime = timer . getCurrentTimeInMillis ( ) ; synchronized ( monitor ) { for ( GetNodeStateRequest req : replies ) { processedAnyResponses = true ; NodeInfo info = req . getNodeInfo ( ) ; if ( ! info . isPendingGetNodeStateRequest ( req ) ) { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as request replied to is not the most recent pending request."" ) ; continue ; } info . removePendingGetNodeStateRequest ( req ) ; GetNodeStateRequest . Reply reply = req . getReply ( ) ; if ( reply . isError ( ) ) { if ( reply . getReturnCode ( ) != ErrorCode . ABORT ) { NodeState newState = handleError ( req , info , currentTime ) ; if ( newState != null ) { listener . handleNewNodeState ( info , newState . clone ( ) ) ; info . setReportedState ( newState , currentTime ) ; } else { log . log ( Level . FINE , ( ) -> ""Ignoring get node state error. Need to resend"" ) ; } } else { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as it was aborted by client"" ) ; } continue ; } try { NodeState state = NodeState . deserialize ( info . getNode ( ) . getType ( ) , reply . getStateString ( ) ) ; if ( ! state . equals ( info . getReportedState ( ) ) ) listener . handleNewNodeState ( info , state . clone ( ) ) ; info . setReportedState ( state , currentTime ) ; } catch ( Exception e ) { log . log ( Level . WARNING , ""Failed to process get node state response"" , e ) ; info . setReportedState ( new NodeState ( info . getNode ( ) . getType ( ) , State . DOWN ) , currentTime ) ; } HostInfo hostInfo = HostInfo . createHostInfo ( reply . getHostInfo ( ) ) ; listener . handleUpdatedHostInfo ( info , hostInfo ) ; info . setHostInfo ( hostInfo ) ; } replies . clear ( ) ; } return processedAnyResponses ; } +" +470,"public boolean processResponses ( NodeStateOrHostInfoChangeHandler listener ) { boolean processedAnyResponses = false ; long currentTime = timer . getCurrentTimeInMillis ( ) ; synchronized ( monitor ) { for ( GetNodeStateRequest req : replies ) { processedAnyResponses = true ; NodeInfo info = req . getNodeInfo ( ) ; if ( ! info . isPendingGetNodeStateRequest ( req ) ) { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . 
getNode ( ) + "" as request replied to is not the most recent pending request."" ) ; continue ; } info . removePendingGetNodeStateRequest ( req ) ; GetNodeStateRequest . Reply reply = req . getReply ( ) ; if ( reply . isError ( ) ) { if ( reply . getReturnCode ( ) != ErrorCode . ABORT ) { NodeState newState = handleError ( req , info , currentTime ) ; if ( newState != null ) { listener . handleNewNodeState ( info , newState . clone ( ) ) ; info . setReportedState ( newState , currentTime ) ; } else { } } else { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as it was aborted by client"" ) ; } continue ; } try { NodeState state = NodeState . deserialize ( info . getNode ( ) . getType ( ) , reply . getStateString ( ) ) ; if ( ! state . equals ( info . getReportedState ( ) ) ) listener . handleNewNodeState ( info , state . clone ( ) ) ; info . setReportedState ( state , currentTime ) ; } catch ( Exception e ) { log . log ( Level . WARNING , ""Failed to process get node state response"" , e ) ; info . setReportedState ( new NodeState ( info . getNode ( ) . getType ( ) , State . DOWN ) , currentTime ) ; } HostInfo hostInfo = HostInfo . createHostInfo ( reply . getHostInfo ( ) ) ; listener . handleUpdatedHostInfo ( info , hostInfo ) ; info . setHostInfo ( hostInfo ) ; } replies . clear ( ) ; } return processedAnyResponses ; } +","public boolean processResponses ( NodeStateOrHostInfoChangeHandler listener ) { boolean processedAnyResponses = false ; long currentTime = timer . getCurrentTimeInMillis ( ) ; synchronized ( monitor ) { for ( GetNodeStateRequest req : replies ) { processedAnyResponses = true ; NodeInfo info = req . getNodeInfo ( ) ; if ( ! info . isPendingGetNodeStateRequest ( req ) ) { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as request replied to is not the most recent pending request."" ) ; continue ; } info . removePendingGetNodeStateRequest ( req ) ; GetNodeStateRequest . Reply reply = req . getReply ( ) ; if ( reply . isError ( ) ) { if ( reply . getReturnCode ( ) != ErrorCode . ABORT ) { NodeState newState = handleError ( req , info , currentTime ) ; if ( newState != null ) { listener . handleNewNodeState ( info , newState . clone ( ) ) ; info . setReportedState ( newState , currentTime ) ; } else { log . log ( Level . FINE , ( ) -> ""Ignoring get node state error. Need to resend"" ) ; } } else { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as it was aborted by client"" ) ; } continue ; } try { NodeState state = NodeState . deserialize ( info . getNode ( ) . getType ( ) , reply . getStateString ( ) ) ; if ( ! state . equals ( info . getReportedState ( ) ) ) listener . handleNewNodeState ( info , state . clone ( ) ) ; info . setReportedState ( state , currentTime ) ; } catch ( Exception e ) { log . log ( Level . WARNING , ""Failed to process get node state response"" , e ) ; info . setReportedState ( new NodeState ( info . getNode ( ) . getType ( ) , State . DOWN ) , currentTime ) ; } HostInfo hostInfo = HostInfo . createHostInfo ( reply . getHostInfo ( ) ) ; listener . handleUpdatedHostInfo ( info , hostInfo ) ; info . setHostInfo ( hostInfo ) ; } replies . clear ( ) ; } return processedAnyResponses ; } +" +471,"public boolean processResponses ( NodeStateOrHostInfoChangeHandler listener ) { boolean processedAnyResponses = false ; long currentTime = timer . 
getCurrentTimeInMillis ( ) ; synchronized ( monitor ) { for ( GetNodeStateRequest req : replies ) { processedAnyResponses = true ; NodeInfo info = req . getNodeInfo ( ) ; if ( ! info . isPendingGetNodeStateRequest ( req ) ) { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as request replied to is not the most recent pending request."" ) ; continue ; } info . removePendingGetNodeStateRequest ( req ) ; GetNodeStateRequest . Reply reply = req . getReply ( ) ; if ( reply . isError ( ) ) { if ( reply . getReturnCode ( ) != ErrorCode . ABORT ) { NodeState newState = handleError ( req , info , currentTime ) ; if ( newState != null ) { listener . handleNewNodeState ( info , newState . clone ( ) ) ; info . setReportedState ( newState , currentTime ) ; } else { log . log ( Level . FINE , ( ) -> ""Ignoring get node state error. Need to resend"" ) ; } } else { } continue ; } try { NodeState state = NodeState . deserialize ( info . getNode ( ) . getType ( ) , reply . getStateString ( ) ) ; if ( ! state . equals ( info . getReportedState ( ) ) ) listener . handleNewNodeState ( info , state . clone ( ) ) ; info . setReportedState ( state , currentTime ) ; } catch ( Exception e ) { log . log ( Level . WARNING , ""Failed to process get node state response"" , e ) ; info . setReportedState ( new NodeState ( info . getNode ( ) . getType ( ) , State . DOWN ) , currentTime ) ; } HostInfo hostInfo = HostInfo . createHostInfo ( reply . getHostInfo ( ) ) ; listener . handleUpdatedHostInfo ( info , hostInfo ) ; info . setHostInfo ( hostInfo ) ; } replies . clear ( ) ; } return processedAnyResponses ; } +","public boolean processResponses ( NodeStateOrHostInfoChangeHandler listener ) { boolean processedAnyResponses = false ; long currentTime = timer . getCurrentTimeInMillis ( ) ; synchronized ( monitor ) { for ( GetNodeStateRequest req : replies ) { processedAnyResponses = true ; NodeInfo info = req . getNodeInfo ( ) ; if ( ! info . isPendingGetNodeStateRequest ( req ) ) { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as request replied to is not the most recent pending request."" ) ; continue ; } info . removePendingGetNodeStateRequest ( req ) ; GetNodeStateRequest . Reply reply = req . getReply ( ) ; if ( reply . isError ( ) ) { if ( reply . getReturnCode ( ) != ErrorCode . ABORT ) { NodeState newState = handleError ( req , info , currentTime ) ; if ( newState != null ) { listener . handleNewNodeState ( info , newState . clone ( ) ) ; info . setReportedState ( newState , currentTime ) ; } else { log . log ( Level . FINE , ( ) -> ""Ignoring get node state error. Need to resend"" ) ; } } else { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as it was aborted by client"" ) ; } continue ; } try { NodeState state = NodeState . deserialize ( info . getNode ( ) . getType ( ) , reply . getStateString ( ) ) ; if ( ! state . equals ( info . getReportedState ( ) ) ) listener . handleNewNodeState ( info , state . clone ( ) ) ; info . setReportedState ( state , currentTime ) ; } catch ( Exception e ) { log . log ( Level . WARNING , ""Failed to process get node state response"" , e ) ; info . setReportedState ( new NodeState ( info . getNode ( ) . getType ( ) , State . DOWN ) , currentTime ) ; } HostInfo hostInfo = HostInfo . createHostInfo ( reply . getHostInfo ( ) ) ; listener . handleUpdatedHostInfo ( info , hostInfo ) ; info . 
setHostInfo ( hostInfo ) ; } replies . clear ( ) ; } return processedAnyResponses ; } +" +472,"public boolean processResponses ( NodeStateOrHostInfoChangeHandler listener ) { boolean processedAnyResponses = false ; long currentTime = timer . getCurrentTimeInMillis ( ) ; synchronized ( monitor ) { for ( GetNodeStateRequest req : replies ) { processedAnyResponses = true ; NodeInfo info = req . getNodeInfo ( ) ; if ( ! info . isPendingGetNodeStateRequest ( req ) ) { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as request replied to is not the most recent pending request."" ) ; continue ; } info . removePendingGetNodeStateRequest ( req ) ; GetNodeStateRequest . Reply reply = req . getReply ( ) ; if ( reply . isError ( ) ) { if ( reply . getReturnCode ( ) != ErrorCode . ABORT ) { NodeState newState = handleError ( req , info , currentTime ) ; if ( newState != null ) { listener . handleNewNodeState ( info , newState . clone ( ) ) ; info . setReportedState ( newState , currentTime ) ; } else { log . log ( Level . FINE , ( ) -> ""Ignoring get node state error. Need to resend"" ) ; } } else { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as it was aborted by client"" ) ; } continue ; } try { NodeState state = NodeState . deserialize ( info . getNode ( ) . getType ( ) , reply . getStateString ( ) ) ; if ( ! state . equals ( info . getReportedState ( ) ) ) listener . handleNewNodeState ( info , state . clone ( ) ) ; info . setReportedState ( state , currentTime ) ; } catch ( Exception e ) { info . setReportedState ( new NodeState ( info . getNode ( ) . getType ( ) , State . DOWN ) , currentTime ) ; } HostInfo hostInfo = HostInfo . createHostInfo ( reply . getHostInfo ( ) ) ; listener . handleUpdatedHostInfo ( info , hostInfo ) ; info . setHostInfo ( hostInfo ) ; } replies . clear ( ) ; } return processedAnyResponses ; } +","public boolean processResponses ( NodeStateOrHostInfoChangeHandler listener ) { boolean processedAnyResponses = false ; long currentTime = timer . getCurrentTimeInMillis ( ) ; synchronized ( monitor ) { for ( GetNodeStateRequest req : replies ) { processedAnyResponses = true ; NodeInfo info = req . getNodeInfo ( ) ; if ( ! info . isPendingGetNodeStateRequest ( req ) ) { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as request replied to is not the most recent pending request."" ) ; continue ; } info . removePendingGetNodeStateRequest ( req ) ; GetNodeStateRequest . Reply reply = req . getReply ( ) ; if ( reply . isError ( ) ) { if ( reply . getReturnCode ( ) != ErrorCode . ABORT ) { NodeState newState = handleError ( req , info , currentTime ) ; if ( newState != null ) { listener . handleNewNodeState ( info , newState . clone ( ) ) ; info . setReportedState ( newState , currentTime ) ; } else { log . log ( Level . FINE , ( ) -> ""Ignoring get node state error. Need to resend"" ) ; } } else { log . log ( Level . FINE , ( ) -> ""Ignoring getnodestate response from "" + info . getNode ( ) + "" as it was aborted by client"" ) ; } continue ; } try { NodeState state = NodeState . deserialize ( info . getNode ( ) . getType ( ) , reply . getStateString ( ) ) ; if ( ! state . equals ( info . getReportedState ( ) ) ) listener . handleNewNodeState ( info , state . clone ( ) ) ; info . setReportedState ( state , currentTime ) ; } catch ( Exception e ) { log . log ( Level . 
WARNING , ""Failed to process get node state response"" , e ) ; info . setReportedState ( new NodeState ( info . getNode ( ) . getType ( ) , State . DOWN ) , currentTime ) ; } HostInfo hostInfo = HostInfo . createHostInfo ( reply . getHostInfo ( ) ) ; listener . handleUpdatedHostInfo ( info , hostInfo ) ; info . setHostInfo ( hostInfo ) ; } replies . clear ( ) ; } return processedAnyResponses ; } +" +473,"public ImportLocalizedAssetBody importLocalizedAssetForContent ( Long assetId , Long localeId , String content , ImportLocalizedAssetBody . StatusForEqualTarget statusForEqualTarget , FilterConfigIdOverride filterConfigIdOverride , List < String > filterOptions ) { UriComponentsBuilder uriBuilder = UriComponentsBuilder . fromPath ( getBasePathForResource ( assetId , ""localized"" , localeId , ""import"" ) ) ; ImportLocalizedAssetBody importLocalizedAssetBody = new ImportLocalizedAssetBody ( ) ; importLocalizedAssetBody . setContent ( content ) ; importLocalizedAssetBody . setStatusForEqualTarget ( statusForEqualTarget ) ; importLocalizedAssetBody . setFilterConfigIdOverride ( filterConfigIdOverride ) ; importLocalizedAssetBody . setFilterOptions ( filterOptions ) ; return authenticatedRestTemplate . postForObject ( uriBuilder . toUriString ( ) , importLocalizedAssetBody , ImportLocalizedAssetBody . class ) ; } +","public ImportLocalizedAssetBody importLocalizedAssetForContent ( Long assetId , Long localeId , String content , ImportLocalizedAssetBody . StatusForEqualTarget statusForEqualTarget , FilterConfigIdOverride filterConfigIdOverride , List < String > filterOptions ) { logger . debug ( ""Import localized asset with asset id = {}, locale id = {}"" , assetId , localeId ) ; UriComponentsBuilder uriBuilder = UriComponentsBuilder . fromPath ( getBasePathForResource ( assetId , ""localized"" , localeId , ""import"" ) ) ; ImportLocalizedAssetBody importLocalizedAssetBody = new ImportLocalizedAssetBody ( ) ; importLocalizedAssetBody . setContent ( content ) ; importLocalizedAssetBody . setStatusForEqualTarget ( statusForEqualTarget ) ; importLocalizedAssetBody . setFilterConfigIdOverride ( filterConfigIdOverride ) ; importLocalizedAssetBody . setFilterOptions ( filterOptions ) ; return authenticatedRestTemplate . postForObject ( uriBuilder . toUriString ( ) , importLocalizedAssetBody , ImportLocalizedAssetBody . class ) ; } +" +474,"public void addBytes ( String fileName , byte [ ] bytes ) { try { fileName = MimeUtility . decodeText ( fileName ) ; } catch ( UnsupportedEncodingException unsupportedEncodingException ) { if ( _log . isWarnEnabled ( ) ) { } } _bytesOVPs . add ( new ObjectValuePair < String , byte [ ] > ( fileName , bytes ) ) ; } +","public void addBytes ( String fileName , byte [ ] bytes ) { try { fileName = MimeUtility . decodeText ( fileName ) ; } catch ( UnsupportedEncodingException unsupportedEncodingException ) { if ( _log . isWarnEnabled ( ) ) { _log . warn ( ""Unable to decode file name "" + fileName , unsupportedEncodingException ) ; } } _bytesOVPs . add ( new ObjectValuePair < String , byte [ ] > ( fileName , bytes ) ) ; } +" +475,"public void expandLibs ( ) throws IOException { for ( String rawlibref : rawLibs ) { StartLog . debug ( ""rawlibref = "" + rawlibref ) ; String libref = properties . expand ( rawlibref ) ; StartLog . debug ( ""expanded = "" + libref ) ; libref = libref . replaceAll ( ""\\\\([^\\\\])"" , ""\\\\\\\\$1"" ) ; for ( Path libpath : baseHome . getPaths ( libref ) ) { classpath . addComponent ( libpath . 
toFile ( ) ) ; } } } +","public void expandLibs ( ) throws IOException { StartLog . debug ( ""Expanding Libs"" ) ; for ( String rawlibref : rawLibs ) { StartLog . debug ( ""rawlibref = "" + rawlibref ) ; String libref = properties . expand ( rawlibref ) ; StartLog . debug ( ""expanded = "" + libref ) ; libref = libref . replaceAll ( ""\\\\([^\\\\])"" , ""\\\\\\\\$1"" ) ; for ( Path libpath : baseHome . getPaths ( libref ) ) { classpath . addComponent ( libpath . toFile ( ) ) ; } } } +" +476,"public void expandLibs ( ) throws IOException { StartLog . debug ( ""Expanding Libs"" ) ; for ( String rawlibref : rawLibs ) { String libref = properties . expand ( rawlibref ) ; StartLog . debug ( ""expanded = "" + libref ) ; libref = libref . replaceAll ( ""\\\\([^\\\\])"" , ""\\\\\\\\$1"" ) ; for ( Path libpath : baseHome . getPaths ( libref ) ) { classpath . addComponent ( libpath . toFile ( ) ) ; } } } +","public void expandLibs ( ) throws IOException { StartLog . debug ( ""Expanding Libs"" ) ; for ( String rawlibref : rawLibs ) { StartLog . debug ( ""rawlibref = "" + rawlibref ) ; String libref = properties . expand ( rawlibref ) ; StartLog . debug ( ""expanded = "" + libref ) ; libref = libref . replaceAll ( ""\\\\([^\\\\])"" , ""\\\\\\\\$1"" ) ; for ( Path libpath : baseHome . getPaths ( libref ) ) { classpath . addComponent ( libpath . toFile ( ) ) ; } } } +" +477,"public void expandLibs ( ) throws IOException { StartLog . debug ( ""Expanding Libs"" ) ; for ( String rawlibref : rawLibs ) { StartLog . debug ( ""rawlibref = "" + rawlibref ) ; String libref = properties . expand ( rawlibref ) ; libref = libref . replaceAll ( ""\\\\([^\\\\])"" , ""\\\\\\\\$1"" ) ; for ( Path libpath : baseHome . getPaths ( libref ) ) { classpath . addComponent ( libpath . toFile ( ) ) ; } } } +","public void expandLibs ( ) throws IOException { StartLog . debug ( ""Expanding Libs"" ) ; for ( String rawlibref : rawLibs ) { StartLog . debug ( ""rawlibref = "" + rawlibref ) ; String libref = properties . expand ( rawlibref ) ; StartLog . debug ( ""expanded = "" + libref ) ; libref = libref . replaceAll ( ""\\\\([^\\\\])"" , ""\\\\\\\\$1"" ) ; for ( Path libpath : baseHome . getPaths ( libref ) ) { classpath . addComponent ( libpath . toFile ( ) ) ; } } } +" +478,"public void selectBlockers ( Ability source , Game game , UUID defendingPlayerId ) { List < Permanent > blockers = getAvailableBlockers ( game ) ; CombatSimulator sim = simulateBlock ( CombatSimulator . load ( game ) , blockers , game ) ; List < CombatGroup > groups = game . getCombat ( ) . getGroups ( ) ; for ( int i = 0 ; i < groups . size ( ) ; i ++ ) { for ( CreatureSimulator creature : sim . groups . get ( i ) . blockers ) { groups . get ( i ) . addBlocker ( creature . id , playerId , game ) ; } } } +","public void selectBlockers ( Ability source , Game game , UUID defendingPlayerId ) { log . debug ( ""selectBlockers"" ) ; List < Permanent > blockers = getAvailableBlockers ( game ) ; CombatSimulator sim = simulateBlock ( CombatSimulator . load ( game ) , blockers , game ) ; List < CombatGroup > groups = game . getCombat ( ) . getGroups ( ) ; for ( int i = 0 ; i < groups . size ( ) ; i ++ ) { for ( CreatureSimulator creature : sim . groups . get ( i ) . blockers ) { groups . get ( i ) . addBlocker ( creature . id , playerId , game ) ; } } } +" +479,"@ Test public void currentClassLoader ( ) { verify ( originalLogger ) . info ( MESSAGE ) ; } +","@ Test public void currentClassLoader ( ) { logger . info ( MESSAGE ) ; verify ( originalLogger ) . 
info ( MESSAGE ) ; } +" +480,"public AuditEvent remove ( Serializable primaryKey ) throws NoSuchEventException { Session session = null ; try { session = openSession ( ) ; AuditEvent auditEvent = ( AuditEvent ) session . get ( AuditEventImpl . class , primaryKey ) ; if ( auditEvent == null ) { if ( _log . isDebugEnabled ( ) ) { } throw new NoSuchEventException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( auditEvent ) ; } catch ( NoSuchEventException noSuchEntityException ) { throw noSuchEntityException ; } catch ( Exception exception ) { throw processException ( exception ) ; } finally { closeSession ( session ) ; } } +","public AuditEvent remove ( Serializable primaryKey ) throws NoSuchEventException { Session session = null ; try { session = openSession ( ) ; AuditEvent auditEvent = ( AuditEvent ) session . get ( AuditEventImpl . class , primaryKey ) ; if ( auditEvent == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchEventException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( auditEvent ) ; } catch ( NoSuchEventException noSuchEntityException ) { throw noSuchEntityException ; } catch ( Exception exception ) { throw processException ( exception ) ; } finally { closeSession ( session ) ; } } +" +481,"private boolean isModelClass ( String classname , Bundle bundle ) { Class < ? > clazz ; try { clazz = bundle . loadClass ( classname ) ; } catch ( ClassNotFoundException e ) { LOGGER . warn ( ""Bundle could not load its own class: '{}' bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } catch ( NoClassDefFoundError e ) { return false ; } catch ( Error e ) { LOGGER . warn ( ""Error while loading class: '{}' in bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } return clazz . isAnnotationPresent ( Model . class ) ; } +","private boolean isModelClass ( String classname , Bundle bundle ) { LOGGER . debug ( ""Check if class '{}' is a model class"" , classname ) ; Class < ? > clazz ; try { clazz = bundle . loadClass ( classname ) ; } catch ( ClassNotFoundException e ) { LOGGER . warn ( ""Bundle could not load its own class: '{}' bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } catch ( NoClassDefFoundError e ) { return false ; } catch ( Error e ) { LOGGER . warn ( ""Error while loading class: '{}' in bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } return clazz . isAnnotationPresent ( Model . class ) ; } +" +482,"private boolean isModelClass ( String classname , Bundle bundle ) { LOGGER . debug ( ""Check if class '{}' is a model class"" , classname ) ; Class < ? > clazz ; try { clazz = bundle . loadClass ( classname ) ; } catch ( ClassNotFoundException e ) { LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } catch ( NoClassDefFoundError e ) { return false ; } catch ( Error e ) { LOGGER . warn ( ""Error while loading class: '{}' in bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } return clazz . isAnnotationPresent ( Model . class ) ; } +","private boolean isModelClass ( String classname , Bundle bundle ) { LOGGER . debug ( ""Check if class '{}' is a model class"" , classname ) ; Class < ? 
> clazz ; try { clazz = bundle . loadClass ( classname ) ; } catch ( ClassNotFoundException e ) { LOGGER . warn ( ""Bundle could not load its own class: '{}' bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } catch ( NoClassDefFoundError e ) { return false ; } catch ( Error e ) { LOGGER . warn ( ""Error while loading class: '{}' in bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } return clazz . isAnnotationPresent ( Model . class ) ; } +" +483,"private boolean isModelClass ( String classname , Bundle bundle ) { LOGGER . debug ( ""Check if class '{}' is a model class"" , classname ) ; Class < ? > clazz ; try { clazz = bundle . loadClass ( classname ) ; } catch ( ClassNotFoundException e ) { LOGGER . warn ( ""Bundle could not load its own class: '{}' bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } catch ( NoClassDefFoundError e ) { return false ; } catch ( Error e ) { LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } return clazz . isAnnotationPresent ( Model . class ) ; } +","private boolean isModelClass ( String classname , Bundle bundle ) { LOGGER . debug ( ""Check if class '{}' is a model class"" , classname ) ; Class < ? > clazz ; try { clazz = bundle . loadClass ( classname ) ; } catch ( ClassNotFoundException e ) { LOGGER . warn ( ""Bundle could not load its own class: '{}' bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } catch ( NoClassDefFoundError e ) { return false ; } catch ( Error e ) { LOGGER . warn ( ""Error while loading class: '{}' in bundle: '{}'"" , classname , bundle . getSymbolicName ( ) ) ; LOGGER . debug ( ""Exact error: "" , e ) ; return false ; } return clazz . isAnnotationPresent ( Model . class ) ; } +" +484,"public static void handleUseOverridePropertiesWithPropertiesComponent ( ConfigurableApplicationContext context , Class < ? > testClass ) throws Exception { Collection < Method > methods = CamelSpringTestHelper . getAllMethods ( testClass ) ; final List < Properties > properties = new LinkedList < > ( ) ; for ( Method method : methods ) { if ( AnnotationUtils . findAnnotation ( method , UseOverridePropertiesWithPropertiesComponent . class ) != null ) { Class < ? > [ ] argTypes = method . getParameterTypes ( ) ; if ( argTypes . length > 0 ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not a no-argument method."" ) ; } else if ( ! Properties . class . isAssignableFrom ( method . getReturnType ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but does not return a java.util.Properties."" ) ; } else if ( ! Modifier . isStatic ( method . getModifiers ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not static."" ) ; } else if ( ! Modifier . isPublic ( method . getModifiers ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not public."" ) ; } try { properties . add ( ( Properties ) method . 
invoke ( null ) ) ; } catch ( Exception e ) { throw new RuntimeException ( ""Method ["" + method . getName ( ) + ""] threw exception during evaluation."" , e ) ; } } } Properties extra = new Properties ( ) ; for ( Properties prop : properties ) { extra . putAll ( prop ) ; } if ( ! extra . isEmpty ( ) ) { return bean ; } } +","public static void handleUseOverridePropertiesWithPropertiesComponent ( ConfigurableApplicationContext context , Class < ? > testClass ) throws Exception { Collection < Method > methods = CamelSpringTestHelper . getAllMethods ( testClass ) ; final List < Properties > properties = new LinkedList < > ( ) ; for ( Method method : methods ) { if ( AnnotationUtils . findAnnotation ( method , UseOverridePropertiesWithPropertiesComponent . class ) != null ) { Class < ? > [ ] argTypes = method . getParameterTypes ( ) ; if ( argTypes . length > 0 ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not a no-argument method."" ) ; } else if ( ! Properties . class . isAssignableFrom ( method . getReturnType ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but does not return a java.util.Properties."" ) ; } else if ( ! Modifier . isStatic ( method . getModifiers ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not static."" ) ; } else if ( ! Modifier . isPublic ( method . getModifiers ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not public."" ) ; } try { properties . add ( ( Properties ) method . invoke ( null ) ) ; } catch ( Exception e ) { throw new RuntimeException ( ""Method ["" + method . getName ( ) + ""] threw exception during evaluation."" , e ) ; } } } Properties extra = new Properties ( ) ; for ( Properties prop : properties ) { extra . putAll ( prop ) ; } if ( ! extra . isEmpty ( ) ) { context . addBeanFactoryPostProcessor ( beanFactory -> beanFactory . addBeanPostProcessor ( new BeanPostProcessor ( ) { @ Override public Object postProcessBeforeInitialization ( Object bean , String beanName ) { if ( bean instanceof PropertiesComponent ) { PropertiesComponent pc = ( PropertiesComponent ) bean ; LOGGER . info ( ""Using {} properties to override any existing properties on the PropertiesComponent"" , extra . size ( ) ) ; pc . setOverrideProperties ( extra ) ; } return bean ; } } +" +485,"public static void handleUseOverridePropertiesWithPropertiesComponent ( ConfigurableApplicationContext context , Class < ? > testClass ) throws Exception { Collection < Method > methods = CamelSpringTestHelper . getAllMethods ( testClass ) ; final List < Properties > properties = new LinkedList < > ( ) ; for ( Method method : methods ) { if ( AnnotationUtils . findAnnotation ( method , UseOverridePropertiesWithPropertiesComponent . class ) != null ) { Class < ? > [ ] argTypes = method . getParameterTypes ( ) ; if ( argTypes . length > 0 ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not a no-argument method."" ) ; } else if ( ! Properties . class . isAssignableFrom ( method . getReturnType ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . 
getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but does not return a java.util.Properties."" ) ; } else if ( ! Modifier . isStatic ( method . getModifiers ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not static."" ) ; } else if ( ! Modifier . isPublic ( method . getModifiers ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not public."" ) ; } try { properties . add ( ( Properties ) method . invoke ( null ) ) ; } catch ( Exception e ) { throw new RuntimeException ( ""Method ["" + method . getName ( ) + ""] threw exception during evaluation."" , e ) ; } } } Properties extra = new Properties ( ) ; for ( Properties prop : properties ) { extra . putAll ( prop ) ; } if ( ! extra . isEmpty ( ) ) { context . addBeanFactoryPostProcessor ( beanFactory -> beanFactory . addBeanPostProcessor ( new BeanPostProcessor ( ) { @ Override public Object postProcessBeforeInitialization ( Object bean , String beanName ) { if ( bean instanceof PropertiesComponent ) { PropertiesComponent pc = ( PropertiesComponent ) bean ; pc . setOverrideProperties ( extra ) ; } return bean ; } } +","public static void handleUseOverridePropertiesWithPropertiesComponent ( ConfigurableApplicationContext context , Class < ? > testClass ) throws Exception { Collection < Method > methods = CamelSpringTestHelper . getAllMethods ( testClass ) ; final List < Properties > properties = new LinkedList < > ( ) ; for ( Method method : methods ) { if ( AnnotationUtils . findAnnotation ( method , UseOverridePropertiesWithPropertiesComponent . class ) != null ) { Class < ? > [ ] argTypes = method . getParameterTypes ( ) ; if ( argTypes . length > 0 ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not a no-argument method."" ) ; } else if ( ! Properties . class . isAssignableFrom ( method . getReturnType ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but does not return a java.util.Properties."" ) ; } else if ( ! Modifier . isStatic ( method . getModifiers ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not static."" ) ; } else if ( ! Modifier . isPublic ( method . getModifiers ( ) ) ) { throw new IllegalArgumentException ( ""Method ["" + method . getName ( ) + ""] is annotated with UseOverridePropertiesWithPropertiesComponent but is not public."" ) ; } try { properties . add ( ( Properties ) method . invoke ( null ) ) ; } catch ( Exception e ) { throw new RuntimeException ( ""Method ["" + method . getName ( ) + ""] threw exception during evaluation."" , e ) ; } } } Properties extra = new Properties ( ) ; for ( Properties prop : properties ) { extra . putAll ( prop ) ; } if ( ! extra . isEmpty ( ) ) { context . addBeanFactoryPostProcessor ( beanFactory -> beanFactory . addBeanPostProcessor ( new BeanPostProcessor ( ) { @ Override public Object postProcessBeforeInitialization ( Object bean , String beanName ) { if ( bean instanceof PropertiesComponent ) { PropertiesComponent pc = ( PropertiesComponent ) bean ; LOGGER . 
info ( ""Using {} properties to override any existing properties on the PropertiesComponent"" , extra . size ( ) ) ; pc . setOverrideProperties ( extra ) ; } return bean ; } } +" +486,"@ SuppressWarnings ( ""unchecked"" ) public void run ( ) { long lastTime = 0 ; while ( true ) { int [ ] copiedSelectedPositionIndices ; List < NavigationPosition > copiedPositions ; boolean recenter ; synchronized ( notificationMutex ) { try { notificationMutex . wait ( 250 ) ; } catch ( InterruptedException e ) { } if ( ! running ) return ; if ( ! hasPositions ( ) ) continue ; if ( ! isVisible ( ) ) continue ; long currentTime = currentTimeMillis ( ) ; if ( haveToRecenterMap || haveToRepaintSelectionImmediately || ( haveToRepaintSelection && ( currentTime - lastTime > 500 ) ) ) { log . fine ( ""Woke up to update selected positions: "" + selectionUpdateReason + "" haveToRepaintSelection: "" + haveToRepaintSelection + "" haveToRepaintSelectionImmediately: "" + haveToRepaintSelectionImmediately + "" haveToRecenterMap: "" + haveToRecenterMap ) ; recenter = haveToRecenterMap ; haveToRecenterMap = false ; haveToRepaintSelectionImmediately = false ; haveToRepaintSelection = false ; copiedSelectedPositionIndices = new int [ selectedPositionIndices . length ] ; System . arraycopy ( selectedPositionIndices , 0 , copiedSelectedPositionIndices , 0 , copiedSelectedPositionIndices . length ) ; copiedPositions = new ArrayList < > ( positionsModel . getRoute ( ) . getPositions ( ) ) ; } else continue ; } List < NavigationPosition > render = new ArrayList < > ( positionReducer . reduceSelectedPositions ( copiedPositions , copiedSelectedPositionIndices ) ) ; render . addAll ( selectedPositions ) ; NavigationPosition centerPosition = render . size ( ) > 0 ? new BoundingBox ( render ) . getCenter ( ) : null ; selectPositions ( render , recenter ? centerPosition : null ) ; lastTime = currentTimeMillis ( ) ; } } +","@ SuppressWarnings ( ""unchecked"" ) public void run ( ) { long lastTime = 0 ; while ( true ) { int [ ] copiedSelectedPositionIndices ; List < NavigationPosition > copiedPositions ; boolean recenter ; synchronized ( notificationMutex ) { try { notificationMutex . wait ( 250 ) ; } catch ( InterruptedException e ) { } if ( ! running ) return ; if ( ! hasPositions ( ) ) continue ; if ( ! isVisible ( ) ) continue ; long currentTime = currentTimeMillis ( ) ; if ( haveToRecenterMap || haveToRepaintSelectionImmediately || ( haveToRepaintSelection && ( currentTime - lastTime > 500 ) ) ) { log . fine ( ""Woke up to update selected positions: "" + selectionUpdateReason + "" haveToRepaintSelection: "" + haveToRepaintSelection + "" haveToRepaintSelectionImmediately: "" + haveToRepaintSelectionImmediately + "" haveToRecenterMap: "" + haveToRecenterMap ) ; recenter = haveToRecenterMap ; haveToRecenterMap = false ; haveToRepaintSelectionImmediately = false ; haveToRepaintSelection = false ; copiedSelectedPositionIndices = new int [ selectedPositionIndices . length ] ; System . arraycopy ( selectedPositionIndices , 0 , copiedSelectedPositionIndices , 0 , copiedSelectedPositionIndices . length ) ; copiedPositions = new ArrayList < > ( positionsModel . getRoute ( ) . getPositions ( ) ) ; } else continue ; } List < NavigationPosition > render = new ArrayList < > ( positionReducer . reduceSelectedPositions ( copiedPositions , copiedSelectedPositionIndices ) ) ; render . addAll ( selectedPositions ) ; NavigationPosition centerPosition = render . size ( ) > 0 ? new BoundingBox ( render ) . 
getCenter ( ) : null ; selectPositions ( render , recenter ? centerPosition : null ) ; log . info ( ""Selected positions updated for "" + render . size ( ) + "" positions, reason: "" + selectionUpdateReason + "", recentering: "" + recenter + "" to: "" + centerPosition ) ; lastTime = currentTimeMillis ( ) ; } } +" +487,"private static Map < String , RSAPublicKey > getJwksFromUrl ( final URL url , SSLContext sslContext , HostnameVerifier hostnameVerifier ) { JsonObject response = null ; try { URLConnection connection = url . openConnection ( ) ; if ( connection instanceof HttpsURLConnection ) { HttpsURLConnection conn = ( HttpsURLConnection ) connection ; conn . setRequestMethod ( ""GET"" ) ; conn . setSSLSocketFactory ( sslContext . getSocketFactory ( ) ) ; conn . setHostnameVerifier ( hostnameVerifier ) ; conn . setConnectTimeout ( CONNECTION_TIMEOUT ) ; conn . setReadTimeout ( CONNECTION_TIMEOUT ) ; conn . connect ( ) ; InputStream inputStream = conn . getInputStream ( ) ; response = Json . createReader ( inputStream ) . readObject ( ) ; } } catch ( IOException e ) { return null ; } if ( response == null ) { log . warn ( ""No response when fetching jwk set from "" + url . toString ( ) ) ; return null ; } JsonArray jwks = response . getJsonArray ( ""keys"" ) ; if ( jwks == null ) { log . warn ( ""Unable to parse jwks"" ) ; return null ; } Map < String , RSAPublicKey > res = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < jwks . size ( ) ; i ++ ) { JsonObject jwk = jwks . getJsonObject ( i ) ; String kid = jwk . getString ( ""kid"" , null ) ; String kty = jwk . getString ( ""kty"" , null ) ; String e1 = jwk . getString ( ""e"" , null ) ; String n1 = jwk . getString ( ""n"" , null ) ; if ( kid == null ) { log . tokenRealmJwkMissingClaim ( ""kid"" ) ; continue ; } if ( ! ""RSA"" . equals ( kty ) ) { log . tokenRealmJwkMissingClaim ( ""kty"" ) ; continue ; } if ( e1 == null ) { log . tokenRealmJwkMissingClaim ( ""e"" ) ; continue ; } if ( n1 == null ) { log . tokenRealmJwkMissingClaim ( ""n"" ) ; continue ; } BigInteger e = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( e1 ) ) ; BigInteger n = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( n1 ) ) ; RSAPublicKeySpec keySpec = new RSAPublicKeySpec ( n , e ) ; try { RSAPublicKey publicKey = ( RSAPublicKey ) KeyFactory . getInstance ( ""RSA"" ) . generatePublic ( keySpec ) ; res . put ( kid , publicKey ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException ex ) { log . info ( ""Fetched jwk could not be parsed, ignoring..."" ) ; ex . printStackTrace ( ) ; continue ; } } return res ; } +","private static Map < String , RSAPublicKey > getJwksFromUrl ( final URL url , SSLContext sslContext , HostnameVerifier hostnameVerifier ) { JsonObject response = null ; try { URLConnection connection = url . openConnection ( ) ; if ( connection instanceof HttpsURLConnection ) { HttpsURLConnection conn = ( HttpsURLConnection ) connection ; conn . setRequestMethod ( ""GET"" ) ; conn . setSSLSocketFactory ( sslContext . getSocketFactory ( ) ) ; conn . setHostnameVerifier ( hostnameVerifier ) ; conn . setConnectTimeout ( CONNECTION_TIMEOUT ) ; conn . setReadTimeout ( CONNECTION_TIMEOUT ) ; conn . connect ( ) ; InputStream inputStream = conn . getInputStream ( ) ; response = Json . createReader ( inputStream ) . readObject ( ) ; } } catch ( IOException e ) { log . warn ( ""Unable to connect to "" + url . toString ( ) ) ; return null ; } if ( response == null ) { log . warn ( ""No response when fetching jwk set from "" + url . 
toString ( ) ) ; return null ; } JsonArray jwks = response . getJsonArray ( ""keys"" ) ; if ( jwks == null ) { log . warn ( ""Unable to parse jwks"" ) ; return null ; } Map < String , RSAPublicKey > res = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < jwks . size ( ) ; i ++ ) { JsonObject jwk = jwks . getJsonObject ( i ) ; String kid = jwk . getString ( ""kid"" , null ) ; String kty = jwk . getString ( ""kty"" , null ) ; String e1 = jwk . getString ( ""e"" , null ) ; String n1 = jwk . getString ( ""n"" , null ) ; if ( kid == null ) { log . tokenRealmJwkMissingClaim ( ""kid"" ) ; continue ; } if ( ! ""RSA"" . equals ( kty ) ) { log . tokenRealmJwkMissingClaim ( ""kty"" ) ; continue ; } if ( e1 == null ) { log . tokenRealmJwkMissingClaim ( ""e"" ) ; continue ; } if ( n1 == null ) { log . tokenRealmJwkMissingClaim ( ""n"" ) ; continue ; } BigInteger e = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( e1 ) ) ; BigInteger n = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( n1 ) ) ; RSAPublicKeySpec keySpec = new RSAPublicKeySpec ( n , e ) ; try { RSAPublicKey publicKey = ( RSAPublicKey ) KeyFactory . getInstance ( ""RSA"" ) . generatePublic ( keySpec ) ; res . put ( kid , publicKey ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException ex ) { log . info ( ""Fetched jwk could not be parsed, ignoring..."" ) ; ex . printStackTrace ( ) ; continue ; } } return res ; } +" +488,"private static Map < String , RSAPublicKey > getJwksFromUrl ( final URL url , SSLContext sslContext , HostnameVerifier hostnameVerifier ) { JsonObject response = null ; try { URLConnection connection = url . openConnection ( ) ; if ( connection instanceof HttpsURLConnection ) { HttpsURLConnection conn = ( HttpsURLConnection ) connection ; conn . setRequestMethod ( ""GET"" ) ; conn . setSSLSocketFactory ( sslContext . getSocketFactory ( ) ) ; conn . setHostnameVerifier ( hostnameVerifier ) ; conn . setConnectTimeout ( CONNECTION_TIMEOUT ) ; conn . setReadTimeout ( CONNECTION_TIMEOUT ) ; conn . connect ( ) ; InputStream inputStream = conn . getInputStream ( ) ; response = Json . createReader ( inputStream ) . readObject ( ) ; } } catch ( IOException e ) { log . warn ( ""Unable to connect to "" + url . toString ( ) ) ; return null ; } if ( response == null ) { return null ; } JsonArray jwks = response . getJsonArray ( ""keys"" ) ; if ( jwks == null ) { log . warn ( ""Unable to parse jwks"" ) ; return null ; } Map < String , RSAPublicKey > res = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < jwks . size ( ) ; i ++ ) { JsonObject jwk = jwks . getJsonObject ( i ) ; String kid = jwk . getString ( ""kid"" , null ) ; String kty = jwk . getString ( ""kty"" , null ) ; String e1 = jwk . getString ( ""e"" , null ) ; String n1 = jwk . getString ( ""n"" , null ) ; if ( kid == null ) { log . tokenRealmJwkMissingClaim ( ""kid"" ) ; continue ; } if ( ! ""RSA"" . equals ( kty ) ) { log . tokenRealmJwkMissingClaim ( ""kty"" ) ; continue ; } if ( e1 == null ) { log . tokenRealmJwkMissingClaim ( ""e"" ) ; continue ; } if ( n1 == null ) { log . tokenRealmJwkMissingClaim ( ""n"" ) ; continue ; } BigInteger e = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( e1 ) ) ; BigInteger n = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( n1 ) ) ; RSAPublicKeySpec keySpec = new RSAPublicKeySpec ( n , e ) ; try { RSAPublicKey publicKey = ( RSAPublicKey ) KeyFactory . getInstance ( ""RSA"" ) . generatePublic ( keySpec ) ; res . 
put ( kid , publicKey ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException ex ) { log . info ( ""Fetched jwk could not be parsed, ignoring..."" ) ; ex . printStackTrace ( ) ; continue ; } } return res ; } +","private static Map < String , RSAPublicKey > getJwksFromUrl ( final URL url , SSLContext sslContext , HostnameVerifier hostnameVerifier ) { JsonObject response = null ; try { URLConnection connection = url . openConnection ( ) ; if ( connection instanceof HttpsURLConnection ) { HttpsURLConnection conn = ( HttpsURLConnection ) connection ; conn . setRequestMethod ( ""GET"" ) ; conn . setSSLSocketFactory ( sslContext . getSocketFactory ( ) ) ; conn . setHostnameVerifier ( hostnameVerifier ) ; conn . setConnectTimeout ( CONNECTION_TIMEOUT ) ; conn . setReadTimeout ( CONNECTION_TIMEOUT ) ; conn . connect ( ) ; InputStream inputStream = conn . getInputStream ( ) ; response = Json . createReader ( inputStream ) . readObject ( ) ; } } catch ( IOException e ) { log . warn ( ""Unable to connect to "" + url . toString ( ) ) ; return null ; } if ( response == null ) { log . warn ( ""No response when fetching jwk set from "" + url . toString ( ) ) ; return null ; } JsonArray jwks = response . getJsonArray ( ""keys"" ) ; if ( jwks == null ) { log . warn ( ""Unable to parse jwks"" ) ; return null ; } Map < String , RSAPublicKey > res = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < jwks . size ( ) ; i ++ ) { JsonObject jwk = jwks . getJsonObject ( i ) ; String kid = jwk . getString ( ""kid"" , null ) ; String kty = jwk . getString ( ""kty"" , null ) ; String e1 = jwk . getString ( ""e"" , null ) ; String n1 = jwk . getString ( ""n"" , null ) ; if ( kid == null ) { log . tokenRealmJwkMissingClaim ( ""kid"" ) ; continue ; } if ( ! ""RSA"" . equals ( kty ) ) { log . tokenRealmJwkMissingClaim ( ""kty"" ) ; continue ; } if ( e1 == null ) { log . tokenRealmJwkMissingClaim ( ""e"" ) ; continue ; } if ( n1 == null ) { log . tokenRealmJwkMissingClaim ( ""n"" ) ; continue ; } BigInteger e = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( e1 ) ) ; BigInteger n = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( n1 ) ) ; RSAPublicKeySpec keySpec = new RSAPublicKeySpec ( n , e ) ; try { RSAPublicKey publicKey = ( RSAPublicKey ) KeyFactory . getInstance ( ""RSA"" ) . generatePublic ( keySpec ) ; res . put ( kid , publicKey ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException ex ) { log . info ( ""Fetched jwk could not be parsed, ignoring..."" ) ; ex . printStackTrace ( ) ; continue ; } } return res ; } +" +489,"private static Map < String , RSAPublicKey > getJwksFromUrl ( final URL url , SSLContext sslContext , HostnameVerifier hostnameVerifier ) { JsonObject response = null ; try { URLConnection connection = url . openConnection ( ) ; if ( connection instanceof HttpsURLConnection ) { HttpsURLConnection conn = ( HttpsURLConnection ) connection ; conn . setRequestMethod ( ""GET"" ) ; conn . setSSLSocketFactory ( sslContext . getSocketFactory ( ) ) ; conn . setHostnameVerifier ( hostnameVerifier ) ; conn . setConnectTimeout ( CONNECTION_TIMEOUT ) ; conn . setReadTimeout ( CONNECTION_TIMEOUT ) ; conn . connect ( ) ; InputStream inputStream = conn . getInputStream ( ) ; response = Json . createReader ( inputStream ) . readObject ( ) ; } } catch ( IOException e ) { log . warn ( ""Unable to connect to "" + url . toString ( ) ) ; return null ; } if ( response == null ) { log . warn ( ""No response when fetching jwk set from "" + url . 
toString ( ) ) ; return null ; } JsonArray jwks = response . getJsonArray ( ""keys"" ) ; if ( jwks == null ) { return null ; } Map < String , RSAPublicKey > res = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < jwks . size ( ) ; i ++ ) { JsonObject jwk = jwks . getJsonObject ( i ) ; String kid = jwk . getString ( ""kid"" , null ) ; String kty = jwk . getString ( ""kty"" , null ) ; String e1 = jwk . getString ( ""e"" , null ) ; String n1 = jwk . getString ( ""n"" , null ) ; if ( kid == null ) { log . tokenRealmJwkMissingClaim ( ""kid"" ) ; continue ; } if ( ! ""RSA"" . equals ( kty ) ) { log . tokenRealmJwkMissingClaim ( ""kty"" ) ; continue ; } if ( e1 == null ) { log . tokenRealmJwkMissingClaim ( ""e"" ) ; continue ; } if ( n1 == null ) { log . tokenRealmJwkMissingClaim ( ""n"" ) ; continue ; } BigInteger e = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( e1 ) ) ; BigInteger n = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( n1 ) ) ; RSAPublicKeySpec keySpec = new RSAPublicKeySpec ( n , e ) ; try { RSAPublicKey publicKey = ( RSAPublicKey ) KeyFactory . getInstance ( ""RSA"" ) . generatePublic ( keySpec ) ; res . put ( kid , publicKey ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException ex ) { log . info ( ""Fetched jwk could not be parsed, ignoring..."" ) ; ex . printStackTrace ( ) ; continue ; } } return res ; } +","private static Map < String , RSAPublicKey > getJwksFromUrl ( final URL url , SSLContext sslContext , HostnameVerifier hostnameVerifier ) { JsonObject response = null ; try { URLConnection connection = url . openConnection ( ) ; if ( connection instanceof HttpsURLConnection ) { HttpsURLConnection conn = ( HttpsURLConnection ) connection ; conn . setRequestMethod ( ""GET"" ) ; conn . setSSLSocketFactory ( sslContext . getSocketFactory ( ) ) ; conn . setHostnameVerifier ( hostnameVerifier ) ; conn . setConnectTimeout ( CONNECTION_TIMEOUT ) ; conn . setReadTimeout ( CONNECTION_TIMEOUT ) ; conn . connect ( ) ; InputStream inputStream = conn . getInputStream ( ) ; response = Json . createReader ( inputStream ) . readObject ( ) ; } } catch ( IOException e ) { log . warn ( ""Unable to connect to "" + url . toString ( ) ) ; return null ; } if ( response == null ) { log . warn ( ""No response when fetching jwk set from "" + url . toString ( ) ) ; return null ; } JsonArray jwks = response . getJsonArray ( ""keys"" ) ; if ( jwks == null ) { log . warn ( ""Unable to parse jwks"" ) ; return null ; } Map < String , RSAPublicKey > res = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < jwks . size ( ) ; i ++ ) { JsonObject jwk = jwks . getJsonObject ( i ) ; String kid = jwk . getString ( ""kid"" , null ) ; String kty = jwk . getString ( ""kty"" , null ) ; String e1 = jwk . getString ( ""e"" , null ) ; String n1 = jwk . getString ( ""n"" , null ) ; if ( kid == null ) { log . tokenRealmJwkMissingClaim ( ""kid"" ) ; continue ; } if ( ! ""RSA"" . equals ( kty ) ) { log . tokenRealmJwkMissingClaim ( ""kty"" ) ; continue ; } if ( e1 == null ) { log . tokenRealmJwkMissingClaim ( ""e"" ) ; continue ; } if ( n1 == null ) { log . tokenRealmJwkMissingClaim ( ""n"" ) ; continue ; } BigInteger e = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( e1 ) ) ; BigInteger n = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( n1 ) ) ; RSAPublicKeySpec keySpec = new RSAPublicKeySpec ( n , e ) ; try { RSAPublicKey publicKey = ( RSAPublicKey ) KeyFactory . getInstance ( ""RSA"" ) . generatePublic ( keySpec ) ; res . 
put ( kid , publicKey ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException ex ) { log . info ( ""Fetched jwk could not be parsed, ignoring..."" ) ; ex . printStackTrace ( ) ; continue ; } } return res ; } +" +490,"private static Map < String , RSAPublicKey > getJwksFromUrl ( final URL url , SSLContext sslContext , HostnameVerifier hostnameVerifier ) { JsonObject response = null ; try { URLConnection connection = url . openConnection ( ) ; if ( connection instanceof HttpsURLConnection ) { HttpsURLConnection conn = ( HttpsURLConnection ) connection ; conn . setRequestMethod ( ""GET"" ) ; conn . setSSLSocketFactory ( sslContext . getSocketFactory ( ) ) ; conn . setHostnameVerifier ( hostnameVerifier ) ; conn . setConnectTimeout ( CONNECTION_TIMEOUT ) ; conn . setReadTimeout ( CONNECTION_TIMEOUT ) ; conn . connect ( ) ; InputStream inputStream = conn . getInputStream ( ) ; response = Json . createReader ( inputStream ) . readObject ( ) ; } } catch ( IOException e ) { log . warn ( ""Unable to connect to "" + url . toString ( ) ) ; return null ; } if ( response == null ) { log . warn ( ""No response when fetching jwk set from "" + url . toString ( ) ) ; return null ; } JsonArray jwks = response . getJsonArray ( ""keys"" ) ; if ( jwks == null ) { log . warn ( ""Unable to parse jwks"" ) ; return null ; } Map < String , RSAPublicKey > res = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < jwks . size ( ) ; i ++ ) { JsonObject jwk = jwks . getJsonObject ( i ) ; String kid = jwk . getString ( ""kid"" , null ) ; String kty = jwk . getString ( ""kty"" , null ) ; String e1 = jwk . getString ( ""e"" , null ) ; String n1 = jwk . getString ( ""n"" , null ) ; if ( kid == null ) { log . tokenRealmJwkMissingClaim ( ""kid"" ) ; continue ; } if ( ! ""RSA"" . equals ( kty ) ) { log . tokenRealmJwkMissingClaim ( ""kty"" ) ; continue ; } if ( e1 == null ) { log . tokenRealmJwkMissingClaim ( ""e"" ) ; continue ; } if ( n1 == null ) { log . tokenRealmJwkMissingClaim ( ""n"" ) ; continue ; } BigInteger e = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( e1 ) ) ; BigInteger n = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( n1 ) ) ; RSAPublicKeySpec keySpec = new RSAPublicKeySpec ( n , e ) ; try { RSAPublicKey publicKey = ( RSAPublicKey ) KeyFactory . getInstance ( ""RSA"" ) . generatePublic ( keySpec ) ; res . put ( kid , publicKey ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException ex ) { ex . printStackTrace ( ) ; continue ; } } return res ; } +","private static Map < String , RSAPublicKey > getJwksFromUrl ( final URL url , SSLContext sslContext , HostnameVerifier hostnameVerifier ) { JsonObject response = null ; try { URLConnection connection = url . openConnection ( ) ; if ( connection instanceof HttpsURLConnection ) { HttpsURLConnection conn = ( HttpsURLConnection ) connection ; conn . setRequestMethod ( ""GET"" ) ; conn . setSSLSocketFactory ( sslContext . getSocketFactory ( ) ) ; conn . setHostnameVerifier ( hostnameVerifier ) ; conn . setConnectTimeout ( CONNECTION_TIMEOUT ) ; conn . setReadTimeout ( CONNECTION_TIMEOUT ) ; conn . connect ( ) ; InputStream inputStream = conn . getInputStream ( ) ; response = Json . createReader ( inputStream ) . readObject ( ) ; } } catch ( IOException e ) { log . warn ( ""Unable to connect to "" + url . toString ( ) ) ; return null ; } if ( response == null ) { log . warn ( ""No response when fetching jwk set from "" + url . toString ( ) ) ; return null ; } JsonArray jwks = response . 
getJsonArray ( ""keys"" ) ; if ( jwks == null ) { log . warn ( ""Unable to parse jwks"" ) ; return null ; } Map < String , RSAPublicKey > res = new LinkedHashMap < > ( ) ; for ( int i = 0 ; i < jwks . size ( ) ; i ++ ) { JsonObject jwk = jwks . getJsonObject ( i ) ; String kid = jwk . getString ( ""kid"" , null ) ; String kty = jwk . getString ( ""kty"" , null ) ; String e1 = jwk . getString ( ""e"" , null ) ; String n1 = jwk . getString ( ""n"" , null ) ; if ( kid == null ) { log . tokenRealmJwkMissingClaim ( ""kid"" ) ; continue ; } if ( ! ""RSA"" . equals ( kty ) ) { log . tokenRealmJwkMissingClaim ( ""kty"" ) ; continue ; } if ( e1 == null ) { log . tokenRealmJwkMissingClaim ( ""e"" ) ; continue ; } if ( n1 == null ) { log . tokenRealmJwkMissingClaim ( ""n"" ) ; continue ; } BigInteger e = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( e1 ) ) ; BigInteger n = new BigInteger ( 1 , Base64 . getUrlDecoder ( ) . decode ( n1 ) ) ; RSAPublicKeySpec keySpec = new RSAPublicKeySpec ( n , e ) ; try { RSAPublicKey publicKey = ( RSAPublicKey ) KeyFactory . getInstance ( ""RSA"" ) . generatePublic ( keySpec ) ; res . put ( kid , publicKey ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException ex ) { log . info ( ""Fetched jwk could not be parsed, ignoring..."" ) ; ex . printStackTrace ( ) ; continue ; } } return res ; } +" +491,"private void updateTaskWithChangedElementProperties ( ) { if ( ! changedElementProperties . isEmpty ( ) ) { getTaskService ( ) . updateChangedElementProperties ( task . getId ( ) , changedElementProperties ) ; changedElementProperties . clear ( ) ; } } +","private void updateTaskWithChangedElementProperties ( ) { if ( ! changedElementProperties . isEmpty ( ) ) { getTaskService ( ) . updateChangedElementProperties ( task . getId ( ) , changedElementProperties ) ; changedElementProperties . clear ( ) ; LOG . info ( ""Updated task: saved changes in element properties."" ) ; } } +" +492,"protected void restoreBackup ( String backupSubFolder ) throws ApsSystemException { try { this . restoreLocalDump ( this . getEntandoTableMapping ( ) , backupSubFolder ) ; List < Component > components = this . getComponents ( ) ; for ( int i = 0 ; i < components . size ( ) ; i ++ ) { Component componentConfiguration = components . get ( i ) ; this . restoreLocalDump ( componentConfiguration . getTableMapping ( ) , backupSubFolder ) ; } } catch ( Throwable t ) { throw new ApsSystemException ( ""Error while restoring local backup"" , t ) ; } } +","protected void restoreBackup ( String backupSubFolder ) throws ApsSystemException { try { this . restoreLocalDump ( this . getEntandoTableMapping ( ) , backupSubFolder ) ; List < Component > components = this . getComponents ( ) ; for ( int i = 0 ; i < components . size ( ) ; i ++ ) { Component componentConfiguration = components . get ( i ) ; this . restoreLocalDump ( componentConfiguration . getTableMapping ( ) , backupSubFolder ) ; } } catch ( Throwable t ) { _logger . error ( ""Error while restoring local backup"" , t ) ; throw new ApsSystemException ( ""Error while restoring local backup"" , t ) ; } } +" +493,"@ Transactional public void registerChannel ( ChannelRequest channelRequest ) { addOrUpdate ( ChannelBuilder . fromChannelRequest ( channelRequest ) . build ( ) ) ; } +","@ Transactional public void registerChannel ( ChannelRequest channelRequest ) { LOGGER . info ( ""Registering channel: {}"" , channelRequest . getModuleName ( ) ) ; addOrUpdate ( ChannelBuilder . fromChannelRequest ( channelRequest ) . 
build ( ) ) ; } +" +494,"public void startStream ( ) { task = executor . submit ( new WebHdfsPersistReaderTask ( this ) ) ; } +","public void startStream ( ) { LOGGER . debug ( ""startStream"" ) ; task = executor . submit ( new WebHdfsPersistReaderTask ( this ) ) ; } +" +495,"private NetworkTopology getNetworkDescriptor ( String filename ) throws JAXBException , IOException , ResourceException , SAXException { InputStream stream = null ; try { URL url = new URL ( filename ) ; stream = url . openStream ( ) ; } catch ( MalformedURLException ignore ) { log . error ( ""file: "" + filename ) ; stream = new FileInputStream ( filename ) ; } NetworkTopology rd = loadNetworkDescriptor ( stream ) ; return rd ; } +","private NetworkTopology getNetworkDescriptor ( String filename ) throws JAXBException , IOException , ResourceException , SAXException { InputStream stream = null ; try { URL url = new URL ( filename ) ; log . info ( ""URL: "" + url ) ; stream = url . openStream ( ) ; } catch ( MalformedURLException ignore ) { log . error ( ""file: "" + filename ) ; stream = new FileInputStream ( filename ) ; } NetworkTopology rd = loadNetworkDescriptor ( stream ) ; return rd ; } +" +496,"private NetworkTopology getNetworkDescriptor ( String filename ) throws JAXBException , IOException , ResourceException , SAXException { InputStream stream = null ; try { URL url = new URL ( filename ) ; log . info ( ""URL: "" + url ) ; stream = url . openStream ( ) ; } catch ( MalformedURLException ignore ) { stream = new FileInputStream ( filename ) ; } NetworkTopology rd = loadNetworkDescriptor ( stream ) ; return rd ; } +","private NetworkTopology getNetworkDescriptor ( String filename ) throws JAXBException , IOException , ResourceException , SAXException { InputStream stream = null ; try { URL url = new URL ( filename ) ; log . info ( ""URL: "" + url ) ; stream = url . openStream ( ) ; } catch ( MalformedURLException ignore ) { log . error ( ""file: "" + filename ) ; stream = new FileInputStream ( filename ) ; } NetworkTopology rd = loadNetworkDescriptor ( stream ) ; return rd ; } +" +497,"public static void removeRedundantSubClassAxioms ( OWLReasoner reasoner , Map < String , String > options ) { OWLOntology ontology = reasoner . getRootOntology ( ) ; OWLOntologyManager manager = ontology . getOWLOntologyManager ( ) ; OWLDataFactory dataFactory = manager . getOWLDataFactory ( ) ; for ( OWLClass thisClass : ontology . getClassesInSignature ( ) ) { if ( thisClass . isOWLNothing ( ) || thisClass . isOWLThing ( ) ) { continue ; } Set < OWLClass > inferredSuperClasses = new HashSet < > ( ) ; for ( Node < OWLClass > node : reasoner . getSuperClasses ( thisClass , true ) ) { for ( OWLClass inferredSuperClass : node ) { inferredSuperClasses . add ( inferredSuperClass ) ; } } for ( OWLSubClassOfAxiom subClassAxiom : ontology . getSubClassAxiomsForSubClass ( thisClass ) ) { if ( OptionsHelper . optionIsTrue ( options , ""preserve-annotated-axioms"" ) ) { if ( subClassAxiom . getAnnotations ( ) . size ( ) > 0 ) { continue ; } } if ( subClassAxiom . getSuperClass ( ) . isAnonymous ( ) ) { continue ; } OWLClass assertedSuperClass = subClassAxiom . getSuperClass ( ) . asOWLClass ( ) ; if ( inferredSuperClasses . contains ( assertedSuperClass ) ) { continue ; } manager . removeAxiom ( ontology , dataFactory . getOWLSubClassOfAxiom ( thisClass , assertedSuperClass ) ) ; } } logger . info ( ""Ontology now has {} axioms."" , ontology . getAxioms ( ) . 
size ( ) ) ; } +","public static void removeRedundantSubClassAxioms ( OWLReasoner reasoner , Map < String , String > options ) { logger . info ( ""Removing redundant subclass axioms..."" ) ; OWLOntology ontology = reasoner . getRootOntology ( ) ; OWLOntologyManager manager = ontology . getOWLOntologyManager ( ) ; OWLDataFactory dataFactory = manager . getOWLDataFactory ( ) ; for ( OWLClass thisClass : ontology . getClassesInSignature ( ) ) { if ( thisClass . isOWLNothing ( ) || thisClass . isOWLThing ( ) ) { continue ; } Set < OWLClass > inferredSuperClasses = new HashSet < > ( ) ; for ( Node < OWLClass > node : reasoner . getSuperClasses ( thisClass , true ) ) { for ( OWLClass inferredSuperClass : node ) { inferredSuperClasses . add ( inferredSuperClass ) ; } } for ( OWLSubClassOfAxiom subClassAxiom : ontology . getSubClassAxiomsForSubClass ( thisClass ) ) { if ( OptionsHelper . optionIsTrue ( options , ""preserve-annotated-axioms"" ) ) { if ( subClassAxiom . getAnnotations ( ) . size ( ) > 0 ) { continue ; } } if ( subClassAxiom . getSuperClass ( ) . isAnonymous ( ) ) { continue ; } OWLClass assertedSuperClass = subClassAxiom . getSuperClass ( ) . asOWLClass ( ) ; if ( inferredSuperClasses . contains ( assertedSuperClass ) ) { continue ; } manager . removeAxiom ( ontology , dataFactory . getOWLSubClassOfAxiom ( thisClass , assertedSuperClass ) ) ; } } logger . info ( ""Ontology now has {} axioms."" , ontology . getAxioms ( ) . size ( ) ) ; } +" +498,"public static void removeRedundantSubClassAxioms ( OWLReasoner reasoner , Map < String , String > options ) { logger . info ( ""Removing redundant subclass axioms..."" ) ; OWLOntology ontology = reasoner . getRootOntology ( ) ; OWLOntologyManager manager = ontology . getOWLOntologyManager ( ) ; OWLDataFactory dataFactory = manager . getOWLDataFactory ( ) ; for ( OWLClass thisClass : ontology . getClassesInSignature ( ) ) { if ( thisClass . isOWLNothing ( ) || thisClass . isOWLThing ( ) ) { continue ; } Set < OWLClass > inferredSuperClasses = new HashSet < > ( ) ; for ( Node < OWLClass > node : reasoner . getSuperClasses ( thisClass , true ) ) { for ( OWLClass inferredSuperClass : node ) { inferredSuperClasses . add ( inferredSuperClass ) ; } } for ( OWLSubClassOfAxiom subClassAxiom : ontology . getSubClassAxiomsForSubClass ( thisClass ) ) { if ( OptionsHelper . optionIsTrue ( options , ""preserve-annotated-axioms"" ) ) { if ( subClassAxiom . getAnnotations ( ) . size ( ) > 0 ) { continue ; } } if ( subClassAxiom . getSuperClass ( ) . isAnonymous ( ) ) { continue ; } OWLClass assertedSuperClass = subClassAxiom . getSuperClass ( ) . asOWLClass ( ) ; if ( inferredSuperClasses . contains ( assertedSuperClass ) ) { continue ; } manager . removeAxiom ( ontology , dataFactory . getOWLSubClassOfAxiom ( thisClass , assertedSuperClass ) ) ; } } } +","public static void removeRedundantSubClassAxioms ( OWLReasoner reasoner , Map < String , String > options ) { logger . info ( ""Removing redundant subclass axioms..."" ) ; OWLOntology ontology = reasoner . getRootOntology ( ) ; OWLOntologyManager manager = ontology . getOWLOntologyManager ( ) ; OWLDataFactory dataFactory = manager . getOWLDataFactory ( ) ; for ( OWLClass thisClass : ontology . getClassesInSignature ( ) ) { if ( thisClass . isOWLNothing ( ) || thisClass . isOWLThing ( ) ) { continue ; } Set < OWLClass > inferredSuperClasses = new HashSet < > ( ) ; for ( Node < OWLClass > node : reasoner . 
getSuperClasses ( thisClass , true ) ) { for ( OWLClass inferredSuperClass : node ) { inferredSuperClasses . add ( inferredSuperClass ) ; } } for ( OWLSubClassOfAxiom subClassAxiom : ontology . getSubClassAxiomsForSubClass ( thisClass ) ) { if ( OptionsHelper . optionIsTrue ( options , ""preserve-annotated-axioms"" ) ) { if ( subClassAxiom . getAnnotations ( ) . size ( ) > 0 ) { continue ; } } if ( subClassAxiom . getSuperClass ( ) . isAnonymous ( ) ) { continue ; } OWLClass assertedSuperClass = subClassAxiom . getSuperClass ( ) . asOWLClass ( ) ; if ( inferredSuperClasses . contains ( assertedSuperClass ) ) { continue ; } manager . removeAxiom ( ontology , dataFactory . getOWLSubClassOfAxiom ( thisClass , assertedSuperClass ) ) ; } } logger . info ( ""Ontology now has {} axioms."" , ontology . getAxioms ( ) . size ( ) ) ; } +" +499,"public synchronized boolean deleteAll ( ) { syncCloseAllProcessor ( ) ; for ( PartialPath storageGroup : IoTDB . metaManager . getAllStorageGroupPaths ( ) ) { this . deleteAllDataFilesInOneStorageGroup ( storageGroup ) ; } return true ; } +","public synchronized boolean deleteAll ( ) { logger . info ( ""Start deleting all storage groups' timeseries"" ) ; syncCloseAllProcessor ( ) ; for ( PartialPath storageGroup : IoTDB . metaManager . getAllStorageGroupPaths ( ) ) { this . deleteAllDataFilesInOneStorageGroup ( storageGroup ) ; } return true ; } +" +500,"private boolean doBindToDevice ( ) { try { return device . bindToLocal ( cluster . getId ( ) ) ; } catch ( ZigBeeNetworkManagerException e ) { log . error ( ""Binding failed"" , e ) ; return false ; } } +","private boolean doBindToDevice ( ) { try { return device . bindToLocal ( cluster . getId ( ) ) ; } catch ( ZigBeeNetworkManagerException e ) { log . debug ( ""Unable to bind to device {} on cluster {}"" , device , cluster . getId ( ) ) ; log . error ( ""Binding failed"" , e ) ; return false ; } } +" +501,"private boolean doBindToDevice ( ) { try { return device . bindToLocal ( cluster . getId ( ) ) ; } catch ( ZigBeeNetworkManagerException e ) { log . debug ( ""Unable to bind to device {} on cluster {}"" , device , cluster . getId ( ) ) ; return false ; } } +","private boolean doBindToDevice ( ) { try { return device . bindToLocal ( cluster . getId ( ) ) ; } catch ( ZigBeeNetworkManagerException e ) { log . debug ( ""Unable to bind to device {} on cluster {}"" , device , cluster . getId ( ) ) ; log . error ( ""Binding failed"" , e ) ; return false ; } } +" +502,"private void readAndCheckEntry ( JarFile jarFile , JarEntry entry ) throws VerifyFailedException { InputStream is = null ; byte [ ] buffer = new byte [ 8192 ] ; try { is = jarFile . getInputStream ( entry ) ; while ( ( is . read ( buffer , 0 , buffer . length ) ) != - 1 ) ; } catch ( java . lang . SecurityException ex ) { throw new VerifyFailedException ( ""Verify failed."" + ex . getMessage ( ) , ex ) ; } catch ( IOException ex ) { throw new VerifyFailedException ( ""Verify failed."" + ex . getMessage ( ) , ex ) ; } finally { if ( is != null ) { try { is . close ( ) ; } catch ( IOException ex ) { } } } } +","private void readAndCheckEntry ( JarFile jarFile , JarEntry entry ) throws VerifyFailedException { InputStream is = null ; byte [ ] buffer = new byte [ 8192 ] ; try { is = jarFile . getInputStream ( entry ) ; while ( ( is . read ( buffer , 0 , buffer . length ) ) != - 1 ) ; } catch ( java . lang . SecurityException ex ) { throw new VerifyFailedException ( ""Verify failed."" + ex . 
getMessage ( ) , ex ) ; } catch ( IOException ex ) { throw new VerifyFailedException ( ""Verify failed."" + ex . getMessage ( ) , ex ) ; } finally { if ( is != null ) { try { is . close ( ) ; } catch ( IOException ex ) { LOGGER . debug ( ""Close stream failed:"" + ex ) ; } } } } +" +503,"public void superAddValues ( RecordingObject recordingObject ) throws Exception { Group root = getRoot ( ) ; String path = ""/"" + recordingObject . getVariable ( ) . replace ( ""."" , ""/"" ) ; HObject v = FileFormat . findObject ( recordingsH5File , path ) ; if ( v == null ) { String [ ] splitByPeriod = recordingObject . getVariable ( ) . split ( ""\\."" ) ; Group current = root ; String currentTag = recordingObject . getVariable ( ) ; String currentPath = """" ; for ( int s = 0 ; s < splitByPeriod . length - 1 ; s ++ ) { currentTag = splitByPeriod [ s ] ; currentPath = currentPath . concat ( ""/"" + currentTag ) ; current = createGroup ( current , currentTag , currentPath , root ) ; } currentTag = splitByPeriod [ splitByPeriod . length - 1 ] ; this . createDataSet ( recordingObject , current , currentTag ) ; } } +","public void superAddValues ( RecordingObject recordingObject ) throws Exception { Group root = getRoot ( ) ; String path = ""/"" + recordingObject . getVariable ( ) . replace ( ""."" , ""/"" ) ; HObject v = FileFormat . findObject ( recordingsH5File , path ) ; if ( v == null ) { _logger . warn ( ""Creating variable "" + recordingObject . getVariable ( ) + "" "" + i ++ ) ; String [ ] splitByPeriod = recordingObject . getVariable ( ) . split ( ""\\."" ) ; Group current = root ; String currentTag = recordingObject . getVariable ( ) ; String currentPath = """" ; for ( int s = 0 ; s < splitByPeriod . length - 1 ; s ++ ) { currentTag = splitByPeriod [ s ] ; currentPath = currentPath . concat ( ""/"" + currentTag ) ; current = createGroup ( current , currentTag , currentPath , root ) ; } currentTag = splitByPeriod [ splitByPeriod . length - 1 ] ; this . createDataSet ( recordingObject , current , currentTag ) ; } } +" +504,"public void onAccuracyChanged ( Sensor sensor , int accuracy ) { } +","public void onAccuracyChanged ( Sensor sensor , int accuracy ) { logger . debug ( String . format ( ""accuracy changed to %d for %s"" , accuracy , sensor . getName ( ) ) ) ; } +" +505,"public Number readByValue ( Integer value ) throws DataAccessException { try { return ( Number ) em . createQuery ( ""SELECT n "" + ""FROM Number n "" + ""WHERE n.value = :value"" ) . setParameter ( ""value"" , value ) . getSingleResult ( ) ; } catch ( NoResultException e ) { return null ; } } +","public Number readByValue ( Integer value ) throws DataAccessException { try { return ( Number ) em . createQuery ( ""SELECT n "" + ""FROM Number n "" + ""WHERE n.value = :value"" ) . setParameter ( ""value"" , value ) . getSingleResult ( ) ; } catch ( NoResultException e ) { logger . warn ( ""Number \"""" + value + ""\"" was not found"" ) ; return null ; } } +" +506,"public void onContainerStopped ( ContainerId containerId ) { if ( LOG . isDebugEnabled ( ) ) { } containers . remove ( containerId ) ; } +","public void onContainerStopped ( ContainerId containerId ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( ""Succeeded to stop Container "" + containerId ) ; } containers . remove ( containerId ) ; } +" +507,"protected AccountImpl createAccount ( ) { AuthenticationCommand command = new AuthenticationCommand ( ) { @ Override public Access call ( ) { return new DummyAccess ( accountConfig . 
getAuthUrl ( ) ) ; } @ Override public String getUrl ( ) { return ""abc"" ; } } ; Access access = new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; LOG . info ( ""JOSS / Applying preferred region: "" + ( accountConfig . getPreferredRegion ( ) == null ? ""none"" : accountConfig . getPreferredRegion ( ) ) ) ; LOG . info ( ""JOSS / Using TempURL hash prefix source: "" + accountConfig . getTempUrlHashPrefixSource ( ) ) ; return new AccountImpl ( command , mHttpClient , access , accountConfig . isAllowCaching ( ) , accountConfig . getTempUrlHashPrefixSource ( ) , accountConfig . getDelimiter ( ) ) ; } +","protected AccountImpl createAccount ( ) { AuthenticationCommand command = new AuthenticationCommand ( ) { @ Override public Access call ( ) { return new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; } @ Override public String getUrl ( ) { return ""abc"" ; } } ; Access access = new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; LOG . info ( ""JOSS / Successfully authenticated"" ) ; LOG . info ( ""JOSS / Applying preferred region: "" + ( accountConfig . getPreferredRegion ( ) == null ? ""none"" : accountConfig . getPreferredRegion ( ) ) ) ; LOG . info ( ""JOSS / Using TempURL hash prefix source: "" + accountConfig . getTempUrlHashPrefixSource ( ) ) ; return new AccountImpl ( command , mHttpClient , access , accountConfig . isAllowCaching ( ) , accountConfig . getTempUrlHashPrefixSource ( ) , accountConfig . getDelimiter ( ) ) ; } +" +508,"protected AccountImpl createAccount ( ) { AuthenticationCommand command = new AuthenticationCommand ( ) { @ Override public Access call ( ) { return new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; } @ Override public String getUrl ( ) { return ""abc"" ; } } ; Access access = new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; LOG . info ( ""JOSS / Successfully authenticated"" ) ; LOG . info ( ""JOSS / Using TempURL hash prefix source: "" + accountConfig . getTempUrlHashPrefixSource ( ) ) ; return new AccountImpl ( command , mHttpClient , access , accountConfig . isAllowCaching ( ) , accountConfig . getTempUrlHashPrefixSource ( ) , accountConfig . getDelimiter ( ) ) ; } +","protected AccountImpl createAccount ( ) { AuthenticationCommand command = new AuthenticationCommand ( ) { @ Override public Access call ( ) { return new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; } @ Override public String getUrl ( ) { return ""abc"" ; } } ; Access access = new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; LOG . info ( ""JOSS / Successfully authenticated"" ) ; LOG . info ( ""JOSS / Applying preferred region: "" + ( accountConfig . getPreferredRegion ( ) == null ? ""none"" : accountConfig . getPreferredRegion ( ) ) ) ; LOG . info ( ""JOSS / Using TempURL hash prefix source: "" + accountConfig . getTempUrlHashPrefixSource ( ) ) ; return new AccountImpl ( command , mHttpClient , access , accountConfig . isAllowCaching ( ) , accountConfig . getTempUrlHashPrefixSource ( ) , accountConfig . getDelimiter ( ) ) ; } +" +509,"protected AccountImpl createAccount ( ) { AuthenticationCommand command = new AuthenticationCommand ( ) { @ Override public Access call ( ) { return new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; } @ Override public String getUrl ( ) { return ""abc"" ; } } ; Access access = new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; LOG . info ( ""JOSS / Successfully authenticated"" ) ; LOG . info ( ""JOSS / Applying preferred region: "" + ( accountConfig . getPreferredRegion ( ) == null ? ""none"" : accountConfig . 
getPreferredRegion ( ) ) ) ; return new AccountImpl ( command , mHttpClient , access , accountConfig . isAllowCaching ( ) , accountConfig . getTempUrlHashPrefixSource ( ) , accountConfig . getDelimiter ( ) ) ; } +","protected AccountImpl createAccount ( ) { AuthenticationCommand command = new AuthenticationCommand ( ) { @ Override public Access call ( ) { return new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; } @ Override public String getUrl ( ) { return ""abc"" ; } } ; Access access = new DummyAccess ( accountConfig . getAuthUrl ( ) ) ; LOG . info ( ""JOSS / Successfully authenticated"" ) ; LOG . info ( ""JOSS / Applying preferred region: "" + ( accountConfig . getPreferredRegion ( ) == null ? ""none"" : accountConfig . getPreferredRegion ( ) ) ) ; LOG . info ( ""JOSS / Using TempURL hash prefix source: "" + accountConfig . getTempUrlHashPrefixSource ( ) ) ; return new AccountImpl ( command , mHttpClient , access , accountConfig . isAllowCaching ( ) , accountConfig . getTempUrlHashPrefixSource ( ) , accountConfig . getDelimiter ( ) ) ; } +" +510,"public void reloadEntitySearchRecords ( String id , IApsEntity entity ) { Connection conn = null ; try { conn = this . getConnection ( ) ; conn . setAutoCommit ( false ) ; this . executeReloadEntitySearchRecords ( id , entity , conn ) ; conn . commit ( ) ; } catch ( Throwable t ) { this . executeRollback ( conn ) ; throw new RuntimeException ( ""Error detected while reloading references"" , t ) ; } finally { this . closeConnection ( conn ) ; } } +","public void reloadEntitySearchRecords ( String id , IApsEntity entity ) { Connection conn = null ; try { conn = this . getConnection ( ) ; conn . setAutoCommit ( false ) ; this . executeReloadEntitySearchRecords ( id , entity , conn ) ; conn . commit ( ) ; } catch ( Throwable t ) { this . executeRollback ( conn ) ; _logger . error ( ""Error detected while reloading references"" , t ) ; throw new RuntimeException ( ""Error detected while reloading references"" , t ) ; } finally { this . closeConnection ( conn ) ; } } +" +511,"public void before ( Object target , String className , String methodName , String parameterDescription , Object [ ] args ) { final InterceptorScopeInvocation transaction = scope . getCurrentInvocation ( ) ; if ( transaction . tryEnter ( policy ) ) { try { this . delegate . before ( target , className , methodName , parameterDescription , args ) ; } catch ( Throwable t ) { exceptionHandler . handleException ( t ) ; } } else { if ( debugEnabled ) { } } } +","public void before ( Object target , String className , String methodName , String parameterDescription , Object [ ] args ) { final InterceptorScopeInvocation transaction = scope . getCurrentInvocation ( ) ; if ( transaction . tryEnter ( policy ) ) { try { this . delegate . before ( target , className , methodName , parameterDescription , args ) ; } catch ( Throwable t ) { exceptionHandler . handleException ( t ) ; } } else { if ( debugEnabled ) { logger . debug ( ""tryBefore() returns false: interceptorScopeTransaction: {}, executionPoint: {}. Skip interceptor {}"" , transaction , policy , delegate . getClass ( ) ) ; } } } +" +512,"private void submitParentCreateRequest ( Context context , CreateRequest createRequest ) throws IngestException , SourceUnavailableException { List < Metacard > createdMetacards = catalogFramework . create ( createRequest ) . getCreatedMetacards ( ) ; List < String > createdIds = createdMetacards . stream ( ) . map ( Metacard :: getId ) . collect ( Collectors . toList ( ) ) ; context . 
setParentMetacard ( createdMetacards . get ( createdMetacards . size ( ) - 1 ) ) ; } +","private void submitParentCreateRequest ( Context context , CreateRequest createRequest ) throws IngestException , SourceUnavailableException { List < Metacard > createdMetacards = catalogFramework . create ( createRequest ) . getCreatedMetacards ( ) ; List < String > createdIds = createdMetacards . stream ( ) . map ( Metacard :: getId ) . collect ( Collectors . toList ( ) ) ; LOGGER . debug ( ""created parent metacards with ids: {}"" , createdIds ) ; context . setParentMetacard ( createdMetacards . get ( createdMetacards . size ( ) - 1 ) ) ; } +" +513,"private URI buildResourceUri ( final String ... path ) { final UriBuilder uriBuilder = uriInfo . getBaseUriBuilder ( ) ; uriBuilder . segment ( path ) ; URI uri = uriBuilder . build ( ) ; try { final String scheme = getFirstHeaderValue ( PROXY_SCHEME_HTTP_HEADER , FORWARDED_PROTO_HTTP_HEADER ) ; final String hostHeaderValue = getFirstHeaderValue ( PROXY_HOST_HTTP_HEADER , FORWARDED_HOST_HTTP_HEADER ) ; final String portHeaderValue = getFirstHeaderValue ( PROXY_PORT_HTTP_HEADER , FORWARDED_PORT_HTTP_HEADER ) ; final String host = WebUtils . determineProxiedHost ( hostHeaderValue ) ; final String port = WebUtils . determineProxiedPort ( hostHeaderValue , portHeaderValue ) ; String allowedContextPaths = properties . getAllowedContextPaths ( ) ; String resourcePath = WebUtils . getResourcePath ( uri , httpServletRequest , allowedContextPaths ) ; int uriPort = uri . getPort ( ) ; if ( port != null ) { if ( StringUtils . isWhitespace ( port ) ) { uriPort = - 1 ; } else { try { uriPort = Integer . parseInt ( port ) ; } catch ( final NumberFormatException nfe ) { } } } uri = new URI ( ( StringUtils . isBlank ( scheme ) ) ? uri . getScheme ( ) : scheme , uri . getUserInfo ( ) , ( StringUtils . isBlank ( host ) ) ? uri . getHost ( ) : host , uriPort , resourcePath , uri . getQuery ( ) , uri . getFragment ( ) ) ; } catch ( final URISyntaxException use ) { throw new UriBuilderException ( use ) ; } return uri ; } +","private URI buildResourceUri ( final String ... path ) { final UriBuilder uriBuilder = uriInfo . getBaseUriBuilder ( ) ; uriBuilder . segment ( path ) ; URI uri = uriBuilder . build ( ) ; try { final String scheme = getFirstHeaderValue ( PROXY_SCHEME_HTTP_HEADER , FORWARDED_PROTO_HTTP_HEADER ) ; final String hostHeaderValue = getFirstHeaderValue ( PROXY_HOST_HTTP_HEADER , FORWARDED_HOST_HTTP_HEADER ) ; final String portHeaderValue = getFirstHeaderValue ( PROXY_PORT_HTTP_HEADER , FORWARDED_PORT_HTTP_HEADER ) ; final String host = WebUtils . determineProxiedHost ( hostHeaderValue ) ; final String port = WebUtils . determineProxiedPort ( hostHeaderValue , portHeaderValue ) ; String allowedContextPaths = properties . getAllowedContextPaths ( ) ; String resourcePath = WebUtils . getResourcePath ( uri , httpServletRequest , allowedContextPaths ) ; int uriPort = uri . getPort ( ) ; if ( port != null ) { if ( StringUtils . isWhitespace ( port ) ) { uriPort = - 1 ; } else { try { uriPort = Integer . parseInt ( port ) ; } catch ( final NumberFormatException nfe ) { logger . warn ( String . format ( ""Unable to parse proxy port HTTP header '%s'. Using port from request URI '%s'."" , port , uriPort ) ) ; } } } uri = new URI ( ( StringUtils . isBlank ( scheme ) ) ? uri . getScheme ( ) : scheme , uri . getUserInfo ( ) , ( StringUtils . isBlank ( host ) ) ? uri . getHost ( ) : host , uriPort , resourcePath , uri . getQuery ( ) , uri . 
getFragment ( ) ) ; } catch ( final URISyntaxException use ) { throw new UriBuilderException ( use ) ; } return uri ; } +" +514,"protected boolean abortExecution ( ) { final boolean abort = engine . exceedsRuntimeLimit ( ) ; if ( abort ) { final AutoFormattingTime aft = new AutoFormattingTime ( engine . getRuntimeLimitInNanoseconds ( ) , - 1 , 4 ) ; } return abort ; } +","protected boolean abortExecution ( ) { final boolean abort = engine . exceedsRuntimeLimit ( ) ; if ( abort ) { final AutoFormattingTime aft = new AutoFormattingTime ( engine . getRuntimeLimitInNanoseconds ( ) , - 1 , 4 ) ; logger . info ( ""Aborting execution (cleanly) because the runtime has exceeded the requested maximum "" + aft ) ; } return abort ; } +" +515,"private void removeProcessControlTags ( Process process , ConfigurationElementReport processReport ) { Long aliveTagId = process . getAliveTagId ( ) ; if ( aliveTagId != null ) { ConfigurationElementReport tagReport = new ConfigurationElementReport ( Action . REMOVE , Entity . CONTROLTAG , aliveTagId ) ; processReport . addSubReport ( tagReport ) ; controlTagConfigHandler . removeControlTag ( aliveTagId , tagReport ) ; } Long stateTagId = process . getStateTagId ( ) ; ConfigurationElementReport tagReport = new ConfigurationElementReport ( Action . REMOVE , Entity . CONTROLTAG , stateTagId ) ; processReport . addSubReport ( tagReport ) ; controlTagConfigHandler . removeControlTag ( stateTagId , tagReport ) ; } +","private void removeProcessControlTags ( Process process , ConfigurationElementReport processReport ) { LOGGER . debug ( ""Removing Process control tags for process "" + process . getId ( ) ) ; Long aliveTagId = process . getAliveTagId ( ) ; if ( aliveTagId != null ) { ConfigurationElementReport tagReport = new ConfigurationElementReport ( Action . REMOVE , Entity . CONTROLTAG , aliveTagId ) ; processReport . addSubReport ( tagReport ) ; controlTagConfigHandler . removeControlTag ( aliveTagId , tagReport ) ; } Long stateTagId = process . getStateTagId ( ) ; ConfigurationElementReport tagReport = new ConfigurationElementReport ( Action . REMOVE , Entity . CONTROLTAG , stateTagId ) ; processReport . addSubReport ( tagReport ) ; controlTagConfigHandler . removeControlTag ( stateTagId , tagReport ) ; } +" +516,"@ SuppressWarnings ( ""rawtypes"" ) public long getCommittedWindowId ( String appId , int operatorId ) { try { beginTransaction ( ) ; BytesMessage message = ( BytesMessage ) consumer . receive ( ) ; logger . debug ( ""Retrieved committed window messageId: {}, messageAppOperatorIdProp: {}"" , message . getJMSMessageID ( ) , message . getStringProperty ( APP_OPERATOR_ID ) ) ; long windowId = message . readLong ( ) ; writeWindowId ( appId , operatorId , windowId ) ; commitTransaction ( ) ; logger . debug ( ""metaQueueName: "" + metaQueueName ) ; logger . debug ( ""Retrieved windowId {}"" , windowId ) ; return windowId ; } catch ( JMSException ex ) { throw new RuntimeException ( ex ) ; } } +","@ SuppressWarnings ( ""rawtypes"" ) public long getCommittedWindowId ( String appId , int operatorId ) { logger . debug ( ""Getting committed windowId appId {} operatorId {}"" , appId , operatorId ) ; try { beginTransaction ( ) ; BytesMessage message = ( BytesMessage ) consumer . receive ( ) ; logger . debug ( ""Retrieved committed window messageId: {}, messageAppOperatorIdProp: {}"" , message . getJMSMessageID ( ) , message . getStringProperty ( APP_OPERATOR_ID ) ) ; long windowId = message . 
readLong ( ) ; writeWindowId ( appId , operatorId , windowId ) ; commitTransaction ( ) ; logger . debug ( ""metaQueueName: "" + metaQueueName ) ; logger . debug ( ""Retrieved windowId {}"" , windowId ) ; return windowId ; } catch ( JMSException ex ) { throw new RuntimeException ( ex ) ; } } +" +517,"@ SuppressWarnings ( ""rawtypes"" ) public long getCommittedWindowId ( String appId , int operatorId ) { logger . debug ( ""Getting committed windowId appId {} operatorId {}"" , appId , operatorId ) ; try { beginTransaction ( ) ; BytesMessage message = ( BytesMessage ) consumer . receive ( ) ; long windowId = message . readLong ( ) ; writeWindowId ( appId , operatorId , windowId ) ; commitTransaction ( ) ; logger . debug ( ""metaQueueName: "" + metaQueueName ) ; logger . debug ( ""Retrieved windowId {}"" , windowId ) ; return windowId ; } catch ( JMSException ex ) { throw new RuntimeException ( ex ) ; } } +","@ SuppressWarnings ( ""rawtypes"" ) public long getCommittedWindowId ( String appId , int operatorId ) { logger . debug ( ""Getting committed windowId appId {} operatorId {}"" , appId , operatorId ) ; try { beginTransaction ( ) ; BytesMessage message = ( BytesMessage ) consumer . receive ( ) ; logger . debug ( ""Retrieved committed window messageId: {}, messageAppOperatorIdProp: {}"" , message . getJMSMessageID ( ) , message . getStringProperty ( APP_OPERATOR_ID ) ) ; long windowId = message . readLong ( ) ; writeWindowId ( appId , operatorId , windowId ) ; commitTransaction ( ) ; logger . debug ( ""metaQueueName: "" + metaQueueName ) ; logger . debug ( ""Retrieved windowId {}"" , windowId ) ; return windowId ; } catch ( JMSException ex ) { throw new RuntimeException ( ex ) ; } } +" +518,"@ SuppressWarnings ( ""rawtypes"" ) public long getCommittedWindowId ( String appId , int operatorId ) { logger . debug ( ""Getting committed windowId appId {} operatorId {}"" , appId , operatorId ) ; try { beginTransaction ( ) ; BytesMessage message = ( BytesMessage ) consumer . receive ( ) ; logger . debug ( ""Retrieved committed window messageId: {}, messageAppOperatorIdProp: {}"" , message . getJMSMessageID ( ) , message . getStringProperty ( APP_OPERATOR_ID ) ) ; long windowId = message . readLong ( ) ; writeWindowId ( appId , operatorId , windowId ) ; commitTransaction ( ) ; logger . debug ( ""Retrieved windowId {}"" , windowId ) ; return windowId ; } catch ( JMSException ex ) { throw new RuntimeException ( ex ) ; } } +","@ SuppressWarnings ( ""rawtypes"" ) public long getCommittedWindowId ( String appId , int operatorId ) { logger . debug ( ""Getting committed windowId appId {} operatorId {}"" , appId , operatorId ) ; try { beginTransaction ( ) ; BytesMessage message = ( BytesMessage ) consumer . receive ( ) ; logger . debug ( ""Retrieved committed window messageId: {}, messageAppOperatorIdProp: {}"" , message . getJMSMessageID ( ) , message . getStringProperty ( APP_OPERATOR_ID ) ) ; long windowId = message . readLong ( ) ; writeWindowId ( appId , operatorId , windowId ) ; commitTransaction ( ) ; logger . debug ( ""metaQueueName: "" + metaQueueName ) ; logger . debug ( ""Retrieved windowId {}"" , windowId ) ; return windowId ; } catch ( JMSException ex ) { throw new RuntimeException ( ex ) ; } } +" +519,"@ SuppressWarnings ( ""rawtypes"" ) public long getCommittedWindowId ( String appId , int operatorId ) { logger . debug ( ""Getting committed windowId appId {} operatorId {}"" , appId , operatorId ) ; try { beginTransaction ( ) ; BytesMessage message = ( BytesMessage ) consumer . 
receive ( ) ; logger . debug ( ""Retrieved committed window messageId: {}, messageAppOperatorIdProp: {}"" , message . getJMSMessageID ( ) , message . getStringProperty ( APP_OPERATOR_ID ) ) ; long windowId = message . readLong ( ) ; writeWindowId ( appId , operatorId , windowId ) ; commitTransaction ( ) ; logger . debug ( ""metaQueueName: "" + metaQueueName ) ; return windowId ; } catch ( JMSException ex ) { throw new RuntimeException ( ex ) ; } } +","@ SuppressWarnings ( ""rawtypes"" ) public long getCommittedWindowId ( String appId , int operatorId ) { logger . debug ( ""Getting committed windowId appId {} operatorId {}"" , appId , operatorId ) ; try { beginTransaction ( ) ; BytesMessage message = ( BytesMessage ) consumer . receive ( ) ; logger . debug ( ""Retrieved committed window messageId: {}, messageAppOperatorIdProp: {}"" , message . getJMSMessageID ( ) , message . getStringProperty ( APP_OPERATOR_ID ) ) ; long windowId = message . readLong ( ) ; writeWindowId ( appId , operatorId , windowId ) ; commitTransaction ( ) ; logger . debug ( ""metaQueueName: "" + metaQueueName ) ; logger . debug ( ""Retrieved windowId {}"" , windowId ) ; return windowId ; } catch ( JMSException ex ) { throw new RuntimeException ( ex ) ; } } +" +520,"public void reply ( Message request , MessageReply reply ) { if ( Boolean . valueOf ( request . getHeaderEntry ( CloudBus . HEADER_NO_NEED_REPLY_MSG ) ) ) { if ( logger . isTraceEnabled ( ) ) { } return ; } AMQP . BasicProperties . Builder builder = new AMQP . BasicProperties . Builder ( ) ; reply . setAMQPProperties ( builder . deliveryMode ( 1 ) . build ( ) ) ; reply . getHeaders ( ) . put ( CloudBus . HEADER_IS_MESSAGE_REPLY , Boolean . TRUE . toString ( ) ) ; reply . putHeaderEntry ( CloudBus . HEADER_CORRELATION_ID , request . getId ( ) ) ; reply . setServiceId ( request . getHeaderEntry ( CloudBus . HEADER_REPLY_TO ) ) ; buildResponseMessageMetaData ( reply ) ; if ( request instanceof NeedReplyMessage ) { callReplyPreSendingExtensions ( reply , ( NeedReplyMessage ) request ) ; } wire . send ( reply , false ) ; } +","public void reply ( Message request , MessageReply reply ) { if ( Boolean . valueOf ( request . getHeaderEntry ( CloudBus . HEADER_NO_NEED_REPLY_MSG ) ) ) { if ( logger . isTraceEnabled ( ) ) { logger . trace ( String . format ( ""%s in message%s is set, drop reply%s"" , CloudBus . HEADER_NO_NEED_REPLY_MSG , wire . dumpMessage ( request ) , wire . dumpMessage ( reply ) ) ) ; } return ; } AMQP . BasicProperties . Builder builder = new AMQP . BasicProperties . Builder ( ) ; reply . setAMQPProperties ( builder . deliveryMode ( 1 ) . build ( ) ) ; reply . getHeaders ( ) . put ( CloudBus . HEADER_IS_MESSAGE_REPLY , Boolean . TRUE . toString ( ) ) ; reply . putHeaderEntry ( CloudBus . HEADER_CORRELATION_ID , request . getId ( ) ) ; reply . setServiceId ( request . getHeaderEntry ( CloudBus . HEADER_REPLY_TO ) ) ; buildResponseMessageMetaData ( reply ) ; if ( request instanceof NeedReplyMessage ) { callReplyPreSendingExtensions ( reply , ( NeedReplyMessage ) request ) ; } wire . send ( reply , false ) ; } +" +521,"public void stopBeans ( ) { } +","public void stopBeans ( ) { with ( new WithComponentLifeCycle ( ) { @ Override public void with ( ComponentLifecycle lifecycle ) { log . info ( ""stopping bean "" + lifecycle . getName ( ) ) ; lifecycle . stop ( ) ; } } ) ; } +" +522,"public void stopBeans ( ) { with ( new WithComponentLifeCycle ( ) { @ Override public void with ( ComponentLifecycle lifecycle ) { lifecycle . 
stop ( ) ; } } ) ; } +","public void stopBeans ( ) { with ( new WithComponentLifeCycle ( ) { @ Override public void with ( ComponentLifecycle lifecycle ) { log . info ( ""stopping bean "" + lifecycle . getName ( ) ) ; lifecycle . stop ( ) ; } } ) ; } +" +523,"public static void incrementFailedLoginAttemptsCounter ( final Principal principal ) { try { final PropertyKey < Integer > passwordAttemptsKey = StructrApp . key ( Principal . class , ""passwordAttempts"" ) ; Integer failedAttempts = principal . getProperty ( passwordAttemptsKey ) ; if ( failedAttempts == null ) { failedAttempts = 0 ; } failedAttempts ++ ; principal . setProperty ( passwordAttemptsKey , failedAttempts ) ; } catch ( FrameworkException fex ) { } } +","public static void incrementFailedLoginAttemptsCounter ( final Principal principal ) { try { final PropertyKey < Integer > passwordAttemptsKey = StructrApp . key ( Principal . class , ""passwordAttempts"" ) ; Integer failedAttempts = principal . getProperty ( passwordAttemptsKey ) ; if ( failedAttempts == null ) { failedAttempts = 0 ; } failedAttempts ++ ; principal . setProperty ( passwordAttemptsKey , failedAttempts ) ; } catch ( FrameworkException fex ) { logger . warn ( ""Exception while incrementing failed login attempts counter"" , fex ) ; } } +" +524,"public void printLayout ( ) { LOGGER . info ( HORIZONTAL_RULER ) ; LOGGER . info ( HORIZONTAL_RULER ) ; for ( WorkersPerAgent workersPerAgent : workersPerAgentList ) { Set < String > agentVersionSpecs = workersPerAgent . getVersionSpecs ( ) ; int agentMemberWorkerCount = workersPerAgent . count ( ""member"" ) ; int agentClientWorkerCount = workersPerAgent . workers . size ( ) - agentMemberWorkerCount ; int totalWorkerCount = agentMemberWorkerCount + agentClientWorkerCount ; String message = "" Agent %s (%s) members: %s, clients: %s"" ; if ( totalWorkerCount > 0 ) { message += "", mode: %s, version specs: %s"" ; } else { message += "" (no workers)"" ; } LOGGER . info ( format ( message , workersPerAgent . agent . formatIpAddresses ( ) , workersPerAgent . agent . getAddress ( ) , formatLong ( agentMemberWorkerCount , 2 ) , formatLong ( agentClientWorkerCount , 2 ) , padLeft ( workersPerAgent . agent . getAgentWorkerMode ( ) . toString ( ) , WORKER_MODE_LENGTH ) , agentVersionSpecs ) ) ; } } +","public void printLayout ( ) { LOGGER . info ( HORIZONTAL_RULER ) ; LOGGER . info ( ""Cluster layout"" ) ; LOGGER . info ( HORIZONTAL_RULER ) ; for ( WorkersPerAgent workersPerAgent : workersPerAgentList ) { Set < String > agentVersionSpecs = workersPerAgent . getVersionSpecs ( ) ; int agentMemberWorkerCount = workersPerAgent . count ( ""member"" ) ; int agentClientWorkerCount = workersPerAgent . workers . size ( ) - agentMemberWorkerCount ; int totalWorkerCount = agentMemberWorkerCount + agentClientWorkerCount ; String message = "" Agent %s (%s) members: %s, clients: %s"" ; if ( totalWorkerCount > 0 ) { message += "", mode: %s, version specs: %s"" ; } else { message += "" (no workers)"" ; } LOGGER . info ( format ( message , workersPerAgent . agent . formatIpAddresses ( ) , workersPerAgent . agent . getAddress ( ) , formatLong ( agentMemberWorkerCount , 2 ) , formatLong ( agentClientWorkerCount , 2 ) , padLeft ( workersPerAgent . agent . getAgentWorkerMode ( ) . toString ( ) , WORKER_MODE_LENGTH ) , agentVersionSpecs ) ) ; } } +" +525,"public void printLayout ( ) { LOGGER . info ( HORIZONTAL_RULER ) ; LOGGER . info ( ""Cluster layout"" ) ; LOGGER . 
info ( HORIZONTAL_RULER ) ; for ( WorkersPerAgent workersPerAgent : workersPerAgentList ) { Set < String > agentVersionSpecs = workersPerAgent . getVersionSpecs ( ) ; int agentMemberWorkerCount = workersPerAgent . count ( ""member"" ) ; int agentClientWorkerCount = workersPerAgent . workers . size ( ) - agentMemberWorkerCount ; int totalWorkerCount = agentMemberWorkerCount + agentClientWorkerCount ; String message = "" Agent %s (%s) members: %s, clients: %s"" ; if ( totalWorkerCount > 0 ) { message += "", mode: %s, version specs: %s"" ; } else { message += "" (no workers)"" ; } } } +","public void printLayout ( ) { LOGGER . info ( HORIZONTAL_RULER ) ; LOGGER . info ( ""Cluster layout"" ) ; LOGGER . info ( HORIZONTAL_RULER ) ; for ( WorkersPerAgent workersPerAgent : workersPerAgentList ) { Set < String > agentVersionSpecs = workersPerAgent . getVersionSpecs ( ) ; int agentMemberWorkerCount = workersPerAgent . count ( ""member"" ) ; int agentClientWorkerCount = workersPerAgent . workers . size ( ) - agentMemberWorkerCount ; int totalWorkerCount = agentMemberWorkerCount + agentClientWorkerCount ; String message = "" Agent %s (%s) members: %s, clients: %s"" ; if ( totalWorkerCount > 0 ) { message += "", mode: %s, version specs: %s"" ; } else { message += "" (no workers)"" ; } LOGGER . info ( format ( message , workersPerAgent . agent . formatIpAddresses ( ) , workersPerAgent . agent . getAddress ( ) , formatLong ( agentMemberWorkerCount , 2 ) , formatLong ( agentClientWorkerCount , 2 ) , padLeft ( workersPerAgent . agent . getAgentWorkerMode ( ) . toString ( ) , WORKER_MODE_LENGTH ) , agentVersionSpecs ) ) ; } } +" +526,"private void deletePermOrgunitInheritances ( ) throws BuildException { if ( delpermorgunitinheritances == null ) { return ; } for ( Delpermorgunitinheritance delpermorgunitinheritance : delpermorgunitinheritances ) { List < Relationship > orgs = delpermorgunitinheritance . getRelationships ( ) ; for ( Relationship relationship : orgs ) { try { dAdminMgr . deleteInheritance ( new OrgUnit ( relationship . getParent ( ) , OrgUnit . Type . PERM ) , new OrgUnit ( relationship . getChild ( ) , OrgUnit . Type . PERM ) ) ; } catch ( SecurityException se ) { LOG . warn ( ""deletePermOrgunitInheritances tenant={} parent={} child={} caught SecurityException={}"" , getTenant ( ) , relationship . getParent ( ) , relationship . getChild ( ) , se ) ; } } } } +","private void deletePermOrgunitInheritances ( ) throws BuildException { if ( delpermorgunitinheritances == null ) { return ; } for ( Delpermorgunitinheritance delpermorgunitinheritance : delpermorgunitinheritances ) { List < Relationship > orgs = delpermorgunitinheritance . getRelationships ( ) ; for ( Relationship relationship : orgs ) { LOG . info ( ""deletePermOrgunitInheritances tenant={} parent={} child={}"" , getTenant ( ) , relationship . getParent ( ) , relationship . getChild ( ) ) ; try { dAdminMgr . deleteInheritance ( new OrgUnit ( relationship . getParent ( ) , OrgUnit . Type . PERM ) , new OrgUnit ( relationship . getChild ( ) , OrgUnit . Type . PERM ) ) ; } catch ( SecurityException se ) { LOG . warn ( ""deletePermOrgunitInheritances tenant={} parent={} child={} caught SecurityException={}"" , getTenant ( ) , relationship . getParent ( ) , relationship . 
getChild ( ) , se ) ; } } } } +" +527,"private void deletePermOrgunitInheritances ( ) throws BuildException { if ( delpermorgunitinheritances == null ) { return ; } for ( Delpermorgunitinheritance delpermorgunitinheritance : delpermorgunitinheritances ) { List < Relationship > orgs = delpermorgunitinheritance . getRelationships ( ) ; for ( Relationship relationship : orgs ) { LOG . info ( ""deletePermOrgunitInheritances tenant={} parent={} child={}"" , getTenant ( ) , relationship . getParent ( ) , relationship . getChild ( ) ) ; try { dAdminMgr . deleteInheritance ( new OrgUnit ( relationship . getParent ( ) , OrgUnit . Type . PERM ) , new OrgUnit ( relationship . getChild ( ) , OrgUnit . Type . PERM ) ) ; } catch ( SecurityException se ) { } } } } +","private void deletePermOrgunitInheritances ( ) throws BuildException { if ( delpermorgunitinheritances == null ) { return ; } for ( Delpermorgunitinheritance delpermorgunitinheritance : delpermorgunitinheritances ) { List < Relationship > orgs = delpermorgunitinheritance . getRelationships ( ) ; for ( Relationship relationship : orgs ) { LOG . info ( ""deletePermOrgunitInheritances tenant={} parent={} child={}"" , getTenant ( ) , relationship . getParent ( ) , relationship . getChild ( ) ) ; try { dAdminMgr . deleteInheritance ( new OrgUnit ( relationship . getParent ( ) , OrgUnit . Type . PERM ) , new OrgUnit ( relationship . getChild ( ) , OrgUnit . Type . PERM ) ) ; } catch ( SecurityException se ) { LOG . warn ( ""deletePermOrgunitInheritances tenant={} parent={} child={} caught SecurityException={}"" , getTenant ( ) , relationship . getParent ( ) , relationship . getChild ( ) , se ) ; } } } } +" +528,"public void completed ( final HttpResponse response3 ) { try { httpclient . close ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Httpclient close failed"" + e ) ; } } +","public void completed ( final HttpResponse response3 ) { LOGGER . info ( request3 . getRequestLine ( ) + ""->"" + response3 . getStatusLine ( ) ) ; try { httpclient . close ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Httpclient close failed"" + e ) ; } } +" +529,"public void completed ( final HttpResponse response3 ) { LOGGER . info ( request3 . getRequestLine ( ) + ""->"" + response3 . getStatusLine ( ) ) ; try { httpclient . close ( ) ; } catch ( IOException e ) { } } +","public void completed ( final HttpResponse response3 ) { LOGGER . info ( request3 . getRequestLine ( ) + ""->"" + response3 . getStatusLine ( ) ) ; try { httpclient . close ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Httpclient close failed"" + e ) ; } } +" +530,"public void testFindDuplicates ( ) { Map < Integer , Set < DocumentProtos . DocumentMetadata > > duplicates = duplicateWorkService . findDuplicates ( documentWrappers , null ) ; for ( Map . Entry < Integer , Set < DocumentProtos . DocumentMetadata > > entry : duplicates . entrySet ( ) ) { for ( DocumentProtos . DocumentMetadata documentMetadata : entry . getValue ( ) ) { log . info ( ""------ title0: {}"" , DocumentWrapperUtils . getMainTitle ( documentMetadata ) ) ; } } Assert . assertEquals ( duplicates . size ( ) , 2 ) ; for ( Map . Entry < Integer , Set < DocumentProtos . DocumentMetadata > > entry : duplicates . entrySet ( ) ) { if ( entry . getValue ( ) . contains ( documentWrapper1 ) ) { Assert . assertEquals ( entry . getValue ( ) . size ( ) , 3 ) ; } else if ( entry . getValue ( ) . contains ( documentWrapper5 ) ) { Assert . assertEquals ( entry . getValue ( ) . size ( ) , 2 ) ; } else { Assert . 
fail ( ) ; } } } +","public void testFindDuplicates ( ) { Map < Integer , Set < DocumentProtos . DocumentMetadata > > duplicates = duplicateWorkService . findDuplicates ( documentWrappers , null ) ; for ( Map . Entry < Integer , Set < DocumentProtos . DocumentMetadata > > entry : duplicates . entrySet ( ) ) { log . info ( ""key : {}"" , """" + entry . getKey ( ) ) ; for ( DocumentProtos . DocumentMetadata documentMetadata : entry . getValue ( ) ) { log . info ( ""------ title0: {}"" , DocumentWrapperUtils . getMainTitle ( documentMetadata ) ) ; } } Assert . assertEquals ( duplicates . size ( ) , 2 ) ; for ( Map . Entry < Integer , Set < DocumentProtos . DocumentMetadata > > entry : duplicates . entrySet ( ) ) { if ( entry . getValue ( ) . contains ( documentWrapper1 ) ) { Assert . assertEquals ( entry . getValue ( ) . size ( ) , 3 ) ; } else if ( entry . getValue ( ) . contains ( documentWrapper5 ) ) { Assert . assertEquals ( entry . getValue ( ) . size ( ) , 2 ) ; } else { Assert . fail ( ) ; } } } +" +531,"public void testFindDuplicates ( ) { Map < Integer , Set < DocumentProtos . DocumentMetadata > > duplicates = duplicateWorkService . findDuplicates ( documentWrappers , null ) ; for ( Map . Entry < Integer , Set < DocumentProtos . DocumentMetadata > > entry : duplicates . entrySet ( ) ) { log . info ( ""key : {}"" , """" + entry . getKey ( ) ) ; for ( DocumentProtos . DocumentMetadata documentMetadata : entry . getValue ( ) ) { } } Assert . assertEquals ( duplicates . size ( ) , 2 ) ; for ( Map . Entry < Integer , Set < DocumentProtos . DocumentMetadata > > entry : duplicates . entrySet ( ) ) { if ( entry . getValue ( ) . contains ( documentWrapper1 ) ) { Assert . assertEquals ( entry . getValue ( ) . size ( ) , 3 ) ; } else if ( entry . getValue ( ) . contains ( documentWrapper5 ) ) { Assert . assertEquals ( entry . getValue ( ) . size ( ) , 2 ) ; } else { Assert . fail ( ) ; } } } +","public void testFindDuplicates ( ) { Map < Integer , Set < DocumentProtos . DocumentMetadata > > duplicates = duplicateWorkService . findDuplicates ( documentWrappers , null ) ; for ( Map . Entry < Integer , Set < DocumentProtos . DocumentMetadata > > entry : duplicates . entrySet ( ) ) { log . info ( ""key : {}"" , """" + entry . getKey ( ) ) ; for ( DocumentProtos . DocumentMetadata documentMetadata : entry . getValue ( ) ) { log . info ( ""------ title0: {}"" , DocumentWrapperUtils . getMainTitle ( documentMetadata ) ) ; } } Assert . assertEquals ( duplicates . size ( ) , 2 ) ; for ( Map . Entry < Integer , Set < DocumentProtos . DocumentMetadata > > entry : duplicates . entrySet ( ) ) { if ( entry . getValue ( ) . contains ( documentWrapper1 ) ) { Assert . assertEquals ( entry . getValue ( ) . size ( ) , 3 ) ; } else if ( entry . getValue ( ) . contains ( documentWrapper5 ) ) { Assert . assertEquals ( entry . getValue ( ) . size ( ) , 2 ) ; } else { Assert . fail ( ) ; } } } +" +532,"public StubDownloader build ( StubRunnerOptions stubRunnerOptions ) { return new AetherStubDownloader ( AetherStubDownloaderFactory . this . repoSystem , AetherStubDownloaderFactory . this . project . getRemoteProjectRepositories ( ) , repoSession , AetherStubDownloaderFactory . this . settings ) ; } +","public StubDownloader build ( StubRunnerOptions stubRunnerOptions ) { log . info ( ""Will download contracts using current build's Maven repository setup"" ) ; return new AetherStubDownloader ( AetherStubDownloaderFactory . this . repoSystem , AetherStubDownloaderFactory . this . project . 
getRemoteProjectRepositories ( ) , repoSession , AetherStubDownloaderFactory . this . settings ) ; } +" +533,"public Set < T > discoverPlugins ( Configuration conf ) { Set < T > detectedPlugins = new LinkedHashSet < > ( ) ; String classNamesStr = conf . get ( this . propertyName ) ; if ( StringUtils . isBlank ( classNamesStr ) ) { return detectedPlugins ; } Set < String > classNames = new LinkedHashSet < > ( ) ; Collections . addAll ( classNames , classNamesStr . split ( "","" ) ) ; for ( String className : classNames ) { try { Class < ? > plugin = StramUtils . classForName ( className , Object . class ) ; if ( klass . isAssignableFrom ( plugin ) ) { detectedPlugins . add ( StramUtils . newInstance ( plugin . asSubclass ( klass ) ) ) ; } else { } } catch ( IllegalArgumentException e ) { LOG . warn ( ""Could not load plugin {}"" , className , e ) ; } } return detectedPlugins ; } +","public Set < T > discoverPlugins ( Configuration conf ) { Set < T > detectedPlugins = new LinkedHashSet < > ( ) ; String classNamesStr = conf . get ( this . propertyName ) ; if ( StringUtils . isBlank ( classNamesStr ) ) { return detectedPlugins ; } Set < String > classNames = new LinkedHashSet < > ( ) ; Collections . addAll ( classNames , classNamesStr . split ( "","" ) ) ; for ( String className : classNames ) { try { Class < ? > plugin = StramUtils . classForName ( className , Object . class ) ; if ( klass . isAssignableFrom ( plugin ) ) { detectedPlugins . add ( StramUtils . newInstance ( plugin . asSubclass ( klass ) ) ) ; } else { LOG . info ( ""Skipping loading {} incompatible with {}"" , className , klass ) ; } } catch ( IllegalArgumentException e ) { LOG . warn ( ""Could not load plugin {}"" , className , e ) ; } } return detectedPlugins ; } +" +534,"public Set < T > discoverPlugins ( Configuration conf ) { Set < T > detectedPlugins = new LinkedHashSet < > ( ) ; String classNamesStr = conf . get ( this . propertyName ) ; if ( StringUtils . isBlank ( classNamesStr ) ) { return detectedPlugins ; } Set < String > classNames = new LinkedHashSet < > ( ) ; Collections . addAll ( classNames , classNamesStr . split ( "","" ) ) ; for ( String className : classNames ) { try { Class < ? > plugin = StramUtils . classForName ( className , Object . class ) ; if ( klass . isAssignableFrom ( plugin ) ) { detectedPlugins . add ( StramUtils . newInstance ( plugin . asSubclass ( klass ) ) ) ; } else { LOG . info ( ""Skipping loading {} incompatible with {}"" , className , klass ) ; } } catch ( IllegalArgumentException e ) { } } return detectedPlugins ; } +","public Set < T > discoverPlugins ( Configuration conf ) { Set < T > detectedPlugins = new LinkedHashSet < > ( ) ; String classNamesStr = conf . get ( this . propertyName ) ; if ( StringUtils . isBlank ( classNamesStr ) ) { return detectedPlugins ; } Set < String > classNames = new LinkedHashSet < > ( ) ; Collections . addAll ( classNames , classNamesStr . split ( "","" ) ) ; for ( String className : classNames ) { try { Class < ? > plugin = StramUtils . classForName ( className , Object . class ) ; if ( klass . isAssignableFrom ( plugin ) ) { detectedPlugins . add ( StramUtils . newInstance ( plugin . asSubclass ( klass ) ) ) ; } else { LOG . info ( ""Skipping loading {} incompatible with {}"" , className , klass ) ; } } catch ( IllegalArgumentException e ) { LOG . 
warn ( ""Could not load plugin {}"" , className , e ) ; } } return detectedPlugins ; } +" +535,"@ Test public void errorLevel_exceptionMessage_loggedInBefore_failsInLocator ( ) { Result result = runTest ( ExecuteTaskBefore . class ) ; assertThat ( getFailure ( result ) ) . isInstanceOf ( AssertionError . class ) . hasMessageContaining ( ""error"" ) . hasMessageContaining ( EXCEPTION_MESSAGE ) ; } +","@ Test public void errorLevel_exceptionMessage_loggedInBefore_failsInLocator ( ) { given ( LOCATOR , ( ) -> logger . error ( EXCEPTION_MESSAGE ) ) ; Result result = runTest ( ExecuteTaskBefore . class ) ; assertThat ( getFailure ( result ) ) . isInstanceOf ( AssertionError . class ) . hasMessageContaining ( ""error"" ) . hasMessageContaining ( EXCEPTION_MESSAGE ) ; } +" +536,"protected < TransformT extends PTransform < ? super PInput , POutput > > TransformEvaluator < TransformT > translate ( TransformHierarchy . Node node , TransformT transform ) { Map < TupleTag < ? > , PCollection < ? > > pValues ; if ( node . getInputs ( ) . isEmpty ( ) ) { pValues = node . getOutputs ( ) ; } else { pValues = node . getInputs ( ) ; } PCollection . IsBounded isNodeBounded = isBoundedCollection ( pValues . values ( ) ) ; return isNodeBounded . equals ( PCollection . IsBounded . BOUNDED ) ? translator . translateBounded ( transform ) : translator . translateUnbounded ( transform ) ; } +","protected < TransformT extends PTransform < ? super PInput , POutput > > TransformEvaluator < TransformT > translate ( TransformHierarchy . Node node , TransformT transform ) { Map < TupleTag < ? > , PCollection < ? > > pValues ; if ( node . getInputs ( ) . isEmpty ( ) ) { pValues = node . getOutputs ( ) ; } else { pValues = node . getInputs ( ) ; } PCollection . IsBounded isNodeBounded = isBoundedCollection ( pValues . values ( ) ) ; LOG . debug ( ""Translating {} as {}"" , transform , isNodeBounded ) ; return isNodeBounded . equals ( PCollection . IsBounded . BOUNDED ) ? translator . translateBounded ( transform ) : translator . translateUnbounded ( transform ) ; } +" +537,"private void removeConnectionFromNote ( String noteId , NotebookSocket socket ) { synchronized ( noteSocketMap ) { List < NotebookSocket > socketList = noteSocketMap . get ( noteId ) ; if ( socketList != null ) { socketList . remove ( socket ) ; } checkCollaborativeStatus ( noteId , socketList ) ; } } +","private void removeConnectionFromNote ( String noteId , NotebookSocket socket ) { LOGGER . debug ( ""Remove connection {} from note: {}"" , socket , noteId ) ; synchronized ( noteSocketMap ) { List < NotebookSocket > socketList = noteSocketMap . get ( noteId ) ; if ( socketList != null ) { socketList . remove ( socket ) ; } checkCollaborativeStatus ( noteId , socketList ) ; } } +" +538,"public Stream < Record > fetchAsStream ( ) throws IOException { int retryCount = 0 ; final QueryStatistics . Measurer statTracker = new QueryStatistics . Measurer ( ) ; statTracker . recordQueryPrepStart ( ) ; PreparedStatement preparedStatement = getPreparedStatementStreaming ( ) ; statTracker . recordQueryPrepEnd ( ) ; while ( true ) { try { statTracker . recordQueryExecStart ( ) ; RecordIterator itr = QueryFetcher . getQueryResultsStream ( preparedStatement , selectedColumns , dbConnection ) ; itr . addStatisticsMeasurer ( statTracker ) ; Iterable < Record > i = ( ) -> itr ; Stream < Record > stream = StreamSupport . stream ( i . spliterator ( ) , false ) ; stream = stream . onClose ( ( ) -> itr . 
close ( ) ) ; return stream ; } catch ( SQLRecoverableException e ) { if ( ++ retryCount > MAX_CONNECTION_RETRIES ) { throw new IOException ( e ) ; } } catch ( SQLException e ) { throw new IOException ( e ) ; } } } +","public Stream < Record > fetchAsStream ( ) throws IOException { int retryCount = 0 ; final QueryStatistics . Measurer statTracker = new QueryStatistics . Measurer ( ) ; statTracker . recordQueryPrepStart ( ) ; PreparedStatement preparedStatement = getPreparedStatementStreaming ( ) ; statTracker . recordQueryPrepEnd ( ) ; while ( true ) { try { statTracker . recordQueryExecStart ( ) ; RecordIterator itr = QueryFetcher . getQueryResultsStream ( preparedStatement , selectedColumns , dbConnection ) ; itr . addStatisticsMeasurer ( statTracker ) ; Iterable < Record > i = ( ) -> itr ; Stream < Record > stream = StreamSupport . stream ( i . spliterator ( ) , false ) ; stream = stream . onClose ( ( ) -> itr . close ( ) ) ; return stream ; } catch ( SQLRecoverableException e ) { LOG . error ( e . toString ( ) ) ; if ( ++ retryCount > MAX_CONNECTION_RETRIES ) { throw new IOException ( e ) ; } } catch ( SQLException e ) { throw new IOException ( e ) ; } } } +" +539,"private static void checkSampleTask ( ) { final List < SlowServiceCheckTask > tasksCopy = new ArrayList < > ( tasks ) ; final Iterator < SlowServiceCheckTask > iterator = tasksCopy . iterator ( ) ; final long currentTime = System . currentTimeMillis ( ) ; final String currentTimeAsString = getCurrentTime ( ) ; while ( iterator . hasNext ( ) ) { final SlowServiceCheckTask task = iterator . next ( ) ; long maxProcessTime = task . maxProcessTime . isPresent ( ) ? task . maxProcessTime . get ( ) : MAX_PROCESS_TIME ; MDC . put ( SoaSystemEnvProperties . KEY_LOGGER_SESSION_TID , task . sessionTid . map ( DapengUtil :: longToHexStr ) . orElse ( ""0"" ) ) ; if ( logger . isInfoEnabled ( ) ) { } final long ptime = currentTime - task . startTime ; if ( ptime >= maxProcessTime ) { final StackTraceElement [ ] stackElements = task . currentThread . getStackTrace ( ) ; if ( stackElements != null && stackElements . length > 0 ) { final StringBuilder builder = new StringBuilder ( task . toString ( ) ) ; builder . append ( ""--["" + currentTimeAsString + ""]:task info:["" + task . serviceName + "":"" + task . methodName + "":"" + task . versionName + ""]"" ) . append ( ""\n"" ) ; final String firstStackInfo = stackElements [ 0 ] . toString ( ) ; if ( lastStackInfo . containsKey ( task . currentThread ) && lastStackInfo . get ( task . currentThread ) . equals ( firstStackInfo ) ) { builder . append ( ""Same as last check..."" ) ; } else { builder . append ( ""-- The task has been executed "" ) . append ( ptime ) . append ( ""ms and Currently is executing:"" ) ; lastStackInfo . put ( task . currentThread , firstStackInfo ) ; builder . append ( ""\n at "" ) . append ( firstStackInfo ) ; for ( int i = 1 ; i < stackElements . length ; i ++ ) { builder . append ( ""\n at "" + stackElements [ i ] ) ; } } builder . append ( ""\n"" ) . append ( ""\n"" ) ; logger . error ( ""SlowProcess:{}"" , builder . toString ( ) ) ; } } else { lastStackInfo . remove ( task . currentThread ) ; } } tasksCopy . clear ( ) ; } +","private static void checkSampleTask ( ) { final List < SlowServiceCheckTask > tasksCopy = new ArrayList < > ( tasks ) ; final Iterator < SlowServiceCheckTask > iterator = tasksCopy . iterator ( ) ; final long currentTime = System . currentTimeMillis ( ) ; final String currentTimeAsString = getCurrentTime ( ) ; while ( iterator . 
hasNext ( ) ) { final SlowServiceCheckTask task = iterator . next ( ) ; long maxProcessTime = task . maxProcessTime . isPresent ( ) ? task . maxProcessTime . get ( ) : MAX_PROCESS_TIME ; MDC . put ( SoaSystemEnvProperties . KEY_LOGGER_SESSION_TID , task . sessionTid . map ( DapengUtil :: longToHexStr ) . orElse ( ""0"" ) ) ; if ( logger . isInfoEnabled ( ) ) { logger . info ( ""slow service check {}:{}:{};maxProcessTime:{} "" , task . serviceName , task . versionName , task . methodName , maxProcessTime ) ; } final long ptime = currentTime - task . startTime ; if ( ptime >= maxProcessTime ) { final StackTraceElement [ ] stackElements = task . currentThread . getStackTrace ( ) ; if ( stackElements != null && stackElements . length > 0 ) { final StringBuilder builder = new StringBuilder ( task . toString ( ) ) ; builder . append ( ""--["" + currentTimeAsString + ""]:task info:["" + task . serviceName + "":"" + task . methodName + "":"" + task . versionName + ""]"" ) . append ( ""\n"" ) ; final String firstStackInfo = stackElements [ 0 ] . toString ( ) ; if ( lastStackInfo . containsKey ( task . currentThread ) && lastStackInfo . get ( task . currentThread ) . equals ( firstStackInfo ) ) { builder . append ( ""Same as last check..."" ) ; } else { builder . append ( ""-- The task has been executed "" ) . append ( ptime ) . append ( ""ms and Currently is executing:"" ) ; lastStackInfo . put ( task . currentThread , firstStackInfo ) ; builder . append ( ""\n at "" ) . append ( firstStackInfo ) ; for ( int i = 1 ; i < stackElements . length ; i ++ ) { builder . append ( ""\n at "" + stackElements [ i ] ) ; } } builder . append ( ""\n"" ) . append ( ""\n"" ) ; logger . error ( ""SlowProcess:{}"" , builder . toString ( ) ) ; } } else { lastStackInfo . remove ( task . currentThread ) ; } } tasksCopy . clear ( ) ; } +" +540,"private static void checkSampleTask ( ) { final List < SlowServiceCheckTask > tasksCopy = new ArrayList < > ( tasks ) ; final Iterator < SlowServiceCheckTask > iterator = tasksCopy . iterator ( ) ; final long currentTime = System . currentTimeMillis ( ) ; final String currentTimeAsString = getCurrentTime ( ) ; while ( iterator . hasNext ( ) ) { final SlowServiceCheckTask task = iterator . next ( ) ; long maxProcessTime = task . maxProcessTime . isPresent ( ) ? task . maxProcessTime . get ( ) : MAX_PROCESS_TIME ; MDC . put ( SoaSystemEnvProperties . KEY_LOGGER_SESSION_TID , task . sessionTid . map ( DapengUtil :: longToHexStr ) . orElse ( ""0"" ) ) ; if ( logger . isInfoEnabled ( ) ) { logger . info ( ""slow service check {}:{}:{};maxProcessTime:{} "" , task . serviceName , task . versionName , task . methodName , maxProcessTime ) ; } final long ptime = currentTime - task . startTime ; if ( ptime >= maxProcessTime ) { final StackTraceElement [ ] stackElements = task . currentThread . getStackTrace ( ) ; if ( stackElements != null && stackElements . length > 0 ) { final StringBuilder builder = new StringBuilder ( task . toString ( ) ) ; builder . append ( ""--["" + currentTimeAsString + ""]:task info:["" + task . serviceName + "":"" + task . methodName + "":"" + task . versionName + ""]"" ) . append ( ""\n"" ) ; final String firstStackInfo = stackElements [ 0 ] . toString ( ) ; if ( lastStackInfo . containsKey ( task . currentThread ) && lastStackInfo . get ( task . currentThread ) . equals ( firstStackInfo ) ) { builder . append ( ""Same as last check..."" ) ; } else { builder . append ( ""-- The task has been executed "" ) . append ( ptime ) . 
append ( ""ms and Currently is executing:"" ) ; lastStackInfo . put ( task . currentThread , firstStackInfo ) ; builder . append ( ""\n at "" ) . append ( firstStackInfo ) ; for ( int i = 1 ; i < stackElements . length ; i ++ ) { builder . append ( ""\n at "" + stackElements [ i ] ) ; } } builder . append ( ""\n"" ) . append ( ""\n"" ) ; } } else { lastStackInfo . remove ( task . currentThread ) ; } } tasksCopy . clear ( ) ; } +","private static void checkSampleTask ( ) { final List < SlowServiceCheckTask > tasksCopy = new ArrayList < > ( tasks ) ; final Iterator < SlowServiceCheckTask > iterator = tasksCopy . iterator ( ) ; final long currentTime = System . currentTimeMillis ( ) ; final String currentTimeAsString = getCurrentTime ( ) ; while ( iterator . hasNext ( ) ) { final SlowServiceCheckTask task = iterator . next ( ) ; long maxProcessTime = task . maxProcessTime . isPresent ( ) ? task . maxProcessTime . get ( ) : MAX_PROCESS_TIME ; MDC . put ( SoaSystemEnvProperties . KEY_LOGGER_SESSION_TID , task . sessionTid . map ( DapengUtil :: longToHexStr ) . orElse ( ""0"" ) ) ; if ( logger . isInfoEnabled ( ) ) { logger . info ( ""slow service check {}:{}:{};maxProcessTime:{} "" , task . serviceName , task . versionName , task . methodName , maxProcessTime ) ; } final long ptime = currentTime - task . startTime ; if ( ptime >= maxProcessTime ) { final StackTraceElement [ ] stackElements = task . currentThread . getStackTrace ( ) ; if ( stackElements != null && stackElements . length > 0 ) { final StringBuilder builder = new StringBuilder ( task . toString ( ) ) ; builder . append ( ""--["" + currentTimeAsString + ""]:task info:["" + task . serviceName + "":"" + task . methodName + "":"" + task . versionName + ""]"" ) . append ( ""\n"" ) ; final String firstStackInfo = stackElements [ 0 ] . toString ( ) ; if ( lastStackInfo . containsKey ( task . currentThread ) && lastStackInfo . get ( task . currentThread ) . equals ( firstStackInfo ) ) { builder . append ( ""Same as last check..."" ) ; } else { builder . append ( ""-- The task has been executed "" ) . append ( ptime ) . append ( ""ms and Currently is executing:"" ) ; lastStackInfo . put ( task . currentThread , firstStackInfo ) ; builder . append ( ""\n at "" ) . append ( firstStackInfo ) ; for ( int i = 1 ; i < stackElements . length ; i ++ ) { builder . append ( ""\n at "" + stackElements [ i ] ) ; } } builder . append ( ""\n"" ) . append ( ""\n"" ) ; logger . error ( ""SlowProcess:{}"" , builder . toString ( ) ) ; } } else { lastStackInfo . remove ( task . currentThread ) ; } } tasksCopy . clear ( ) ; } +" +541,"public Long getRunningCollectionDataCount ( ) { Long collectionCountSinceLastRestart = 0L ; Client client = ClientBuilder . newBuilder ( ) . register ( JacksonFeature . class ) . build ( ) ; try { WebTarget webResource = client . target ( fetchMainUrl + ""/manage/count"" ) ; Response clientResponse = webResource . request ( MediaType . APPLICATION_JSON ) . get ( ) ; String jsonString = clientResponse . readEntity ( String . class ) ; JSONParser parser = new JSONParser ( ) ; JSONObject jsonResponse = ( JSONObject ) parser . parse ( jsonString ) ; collectionCountSinceLastRestart = ( Long ) jsonResponse . get ( ""count"" ) ; } catch ( Exception e ) { } return collectionCountSinceLastRestart ; } +","public Long getRunningCollectionDataCount ( ) { Long collectionCountSinceLastRestart = 0L ; Client client = ClientBuilder . newBuilder ( ) . register ( JacksonFeature . class ) . build ( ) ; try { WebTarget webResource = client . 
target ( fetchMainUrl + ""/manage/count"" ) ; Response clientResponse = webResource . request ( MediaType . APPLICATION_JSON ) . get ( ) ; String jsonString = clientResponse . readEntity ( String . class ) ; JSONParser parser = new JSONParser ( ) ; JSONObject jsonResponse = ( JSONObject ) parser . parse ( jsonString ) ; collectionCountSinceLastRestart = ( Long ) jsonResponse . get ( ""count"" ) ; } catch ( Exception e ) { logger . warn ( ""Error while fetching count"" , e ) ; } return collectionCountSinceLastRestart ; } +" +542,"protected boolean objectExistsInRegistry ( String pid ) throws StorageDeviceException { Connection conn = null ; PreparedStatement s = null ; ResultSet results = null ; try { conn = m_connectionPool . getReadOnlyConnection ( ) ; s = conn . prepareStatement ( REGISTERED_PID_QUERY ) ; s . setString ( 1 , pid ) ; results = s . executeQuery ( ) ; return results . next ( ) ; } catch ( SQLException sqle ) { throw new StorageDeviceException ( ""Unexpected error from SQL database: "" + sqle . getMessage ( ) , sqle ) ; } finally { try { if ( results != null ) { results . close ( ) ; } if ( s != null ) { s . close ( ) ; } if ( conn != null ) { m_connectionPool . free ( conn ) ; } } catch ( SQLException sqle ) { throw new StorageDeviceException ( ""Unexpected error from SQL database: "" + sqle . getMessage ( ) , sqle ) ; } finally { results = null ; s = null ; } } } +","protected boolean objectExistsInRegistry ( String pid ) throws StorageDeviceException { logger . debug ( ""Checking if {} already exists"" , pid ) ; Connection conn = null ; PreparedStatement s = null ; ResultSet results = null ; try { conn = m_connectionPool . getReadOnlyConnection ( ) ; s = conn . prepareStatement ( REGISTERED_PID_QUERY ) ; s . setString ( 1 , pid ) ; results = s . executeQuery ( ) ; return results . next ( ) ; } catch ( SQLException sqle ) { throw new StorageDeviceException ( ""Unexpected error from SQL database: "" + sqle . getMessage ( ) , sqle ) ; } finally { try { if ( results != null ) { results . close ( ) ; } if ( s != null ) { s . close ( ) ; } if ( conn != null ) { m_connectionPool . free ( conn ) ; } } catch ( SQLException sqle ) { throw new StorageDeviceException ( ""Unexpected error from SQL database: "" + sqle . getMessage ( ) , sqle ) ; } finally { results = null ; s = null ; } } } +" +543,"public void logout ( KeycloakDeployment deployment ) { try { ServerRequest . invokeLogout ( deployment , refreshToken ) ; } catch ( Exception e ) { } } +","public void logout ( KeycloakDeployment deployment ) { try { ServerRequest . invokeLogout ( deployment , refreshToken ) ; } catch ( Exception e ) { log . error ( ""failed to invoke remote logout"" , e ) ; } } +" +544,"public Status update ( String table , String key , Map < String , ByteIterator > values ) { try { Set < String > fields = values . keySet ( ) ; PreparedStatement stmt = updateStmts . get ( fields ) ; if ( stmt == null ) { Update updateStmt = QueryBuilder . update ( table ) ; for ( String field : fields ) { updateStmt . with ( QueryBuilder . set ( field , QueryBuilder . bindMarker ( ) ) ) ; } updateStmt . where ( QueryBuilder . eq ( YCSB_KEY , QueryBuilder . bindMarker ( ) ) ) ; stmt = session . prepare ( updateStmt ) ; stmt . setConsistencyLevel ( writeConsistencyLevel ) ; if ( trace ) { stmt . enableTracing ( ) ; } PreparedStatement prevStmt = updateStmts . putIfAbsent ( new HashSet ( fields ) , stmt ) ; if ( prevStmt != null ) { stmt = prevStmt ; } } if ( logger . isDebugEnabled ( ) ) { logger . 
debug ( ""key = {}"" , key ) ; for ( Map . Entry < String , ByteIterator > entry : values . entrySet ( ) ) { logger . debug ( ""{} = {}"" , entry . getKey ( ) , entry . getValue ( ) ) ; } } ColumnDefinitions vars = stmt . getVariables ( ) ; BoundStatement boundStmt = stmt . bind ( ) ; for ( int i = 0 ; i < vars . size ( ) - 1 ; i ++ ) { boundStmt . setString ( i , values . get ( vars . getName ( i ) ) . toString ( ) ) ; } boundStmt . setString ( vars . size ( ) - 1 , key ) ; session . execute ( boundStmt ) ; return Status . OK ; } catch ( Exception e ) { logger . error ( MessageFormatter . format ( ""Error updating key: {}"" , key ) . getMessage ( ) , e ) ; } return Status . ERROR ; } +","public Status update ( String table , String key , Map < String , ByteIterator > values ) { try { Set < String > fields = values . keySet ( ) ; PreparedStatement stmt = updateStmts . get ( fields ) ; if ( stmt == null ) { Update updateStmt = QueryBuilder . update ( table ) ; for ( String field : fields ) { updateStmt . with ( QueryBuilder . set ( field , QueryBuilder . bindMarker ( ) ) ) ; } updateStmt . where ( QueryBuilder . eq ( YCSB_KEY , QueryBuilder . bindMarker ( ) ) ) ; stmt = session . prepare ( updateStmt ) ; stmt . setConsistencyLevel ( writeConsistencyLevel ) ; if ( trace ) { stmt . enableTracing ( ) ; } PreparedStatement prevStmt = updateStmts . putIfAbsent ( new HashSet ( fields ) , stmt ) ; if ( prevStmt != null ) { stmt = prevStmt ; } } if ( logger . isDebugEnabled ( ) ) { logger . debug ( stmt . getQueryString ( ) ) ; logger . debug ( ""key = {}"" , key ) ; for ( Map . Entry < String , ByteIterator > entry : values . entrySet ( ) ) { logger . debug ( ""{} = {}"" , entry . getKey ( ) , entry . getValue ( ) ) ; } } ColumnDefinitions vars = stmt . getVariables ( ) ; BoundStatement boundStmt = stmt . bind ( ) ; for ( int i = 0 ; i < vars . size ( ) - 1 ; i ++ ) { boundStmt . setString ( i , values . get ( vars . getName ( i ) ) . toString ( ) ) ; } boundStmt . setString ( vars . size ( ) - 1 , key ) ; session . execute ( boundStmt ) ; return Status . OK ; } catch ( Exception e ) { logger . error ( MessageFormatter . format ( ""Error updating key: {}"" , key ) . getMessage ( ) , e ) ; } return Status . ERROR ; } +" +545,"public Status update ( String table , String key , Map < String , ByteIterator > values ) { try { Set < String > fields = values . keySet ( ) ; PreparedStatement stmt = updateStmts . get ( fields ) ; if ( stmt == null ) { Update updateStmt = QueryBuilder . update ( table ) ; for ( String field : fields ) { updateStmt . with ( QueryBuilder . set ( field , QueryBuilder . bindMarker ( ) ) ) ; } updateStmt . where ( QueryBuilder . eq ( YCSB_KEY , QueryBuilder . bindMarker ( ) ) ) ; stmt = session . prepare ( updateStmt ) ; stmt . setConsistencyLevel ( writeConsistencyLevel ) ; if ( trace ) { stmt . enableTracing ( ) ; } PreparedStatement prevStmt = updateStmts . putIfAbsent ( new HashSet ( fields ) , stmt ) ; if ( prevStmt != null ) { stmt = prevStmt ; } } if ( logger . isDebugEnabled ( ) ) { logger . debug ( stmt . getQueryString ( ) ) ; for ( Map . Entry < String , ByteIterator > entry : values . entrySet ( ) ) { logger . debug ( ""{} = {}"" , entry . getKey ( ) , entry . getValue ( ) ) ; } } ColumnDefinitions vars = stmt . getVariables ( ) ; BoundStatement boundStmt = stmt . bind ( ) ; for ( int i = 0 ; i < vars . size ( ) - 1 ; i ++ ) { boundStmt . setString ( i , values . get ( vars . getName ( i ) ) . toString ( ) ) ; } boundStmt . setString ( vars . 
size ( ) - 1 , key ) ; session . execute ( boundStmt ) ; return Status . OK ; } catch ( Exception e ) { logger . error ( MessageFormatter . format ( ""Error updating key: {}"" , key ) . getMessage ( ) , e ) ; } return Status . ERROR ; } +","public Status update ( String table , String key , Map < String , ByteIterator > values ) { try { Set < String > fields = values . keySet ( ) ; PreparedStatement stmt = updateStmts . get ( fields ) ; if ( stmt == null ) { Update updateStmt = QueryBuilder . update ( table ) ; for ( String field : fields ) { updateStmt . with ( QueryBuilder . set ( field , QueryBuilder . bindMarker ( ) ) ) ; } updateStmt . where ( QueryBuilder . eq ( YCSB_KEY , QueryBuilder . bindMarker ( ) ) ) ; stmt = session . prepare ( updateStmt ) ; stmt . setConsistencyLevel ( writeConsistencyLevel ) ; if ( trace ) { stmt . enableTracing ( ) ; } PreparedStatement prevStmt = updateStmts . putIfAbsent ( new HashSet ( fields ) , stmt ) ; if ( prevStmt != null ) { stmt = prevStmt ; } } if ( logger . isDebugEnabled ( ) ) { logger . debug ( stmt . getQueryString ( ) ) ; logger . debug ( ""key = {}"" , key ) ; for ( Map . Entry < String , ByteIterator > entry : values . entrySet ( ) ) { logger . debug ( ""{} = {}"" , entry . getKey ( ) , entry . getValue ( ) ) ; } } ColumnDefinitions vars = stmt . getVariables ( ) ; BoundStatement boundStmt = stmt . bind ( ) ; for ( int i = 0 ; i < vars . size ( ) - 1 ; i ++ ) { boundStmt . setString ( i , values . get ( vars . getName ( i ) ) . toString ( ) ) ; } boundStmt . setString ( vars . size ( ) - 1 , key ) ; session . execute ( boundStmt ) ; return Status . OK ; } catch ( Exception e ) { logger . error ( MessageFormatter . format ( ""Error updating key: {}"" , key ) . getMessage ( ) , e ) ; } return Status . ERROR ; } +" +546,"public Status update ( String table , String key , Map < String , ByteIterator > values ) { try { Set < String > fields = values . keySet ( ) ; PreparedStatement stmt = updateStmts . get ( fields ) ; if ( stmt == null ) { Update updateStmt = QueryBuilder . update ( table ) ; for ( String field : fields ) { updateStmt . with ( QueryBuilder . set ( field , QueryBuilder . bindMarker ( ) ) ) ; } updateStmt . where ( QueryBuilder . eq ( YCSB_KEY , QueryBuilder . bindMarker ( ) ) ) ; stmt = session . prepare ( updateStmt ) ; stmt . setConsistencyLevel ( writeConsistencyLevel ) ; if ( trace ) { stmt . enableTracing ( ) ; } PreparedStatement prevStmt = updateStmts . putIfAbsent ( new HashSet ( fields ) , stmt ) ; if ( prevStmt != null ) { stmt = prevStmt ; } } if ( logger . isDebugEnabled ( ) ) { logger . debug ( stmt . getQueryString ( ) ) ; logger . debug ( ""key = {}"" , key ) ; for ( Map . Entry < String , ByteIterator > entry : values . entrySet ( ) ) { } } ColumnDefinitions vars = stmt . getVariables ( ) ; BoundStatement boundStmt = stmt . bind ( ) ; for ( int i = 0 ; i < vars . size ( ) - 1 ; i ++ ) { boundStmt . setString ( i , values . get ( vars . getName ( i ) ) . toString ( ) ) ; } boundStmt . setString ( vars . size ( ) - 1 , key ) ; session . execute ( boundStmt ) ; return Status . OK ; } catch ( Exception e ) { logger . error ( MessageFormatter . format ( ""Error updating key: {}"" , key ) . getMessage ( ) , e ) ; } return Status . ERROR ; } +","public Status update ( String table , String key , Map < String , ByteIterator > values ) { try { Set < String > fields = values . keySet ( ) ; PreparedStatement stmt = updateStmts . get ( fields ) ; if ( stmt == null ) { Update updateStmt = QueryBuilder . 
update ( table ) ; for ( String field : fields ) { updateStmt . with ( QueryBuilder . set ( field , QueryBuilder . bindMarker ( ) ) ) ; } updateStmt . where ( QueryBuilder . eq ( YCSB_KEY , QueryBuilder . bindMarker ( ) ) ) ; stmt = session . prepare ( updateStmt ) ; stmt . setConsistencyLevel ( writeConsistencyLevel ) ; if ( trace ) { stmt . enableTracing ( ) ; } PreparedStatement prevStmt = updateStmts . putIfAbsent ( new HashSet ( fields ) , stmt ) ; if ( prevStmt != null ) { stmt = prevStmt ; } } if ( logger . isDebugEnabled ( ) ) { logger . debug ( stmt . getQueryString ( ) ) ; logger . debug ( ""key = {}"" , key ) ; for ( Map . Entry < String , ByteIterator > entry : values . entrySet ( ) ) { logger . debug ( ""{} = {}"" , entry . getKey ( ) , entry . getValue ( ) ) ; } } ColumnDefinitions vars = stmt . getVariables ( ) ; BoundStatement boundStmt = stmt . bind ( ) ; for ( int i = 0 ; i < vars . size ( ) - 1 ; i ++ ) { boundStmt . setString ( i , values . get ( vars . getName ( i ) ) . toString ( ) ) ; } boundStmt . setString ( vars . size ( ) - 1 , key ) ; session . execute ( boundStmt ) ; return Status . OK ; } catch ( Exception e ) { logger . error ( MessageFormatter . format ( ""Error updating key: {}"" , key ) . getMessage ( ) , e ) ; } return Status . ERROR ; } +" +547,"public Status update ( String table , String key , Map < String , ByteIterator > values ) { try { Set < String > fields = values . keySet ( ) ; PreparedStatement stmt = updateStmts . get ( fields ) ; if ( stmt == null ) { Update updateStmt = QueryBuilder . update ( table ) ; for ( String field : fields ) { updateStmt . with ( QueryBuilder . set ( field , QueryBuilder . bindMarker ( ) ) ) ; } updateStmt . where ( QueryBuilder . eq ( YCSB_KEY , QueryBuilder . bindMarker ( ) ) ) ; stmt = session . prepare ( updateStmt ) ; stmt . setConsistencyLevel ( writeConsistencyLevel ) ; if ( trace ) { stmt . enableTracing ( ) ; } PreparedStatement prevStmt = updateStmts . putIfAbsent ( new HashSet ( fields ) , stmt ) ; if ( prevStmt != null ) { stmt = prevStmt ; } } if ( logger . isDebugEnabled ( ) ) { logger . debug ( stmt . getQueryString ( ) ) ; logger . debug ( ""key = {}"" , key ) ; for ( Map . Entry < String , ByteIterator > entry : values . entrySet ( ) ) { logger . debug ( ""{} = {}"" , entry . getKey ( ) , entry . getValue ( ) ) ; } } ColumnDefinitions vars = stmt . getVariables ( ) ; BoundStatement boundStmt = stmt . bind ( ) ; for ( int i = 0 ; i < vars . size ( ) - 1 ; i ++ ) { boundStmt . setString ( i , values . get ( vars . getName ( i ) ) . toString ( ) ) ; } boundStmt . setString ( vars . size ( ) - 1 , key ) ; session . execute ( boundStmt ) ; return Status . OK ; } catch ( Exception e ) { } return Status . ERROR ; } +","public Status update ( String table , String key , Map < String , ByteIterator > values ) { try { Set < String > fields = values . keySet ( ) ; PreparedStatement stmt = updateStmts . get ( fields ) ; if ( stmt == null ) { Update updateStmt = QueryBuilder . update ( table ) ; for ( String field : fields ) { updateStmt . with ( QueryBuilder . set ( field , QueryBuilder . bindMarker ( ) ) ) ; } updateStmt . where ( QueryBuilder . eq ( YCSB_KEY , QueryBuilder . bindMarker ( ) ) ) ; stmt = session . prepare ( updateStmt ) ; stmt . setConsistencyLevel ( writeConsistencyLevel ) ; if ( trace ) { stmt . enableTracing ( ) ; } PreparedStatement prevStmt = updateStmts . putIfAbsent ( new HashSet ( fields ) , stmt ) ; if ( prevStmt != null ) { stmt = prevStmt ; } } if ( logger . 
isDebugEnabled ( ) ) { logger . debug ( stmt . getQueryString ( ) ) ; logger . debug ( ""key = {}"" , key ) ; for ( Map . Entry < String , ByteIterator > entry : values . entrySet ( ) ) { logger . debug ( ""{} = {}"" , entry . getKey ( ) , entry . getValue ( ) ) ; } } ColumnDefinitions vars = stmt . getVariables ( ) ; BoundStatement boundStmt = stmt . bind ( ) ; for ( int i = 0 ; i < vars . size ( ) - 1 ; i ++ ) { boundStmt . setString ( i , values . get ( vars . getName ( i ) ) . toString ( ) ) ; } boundStmt . setString ( vars . size ( ) - 1 , key ) ; session . execute ( boundStmt ) ; return Status . OK ; } catch ( Exception e ) { logger . error ( MessageFormatter . format ( ""Error updating key: {}"" , key ) . getMessage ( ) , e ) ; } return Status . ERROR ; } +" +548,"public static void get ( HttpServletRequest request , HttpServletResponse response , boolean v1 ) throws IOException { response . setContentType ( ""application/json; charset=utf-8"" ) ; boolean allParameters = false ; String param = request . getParameter ( ""param"" ) ; String url = request . getRequestURI ( ) ; String fileName = ManagementInterfaceUtils . getFileName ( url ) ; if ( ! ( fileName . startsWith ( ""agent_"" ) ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Agent file name must start with 'agent_'\""}"" ) ; return ; } if ( param == null ) { allParameters = true ; } else if ( param . equals ( """" ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Parse error, empty parameter (param_name). Check it for errors\""}"" ) ; LOGGER . error ( ""Parse error, empty parameter (param_name). Check it for errors."" ) ; return ; } String pathToFile ; if ( v1 ) { pathToFile = url . substring ( 29 ) ; } else { pathToFile = url . substring ( 26 ) ; } File file = new File ( pathToFile ) ; if ( file . exists ( ) ) { FileInputStream fileInputStream = new FileInputStream ( file ) ; Properties properties = new Properties ( ) ; properties . load ( fileInputStream ) ; JSONObject jsonObject = new JSONObject ( ) ; if ( allParameters ) { jsonObject . put ( ""agent"" , properties ) ; } else { String property = properties . getProperty ( param ) ; if ( property != null ) { jsonObject . put ( param , properties . getProperty ( param ) ) ; } else { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""result\"":\""Param '"" + param + ""' not found in the agent\""}"" ) ; return ; } } response . getWriter ( ) . println ( ""{\""success\"":\""true\"",\""result\"":"" + jsonObject + """" ) ; LOGGER . debug ( jsonObject ) ; response . setStatus ( HttpServletResponse . SC_OK ) ; } else { response . getWriter ( ) . println ( ""{\""success\"":\""false\"",\""result\"":\""File not found in the path received\""}"" ) ; LOGGER . debug ( ""File not found in the path received"" ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; } } +","public static void get ( HttpServletRequest request , HttpServletResponse response , boolean v1 ) throws IOException { response . setContentType ( ""application/json; charset=utf-8"" ) ; boolean allParameters = false ; String param = request . getParameter ( ""param"" ) ; String url = request . getRequestURI ( ) ; String fileName = ManagementInterfaceUtils . getFileName ( url ) ; if ( ! ( fileName . 
startsWith ( ""agent_"" ) ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Agent file name must start with 'agent_'\""}"" ) ; LOGGER . error ( ""Agent file name must start with 'agent_'."" ) ; return ; } if ( param == null ) { allParameters = true ; } else if ( param . equals ( """" ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Parse error, empty parameter (param_name). Check it for errors\""}"" ) ; LOGGER . error ( ""Parse error, empty parameter (param_name). Check it for errors."" ) ; return ; } String pathToFile ; if ( v1 ) { pathToFile = url . substring ( 29 ) ; } else { pathToFile = url . substring ( 26 ) ; } File file = new File ( pathToFile ) ; if ( file . exists ( ) ) { FileInputStream fileInputStream = new FileInputStream ( file ) ; Properties properties = new Properties ( ) ; properties . load ( fileInputStream ) ; JSONObject jsonObject = new JSONObject ( ) ; if ( allParameters ) { jsonObject . put ( ""agent"" , properties ) ; } else { String property = properties . getProperty ( param ) ; if ( property != null ) { jsonObject . put ( param , properties . getProperty ( param ) ) ; } else { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""result\"":\""Param '"" + param + ""' not found in the agent\""}"" ) ; return ; } } response . getWriter ( ) . println ( ""{\""success\"":\""true\"",\""result\"":"" + jsonObject + """" ) ; LOGGER . debug ( jsonObject ) ; response . setStatus ( HttpServletResponse . SC_OK ) ; } else { response . getWriter ( ) . println ( ""{\""success\"":\""false\"",\""result\"":\""File not found in the path received\""}"" ) ; LOGGER . debug ( ""File not found in the path received"" ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; } } +" +549,"public static void get ( HttpServletRequest request , HttpServletResponse response , boolean v1 ) throws IOException { response . setContentType ( ""application/json; charset=utf-8"" ) ; boolean allParameters = false ; String param = request . getParameter ( ""param"" ) ; String url = request . getRequestURI ( ) ; String fileName = ManagementInterfaceUtils . getFileName ( url ) ; if ( ! ( fileName . startsWith ( ""agent_"" ) ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Agent file name must start with 'agent_'\""}"" ) ; LOGGER . error ( ""Agent file name must start with 'agent_'."" ) ; return ; } if ( param == null ) { allParameters = true ; } else if ( param . equals ( """" ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Parse error, empty parameter (param_name). Check it for errors\""}"" ) ; return ; } String pathToFile ; if ( v1 ) { pathToFile = url . substring ( 29 ) ; } else { pathToFile = url . substring ( 26 ) ; } File file = new File ( pathToFile ) ; if ( file . exists ( ) ) { FileInputStream fileInputStream = new FileInputStream ( file ) ; Properties properties = new Properties ( ) ; properties . load ( fileInputStream ) ; JSONObject jsonObject = new JSONObject ( ) ; if ( allParameters ) { jsonObject . put ( ""agent"" , properties ) ; } else { String property = properties . 
getProperty ( param ) ; if ( property != null ) { jsonObject . put ( param , properties . getProperty ( param ) ) ; } else { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""result\"":\""Param '"" + param + ""' not found in the agent\""}"" ) ; return ; } } response . getWriter ( ) . println ( ""{\""success\"":\""true\"",\""result\"":"" + jsonObject + """" ) ; LOGGER . debug ( jsonObject ) ; response . setStatus ( HttpServletResponse . SC_OK ) ; } else { response . getWriter ( ) . println ( ""{\""success\"":\""false\"",\""result\"":\""File not found in the path received\""}"" ) ; LOGGER . debug ( ""File not found in the path received"" ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; } } +","public static void get ( HttpServletRequest request , HttpServletResponse response , boolean v1 ) throws IOException { response . setContentType ( ""application/json; charset=utf-8"" ) ; boolean allParameters = false ; String param = request . getParameter ( ""param"" ) ; String url = request . getRequestURI ( ) ; String fileName = ManagementInterfaceUtils . getFileName ( url ) ; if ( ! ( fileName . startsWith ( ""agent_"" ) ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Agent file name must start with 'agent_'\""}"" ) ; LOGGER . error ( ""Agent file name must start with 'agent_'."" ) ; return ; } if ( param == null ) { allParameters = true ; } else if ( param . equals ( """" ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Parse error, empty parameter (param_name). Check it for errors\""}"" ) ; LOGGER . error ( ""Parse error, empty parameter (param_name). Check it for errors."" ) ; return ; } String pathToFile ; if ( v1 ) { pathToFile = url . substring ( 29 ) ; } else { pathToFile = url . substring ( 26 ) ; } File file = new File ( pathToFile ) ; if ( file . exists ( ) ) { FileInputStream fileInputStream = new FileInputStream ( file ) ; Properties properties = new Properties ( ) ; properties . load ( fileInputStream ) ; JSONObject jsonObject = new JSONObject ( ) ; if ( allParameters ) { jsonObject . put ( ""agent"" , properties ) ; } else { String property = properties . getProperty ( param ) ; if ( property != null ) { jsonObject . put ( param , properties . getProperty ( param ) ) ; } else { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""result\"":\""Param '"" + param + ""' not found in the agent\""}"" ) ; return ; } } response . getWriter ( ) . println ( ""{\""success\"":\""true\"",\""result\"":"" + jsonObject + """" ) ; LOGGER . debug ( jsonObject ) ; response . setStatus ( HttpServletResponse . SC_OK ) ; } else { response . getWriter ( ) . println ( ""{\""success\"":\""false\"",\""result\"":\""File not found in the path received\""}"" ) ; LOGGER . debug ( ""File not found in the path received"" ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; } } +" +550,"public static void get ( HttpServletRequest request , HttpServletResponse response , boolean v1 ) throws IOException { response . setContentType ( ""application/json; charset=utf-8"" ) ; boolean allParameters = false ; String param = request . getParameter ( ""param"" ) ; String url = request . 
getRequestURI ( ) ; String fileName = ManagementInterfaceUtils . getFileName ( url ) ; if ( ! ( fileName . startsWith ( ""agent_"" ) ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Agent file name must start with 'agent_'\""}"" ) ; LOGGER . error ( ""Agent file name must start with 'agent_'."" ) ; return ; } if ( param == null ) { allParameters = true ; } else if ( param . equals ( """" ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Parse error, empty parameter (param_name). Check it for errors\""}"" ) ; LOGGER . error ( ""Parse error, empty parameter (param_name). Check it for errors."" ) ; return ; } String pathToFile ; if ( v1 ) { pathToFile = url . substring ( 29 ) ; } else { pathToFile = url . substring ( 26 ) ; } File file = new File ( pathToFile ) ; if ( file . exists ( ) ) { FileInputStream fileInputStream = new FileInputStream ( file ) ; Properties properties = new Properties ( ) ; properties . load ( fileInputStream ) ; JSONObject jsonObject = new JSONObject ( ) ; if ( allParameters ) { jsonObject . put ( ""agent"" , properties ) ; } else { String property = properties . getProperty ( param ) ; if ( property != null ) { jsonObject . put ( param , properties . getProperty ( param ) ) ; } else { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""result\"":\""Param '"" + param + ""' not found in the agent\""}"" ) ; return ; } } response . getWriter ( ) . println ( ""{\""success\"":\""true\"",\""result\"":"" + jsonObject + """" ) ; response . setStatus ( HttpServletResponse . SC_OK ) ; } else { response . getWriter ( ) . println ( ""{\""success\"":\""false\"",\""result\"":\""File not found in the path received\""}"" ) ; LOGGER . debug ( ""File not found in the path received"" ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; } } +","public static void get ( HttpServletRequest request , HttpServletResponse response , boolean v1 ) throws IOException { response . setContentType ( ""application/json; charset=utf-8"" ) ; boolean allParameters = false ; String param = request . getParameter ( ""param"" ) ; String url = request . getRequestURI ( ) ; String fileName = ManagementInterfaceUtils . getFileName ( url ) ; if ( ! ( fileName . startsWith ( ""agent_"" ) ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Agent file name must start with 'agent_'\""}"" ) ; LOGGER . error ( ""Agent file name must start with 'agent_'."" ) ; return ; } if ( param == null ) { allParameters = true ; } else if ( param . equals ( """" ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Parse error, empty parameter (param_name). Check it for errors\""}"" ) ; LOGGER . error ( ""Parse error, empty parameter (param_name). Check it for errors."" ) ; return ; } String pathToFile ; if ( v1 ) { pathToFile = url . substring ( 29 ) ; } else { pathToFile = url . substring ( 26 ) ; } File file = new File ( pathToFile ) ; if ( file . exists ( ) ) { FileInputStream fileInputStream = new FileInputStream ( file ) ; Properties properties = new Properties ( ) ; properties . 
load ( fileInputStream ) ; JSONObject jsonObject = new JSONObject ( ) ; if ( allParameters ) { jsonObject . put ( ""agent"" , properties ) ; } else { String property = properties . getProperty ( param ) ; if ( property != null ) { jsonObject . put ( param , properties . getProperty ( param ) ) ; } else { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""result\"":\""Param '"" + param + ""' not found in the agent\""}"" ) ; return ; } } response . getWriter ( ) . println ( ""{\""success\"":\""true\"",\""result\"":"" + jsonObject + """" ) ; LOGGER . debug ( jsonObject ) ; response . setStatus ( HttpServletResponse . SC_OK ) ; } else { response . getWriter ( ) . println ( ""{\""success\"":\""false\"",\""result\"":\""File not found in the path received\""}"" ) ; LOGGER . debug ( ""File not found in the path received"" ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; } } +" +551,"public static void get ( HttpServletRequest request , HttpServletResponse response , boolean v1 ) throws IOException { response . setContentType ( ""application/json; charset=utf-8"" ) ; boolean allParameters = false ; String param = request . getParameter ( ""param"" ) ; String url = request . getRequestURI ( ) ; String fileName = ManagementInterfaceUtils . getFileName ( url ) ; if ( ! ( fileName . startsWith ( ""agent_"" ) ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Agent file name must start with 'agent_'\""}"" ) ; LOGGER . error ( ""Agent file name must start with 'agent_'."" ) ; return ; } if ( param == null ) { allParameters = true ; } else if ( param . equals ( """" ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Parse error, empty parameter (param_name). Check it for errors\""}"" ) ; LOGGER . error ( ""Parse error, empty parameter (param_name). Check it for errors."" ) ; return ; } String pathToFile ; if ( v1 ) { pathToFile = url . substring ( 29 ) ; } else { pathToFile = url . substring ( 26 ) ; } File file = new File ( pathToFile ) ; if ( file . exists ( ) ) { FileInputStream fileInputStream = new FileInputStream ( file ) ; Properties properties = new Properties ( ) ; properties . load ( fileInputStream ) ; JSONObject jsonObject = new JSONObject ( ) ; if ( allParameters ) { jsonObject . put ( ""agent"" , properties ) ; } else { String property = properties . getProperty ( param ) ; if ( property != null ) { jsonObject . put ( param , properties . getProperty ( param ) ) ; } else { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""result\"":\""Param '"" + param + ""' not found in the agent\""}"" ) ; return ; } } response . getWriter ( ) . println ( ""{\""success\"":\""true\"",\""result\"":"" + jsonObject + """" ) ; LOGGER . debug ( jsonObject ) ; response . setStatus ( HttpServletResponse . SC_OK ) ; } else { response . getWriter ( ) . println ( ""{\""success\"":\""false\"",\""result\"":\""File not found in the path received\""}"" ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; } } +","public static void get ( HttpServletRequest request , HttpServletResponse response , boolean v1 ) throws IOException { response . 
setContentType ( ""application/json; charset=utf-8"" ) ; boolean allParameters = false ; String param = request . getParameter ( ""param"" ) ; String url = request . getRequestURI ( ) ; String fileName = ManagementInterfaceUtils . getFileName ( url ) ; if ( ! ( fileName . startsWith ( ""agent_"" ) ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Agent file name must start with 'agent_'\""}"" ) ; LOGGER . error ( ""Agent file name must start with 'agent_'."" ) ; return ; } if ( param == null ) { allParameters = true ; } else if ( param . equals ( """" ) ) { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""error\"":\""Parse error, empty parameter (param_name). Check it for errors\""}"" ) ; LOGGER . error ( ""Parse error, empty parameter (param_name). Check it for errors."" ) ; return ; } String pathToFile ; if ( v1 ) { pathToFile = url . substring ( 29 ) ; } else { pathToFile = url . substring ( 26 ) ; } File file = new File ( pathToFile ) ; if ( file . exists ( ) ) { FileInputStream fileInputStream = new FileInputStream ( file ) ; Properties properties = new Properties ( ) ; properties . load ( fileInputStream ) ; JSONObject jsonObject = new JSONObject ( ) ; if ( allParameters ) { jsonObject . put ( ""agent"" , properties ) ; } else { String property = properties . getProperty ( param ) ; if ( property != null ) { jsonObject . put ( param , properties . getProperty ( param ) ) ; } else { response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; response . getWriter ( ) . println ( ""{\""success\"":\""false\"","" + ""\""result\"":\""Param '"" + param + ""' not found in the agent\""}"" ) ; return ; } } response . getWriter ( ) . println ( ""{\""success\"":\""true\"",\""result\"":"" + jsonObject + """" ) ; LOGGER . debug ( jsonObject ) ; response . setStatus ( HttpServletResponse . SC_OK ) ; } else { response . getWriter ( ) . println ( ""{\""success\"":\""false\"",\""result\"":\""File not found in the path received\""}"" ) ; LOGGER . debug ( ""File not found in the path received"" ) ; response . setStatus ( HttpServletResponse . SC_BAD_REQUEST ) ; } } +" +552,"public PutObjectResult putObject ( PutObjectRequest putObjectRequest , AmazonS3 s3Client ) { String s3BucketName = putObjectRequest . getBucketName ( ) ; InputStream inputStream = putObjectRequest . getInputStream ( ) ; ObjectMetadata metadata = putObjectRequest . getMetadata ( ) ; if ( metadata == null ) { metadata = new ObjectMetadata ( ) ; } File file = putObjectRequest . getFile ( ) ; if ( file != null ) { try { inputStream = new FileInputStream ( file ) ; metadata . setContentLength ( file . length ( ) ) ; } catch ( FileNotFoundException e ) { throw new IllegalArgumentException ( ""File not found "" + file , e ) ; } } String s3ObjectKey = putObjectRequest . getKey ( ) ; String s3ObjectVersion = MOCK_S3_BUCKET_NAME_VERSIONING_ENABLED . equals ( putObjectRequest . getBucketName ( ) ) ? UUID . randomUUID ( ) . toString ( ) : null ; String s3ObjectKeyVersion = s3ObjectKey + ( s3ObjectVersion != null ? s3ObjectVersion : """" ) ; byte [ ] s3ObjectData ; try { s3ObjectData = IOUtils . toByteArray ( inputStream ) ; metadata . setContentLength ( s3ObjectData . length ) ; } catch ( IOException e ) { throw new IllegalArgumentException ( ""Error converting input stream into byte array"" , e ) ; } finally { try { inputStream . 
close ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error closing stream "" + inputStream , e ) ; } } metadata . setLastModified ( new Date ( ) ) ; MockS3Bucket mockS3Bucket = getOrCreateBucket ( s3BucketName ) ; MockS3Object mockS3Object = new MockS3Object ( ) ; mockS3Object . setKey ( s3ObjectKey ) ; mockS3Object . setVersion ( s3ObjectVersion ) ; mockS3Object . setData ( s3ObjectData ) ; mockS3Object . setObjectMetadata ( metadata ) ; if ( putObjectRequest . getTagging ( ) != null ) { mockS3Object . setTags ( putObjectRequest . getTagging ( ) . getTagSet ( ) ) ; } mockS3Bucket . getObjects ( ) . put ( s3ObjectKey , mockS3Object ) ; mockS3Bucket . getVersions ( ) . put ( s3ObjectKeyVersion , mockS3Object ) ; return new PutObjectResult ( ) ; } +","public PutObjectResult putObject ( PutObjectRequest putObjectRequest , AmazonS3 s3Client ) { LOGGER . debug ( ""putObject(): putObjectRequest.getBucketName() = "" + putObjectRequest . getBucketName ( ) + "", putObjectRequest.getKey() = "" + putObjectRequest . getKey ( ) ) ; String s3BucketName = putObjectRequest . getBucketName ( ) ; InputStream inputStream = putObjectRequest . getInputStream ( ) ; ObjectMetadata metadata = putObjectRequest . getMetadata ( ) ; if ( metadata == null ) { metadata = new ObjectMetadata ( ) ; } File file = putObjectRequest . getFile ( ) ; if ( file != null ) { try { inputStream = new FileInputStream ( file ) ; metadata . setContentLength ( file . length ( ) ) ; } catch ( FileNotFoundException e ) { throw new IllegalArgumentException ( ""File not found "" + file , e ) ; } } String s3ObjectKey = putObjectRequest . getKey ( ) ; String s3ObjectVersion = MOCK_S3_BUCKET_NAME_VERSIONING_ENABLED . equals ( putObjectRequest . getBucketName ( ) ) ? UUID . randomUUID ( ) . toString ( ) : null ; String s3ObjectKeyVersion = s3ObjectKey + ( s3ObjectVersion != null ? s3ObjectVersion : """" ) ; byte [ ] s3ObjectData ; try { s3ObjectData = IOUtils . toByteArray ( inputStream ) ; metadata . setContentLength ( s3ObjectData . length ) ; } catch ( IOException e ) { throw new IllegalArgumentException ( ""Error converting input stream into byte array"" , e ) ; } finally { try { inputStream . close ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error closing stream "" + inputStream , e ) ; } } metadata . setLastModified ( new Date ( ) ) ; MockS3Bucket mockS3Bucket = getOrCreateBucket ( s3BucketName ) ; MockS3Object mockS3Object = new MockS3Object ( ) ; mockS3Object . setKey ( s3ObjectKey ) ; mockS3Object . setVersion ( s3ObjectVersion ) ; mockS3Object . setData ( s3ObjectData ) ; mockS3Object . setObjectMetadata ( metadata ) ; if ( putObjectRequest . getTagging ( ) != null ) { mockS3Object . setTags ( putObjectRequest . getTagging ( ) . getTagSet ( ) ) ; } mockS3Bucket . getObjects ( ) . put ( s3ObjectKey , mockS3Object ) ; mockS3Bucket . getVersions ( ) . put ( s3ObjectKeyVersion , mockS3Object ) ; return new PutObjectResult ( ) ; } +" +553,"public PutObjectResult putObject ( PutObjectRequest putObjectRequest , AmazonS3 s3Client ) { LOGGER . debug ( ""putObject(): putObjectRequest.getBucketName() = "" + putObjectRequest . getBucketName ( ) + "", putObjectRequest.getKey() = "" + putObjectRequest . getKey ( ) ) ; String s3BucketName = putObjectRequest . getBucketName ( ) ; InputStream inputStream = putObjectRequest . getInputStream ( ) ; ObjectMetadata metadata = putObjectRequest . getMetadata ( ) ; if ( metadata == null ) { metadata = new ObjectMetadata ( ) ; } File file = putObjectRequest . 
getFile ( ) ; if ( file != null ) { try { inputStream = new FileInputStream ( file ) ; metadata . setContentLength ( file . length ( ) ) ; } catch ( FileNotFoundException e ) { throw new IllegalArgumentException ( ""File not found "" + file , e ) ; } } String s3ObjectKey = putObjectRequest . getKey ( ) ; String s3ObjectVersion = MOCK_S3_BUCKET_NAME_VERSIONING_ENABLED . equals ( putObjectRequest . getBucketName ( ) ) ? UUID . randomUUID ( ) . toString ( ) : null ; String s3ObjectKeyVersion = s3ObjectKey + ( s3ObjectVersion != null ? s3ObjectVersion : """" ) ; byte [ ] s3ObjectData ; try { s3ObjectData = IOUtils . toByteArray ( inputStream ) ; metadata . setContentLength ( s3ObjectData . length ) ; } catch ( IOException e ) { throw new IllegalArgumentException ( ""Error converting input stream into byte array"" , e ) ; } finally { try { inputStream . close ( ) ; } catch ( IOException e ) { } } metadata . setLastModified ( new Date ( ) ) ; MockS3Bucket mockS3Bucket = getOrCreateBucket ( s3BucketName ) ; MockS3Object mockS3Object = new MockS3Object ( ) ; mockS3Object . setKey ( s3ObjectKey ) ; mockS3Object . setVersion ( s3ObjectVersion ) ; mockS3Object . setData ( s3ObjectData ) ; mockS3Object . setObjectMetadata ( metadata ) ; if ( putObjectRequest . getTagging ( ) != null ) { mockS3Object . setTags ( putObjectRequest . getTagging ( ) . getTagSet ( ) ) ; } mockS3Bucket . getObjects ( ) . put ( s3ObjectKey , mockS3Object ) ; mockS3Bucket . getVersions ( ) . put ( s3ObjectKeyVersion , mockS3Object ) ; return new PutObjectResult ( ) ; } +","public PutObjectResult putObject ( PutObjectRequest putObjectRequest , AmazonS3 s3Client ) { LOGGER . debug ( ""putObject(): putObjectRequest.getBucketName() = "" + putObjectRequest . getBucketName ( ) + "", putObjectRequest.getKey() = "" + putObjectRequest . getKey ( ) ) ; String s3BucketName = putObjectRequest . getBucketName ( ) ; InputStream inputStream = putObjectRequest . getInputStream ( ) ; ObjectMetadata metadata = putObjectRequest . getMetadata ( ) ; if ( metadata == null ) { metadata = new ObjectMetadata ( ) ; } File file = putObjectRequest . getFile ( ) ; if ( file != null ) { try { inputStream = new FileInputStream ( file ) ; metadata . setContentLength ( file . length ( ) ) ; } catch ( FileNotFoundException e ) { throw new IllegalArgumentException ( ""File not found "" + file , e ) ; } } String s3ObjectKey = putObjectRequest . getKey ( ) ; String s3ObjectVersion = MOCK_S3_BUCKET_NAME_VERSIONING_ENABLED . equals ( putObjectRequest . getBucketName ( ) ) ? UUID . randomUUID ( ) . toString ( ) : null ; String s3ObjectKeyVersion = s3ObjectKey + ( s3ObjectVersion != null ? s3ObjectVersion : """" ) ; byte [ ] s3ObjectData ; try { s3ObjectData = IOUtils . toByteArray ( inputStream ) ; metadata . setContentLength ( s3ObjectData . length ) ; } catch ( IOException e ) { throw new IllegalArgumentException ( ""Error converting input stream into byte array"" , e ) ; } finally { try { inputStream . close ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error closing stream "" + inputStream , e ) ; } } metadata . setLastModified ( new Date ( ) ) ; MockS3Bucket mockS3Bucket = getOrCreateBucket ( s3BucketName ) ; MockS3Object mockS3Object = new MockS3Object ( ) ; mockS3Object . setKey ( s3ObjectKey ) ; mockS3Object . setVersion ( s3ObjectVersion ) ; mockS3Object . setData ( s3ObjectData ) ; mockS3Object . setObjectMetadata ( metadata ) ; if ( putObjectRequest . getTagging ( ) != null ) { mockS3Object . setTags ( putObjectRequest . getTagging ( ) . 
getTagSet ( ) ) ; } mockS3Bucket . getObjects ( ) . put ( s3ObjectKey , mockS3Object ) ; mockS3Bucket . getVersions ( ) . put ( s3ObjectKeyVersion , mockS3Object ) ; return new PutObjectResult ( ) ; } +" +554,"public void close ( ) throws GuacamoleException { AuthenticatedUser authenticatedUser = session . getAuthenticatedUser ( ) ; fireTunnelClosedEvent ( authenticatedUser , authenticatedUser . getCredentials ( ) , tunnel ) ; long connectionEndTime = System . currentTimeMillis ( ) ; long duration = connectionEndTime - connectionStartTime ; try { session . removeTunnel ( getUUID ( ) . toString ( ) ) ; super . close ( ) ; } catch ( GuacamoleUnauthorizedException e ) { if ( authenticationService . destroyGuacamoleSession ( authToken ) ) logger . debug ( ""Implicitly invalidated session for token \""{}\""."" , authToken ) ; throw e ; } } +","public void close ( ) throws GuacamoleException { AuthenticatedUser authenticatedUser = session . getAuthenticatedUser ( ) ; fireTunnelClosedEvent ( authenticatedUser , authenticatedUser . getCredentials ( ) , tunnel ) ; long connectionEndTime = System . currentTimeMillis ( ) ; long duration = connectionEndTime - connectionStartTime ; logger . info ( ""User \""{}\"" disconnected from {} \""{}\"". Duration: {} milliseconds"" , session . getAuthenticatedUser ( ) . getIdentifier ( ) , type . NAME , id , duration ) ; try { session . removeTunnel ( getUUID ( ) . toString ( ) ) ; super . close ( ) ; } catch ( GuacamoleUnauthorizedException e ) { if ( authenticationService . destroyGuacamoleSession ( authToken ) ) logger . debug ( ""Implicitly invalidated session for token \""{}\""."" , authToken ) ; throw e ; } } +" +555,"public void close ( ) throws GuacamoleException { AuthenticatedUser authenticatedUser = session . getAuthenticatedUser ( ) ; fireTunnelClosedEvent ( authenticatedUser , authenticatedUser . getCredentials ( ) , tunnel ) ; long connectionEndTime = System . currentTimeMillis ( ) ; long duration = connectionEndTime - connectionStartTime ; logger . info ( ""User \""{}\"" disconnected from {} \""{}\"". Duration: {} milliseconds"" , session . getAuthenticatedUser ( ) . getIdentifier ( ) , type . NAME , id , duration ) ; try { session . removeTunnel ( getUUID ( ) . toString ( ) ) ; super . close ( ) ; } catch ( GuacamoleUnauthorizedException e ) { if ( authenticationService . destroyGuacamoleSession ( authToken ) ) throw e ; } } +","public void close ( ) throws GuacamoleException { AuthenticatedUser authenticatedUser = session . getAuthenticatedUser ( ) ; fireTunnelClosedEvent ( authenticatedUser , authenticatedUser . getCredentials ( ) , tunnel ) ; long connectionEndTime = System . currentTimeMillis ( ) ; long duration = connectionEndTime - connectionStartTime ; logger . info ( ""User \""{}\"" disconnected from {} \""{}\"". Duration: {} milliseconds"" , session . getAuthenticatedUser ( ) . getIdentifier ( ) , type . NAME , id , duration ) ; try { session . removeTunnel ( getUUID ( ) . toString ( ) ) ; super . close ( ) ; } catch ( GuacamoleUnauthorizedException e ) { if ( authenticationService . destroyGuacamoleSession ( authToken ) ) logger . debug ( ""Implicitly invalidated session for token \""{}\""."" , authToken ) ; throw e ; } } +" +556,"@ POST ( path = ""/backend/config/gauges/update"" , permission = ""agent:config:edit:gauge"" ) String updateGauge ( @ BindAgentId String agentId , @ BindRequest GaugeConfigDto gaugeConfigDto ) throws Exception { GaugeConfig gaugeConfig = gaugeConfigDto . convert ( ) ; String version = gaugeConfigDto . 
version ( ) . get ( ) ; try { configRepository . updateGaugeConfig ( agentId , gaugeConfig , version ) ; } catch ( DuplicateMBeanObjectNameException e ) { throw new JsonServiceException ( CONFLICT , ""mbeanObjectName"" ) ; } return getGaugeResponse ( agentId , gaugeConfig ) ; } +","@ POST ( path = ""/backend/config/gauges/update"" , permission = ""agent:config:edit:gauge"" ) String updateGauge ( @ BindAgentId String agentId , @ BindRequest GaugeConfigDto gaugeConfigDto ) throws Exception { GaugeConfig gaugeConfig = gaugeConfigDto . convert ( ) ; String version = gaugeConfigDto . version ( ) . get ( ) ; try { configRepository . updateGaugeConfig ( agentId , gaugeConfig , version ) ; } catch ( DuplicateMBeanObjectNameException e ) { logger . debug ( e . getMessage ( ) , e ) ; throw new JsonServiceException ( CONFLICT , ""mbeanObjectName"" ) ; } return getGaugeResponse ( agentId , gaugeConfig ) ; } +" +557,"public void backup ( final Collection colection , final SAXSerializer serializer ) { for ( final Plug plugin : jacks . values ( ) ) { if ( plugin instanceof BackupHandler ) { try { ( ( BackupHandler ) plugin ) . backup ( colection , serializer ) ; } catch ( final Exception e ) { } } } } +","public void backup ( final Collection colection , final SAXSerializer serializer ) { for ( final Plug plugin : jacks . values ( ) ) { if ( plugin instanceof BackupHandler ) { try { ( ( BackupHandler ) plugin ) . backup ( colection , serializer ) ; } catch ( final Exception e ) { LOG . error ( e . getMessage ( ) , e ) ; } } } } +" +558,"public void onSuccessfulConnection ( Address address ) { } +","public void onSuccessfulConnection ( Address address ) { logger . info ( ""Successful connection: "" + address ) ; } +" +559,"public SimpleExchangeManagementResponseMessageType listOrganizationByHCIDList ( ListOrganizationsByHCIDListRequestMessageType request ) { SimpleExchangeManagementResponseMessageType response ; String exchangeName = StringUtils . isBlank ( request . getExchangeName ( ) ) ? null : request . getExchangeName ( ) . trim ( ) ; if ( CollectionUtils . isEmpty ( request . getHcidList ( ) ) ) { return buildSimpleResponse ( Boolean . FALSE , ""HCID is required."" ) ; } try { OrganizationListType orglist = buildOrganizationListType ( getExchangeManager ( ) . getOrganizationSet ( request . getHcidList ( ) , exchangeName ) ) ; response = buildSimpleResponse ( Boolean . TRUE , ACT_SUCCESSFUL ) ; response . setOrganizationList ( orglist ) ; } catch ( ExchangeManagerException ex ) { LOG . error ( ""listOrganizationByHCIDList encountered error: {}"" , ex . getLocalizedMessage ( ) , ex ) ; response = buildSimpleResponse ( Boolean . FALSE , ACT_FAIL ) ; } return response ; } +","public SimpleExchangeManagementResponseMessageType listOrganizationByHCIDList ( ListOrganizationsByHCIDListRequestMessageType request ) { LOG . trace ( ""listOrganizationByHCIDList--call"" ) ; SimpleExchangeManagementResponseMessageType response ; String exchangeName = StringUtils . isBlank ( request . getExchangeName ( ) ) ? null : request . getExchangeName ( ) . trim ( ) ; if ( CollectionUtils . isEmpty ( request . getHcidList ( ) ) ) { return buildSimpleResponse ( Boolean . FALSE , ""HCID is required."" ) ; } try { OrganizationListType orglist = buildOrganizationListType ( getExchangeManager ( ) . getOrganizationSet ( request . getHcidList ( ) , exchangeName ) ) ; response = buildSimpleResponse ( Boolean . TRUE , ACT_SUCCESSFUL ) ; response . setOrganizationList ( orglist ) ; } catch ( ExchangeManagerException ex ) { LOG . 
error ( ""listOrganizationByHCIDList encountered error: {}"" , ex . getLocalizedMessage ( ) , ex ) ; response = buildSimpleResponse ( Boolean . FALSE , ACT_FAIL ) ; } return response ; } +" +560,"public SimpleExchangeManagementResponseMessageType listOrganizationByHCIDList ( ListOrganizationsByHCIDListRequestMessageType request ) { LOG . trace ( ""listOrganizationByHCIDList--call"" ) ; SimpleExchangeManagementResponseMessageType response ; String exchangeName = StringUtils . isBlank ( request . getExchangeName ( ) ) ? null : request . getExchangeName ( ) . trim ( ) ; if ( CollectionUtils . isEmpty ( request . getHcidList ( ) ) ) { return buildSimpleResponse ( Boolean . FALSE , ""HCID is required."" ) ; } try { OrganizationListType orglist = buildOrganizationListType ( getExchangeManager ( ) . getOrganizationSet ( request . getHcidList ( ) , exchangeName ) ) ; response = buildSimpleResponse ( Boolean . TRUE , ACT_SUCCESSFUL ) ; response . setOrganizationList ( orglist ) ; } catch ( ExchangeManagerException ex ) { response = buildSimpleResponse ( Boolean . FALSE , ACT_FAIL ) ; } return response ; } +","public SimpleExchangeManagementResponseMessageType listOrganizationByHCIDList ( ListOrganizationsByHCIDListRequestMessageType request ) { LOG . trace ( ""listOrganizationByHCIDList--call"" ) ; SimpleExchangeManagementResponseMessageType response ; String exchangeName = StringUtils . isBlank ( request . getExchangeName ( ) ) ? null : request . getExchangeName ( ) . trim ( ) ; if ( CollectionUtils . isEmpty ( request . getHcidList ( ) ) ) { return buildSimpleResponse ( Boolean . FALSE , ""HCID is required."" ) ; } try { OrganizationListType orglist = buildOrganizationListType ( getExchangeManager ( ) . getOrganizationSet ( request . getHcidList ( ) , exchangeName ) ) ; response = buildSimpleResponse ( Boolean . TRUE , ACT_SUCCESSFUL ) ; response . setOrganizationList ( orglist ) ; } catch ( ExchangeManagerException ex ) { LOG . error ( ""listOrganizationByHCIDList encountered error: {}"" , ex . getLocalizedMessage ( ) , ex ) ; response = buildSimpleResponse ( Boolean . FALSE , ACT_FAIL ) ; } return response ; } +" +561,"public void addReasonerFactories ( Set < ProtegeOWLReasonerPlugin > plugins ) { for ( ProtegeOWLReasonerPlugin plugin : plugins ) { try { ProtegeOWLReasonerInfo factory = plugin . newInstance ( ) ; factory . initialise ( ) ; reasonerFactories . add ( factory ) ; } catch ( Throwable t ) { } } } +","public void addReasonerFactories ( Set < ProtegeOWLReasonerPlugin > plugins ) { for ( ProtegeOWLReasonerPlugin plugin : plugins ) { try { ProtegeOWLReasonerInfo factory = plugin . newInstance ( ) ; factory . initialise ( ) ; reasonerFactories . add ( factory ) ; } catch ( Throwable t ) { logger . warn ( ""An error occurred whilst instantiating the '{}' reasoner. Error: {}"" , plugin . getName ( ) , t ) ; } } } +" +562,"protected void registerDevice ( String ownId , OpenWebNetThingHandler thingHandler ) { if ( registeredDevices . containsKey ( ownId ) ) { } registeredDevices . put ( ownId , thingHandler ) ; logger . debug ( ""registered device ownId={}, thing={}"" , ownId , thingHandler . getThing ( ) . getUID ( ) ) ; } +","protected void registerDevice ( String ownId , OpenWebNetThingHandler thingHandler ) { if ( registeredDevices . containsKey ( ownId ) ) { logger . warn ( ""registering device with an existing ownId={}"" , ownId ) ; } registeredDevices . put ( ownId , thingHandler ) ; logger . debug ( ""registered device ownId={}, thing={}"" , ownId , thingHandler . getThing ( ) . 
getUID ( ) ) ; } +" +563,"protected void registerDevice ( String ownId , OpenWebNetThingHandler thingHandler ) { if ( registeredDevices . containsKey ( ownId ) ) { logger . warn ( ""registering device with an existing ownId={}"" , ownId ) ; } registeredDevices . put ( ownId , thingHandler ) ; } +","protected void registerDevice ( String ownId , OpenWebNetThingHandler thingHandler ) { if ( registeredDevices . containsKey ( ownId ) ) { logger . warn ( ""registering device with an existing ownId={}"" , ownId ) ; } registeredDevices . put ( ownId , thingHandler ) ; logger . debug ( ""registered device ownId={}, thing={}"" , ownId , thingHandler . getThing ( ) . getUID ( ) ) ; } +" +564,"public MbZielobjZusatz findById ( sernet . gs . reveng . MbZielobjZusatzId id ) { try { MbZielobjZusatz instance = ( MbZielobjZusatz ) sessionFactory . getCurrentSession ( ) . get ( ""sernet.gs.reveng.MbZielobjZusatz"" , id ) ; if ( instance == null ) { log . debug ( ""get successful, no instance found"" ) ; } else { log . debug ( ""get successful, instance found"" ) ; } return instance ; } catch ( RuntimeException re ) { log . error ( ""get failed"" , re ) ; throw re ; } } +","public MbZielobjZusatz findById ( sernet . gs . reveng . MbZielobjZusatzId id ) { log . debug ( ""getting MbZielobjZusatz instance with id: "" + id ) ; try { MbZielobjZusatz instance = ( MbZielobjZusatz ) sessionFactory . getCurrentSession ( ) . get ( ""sernet.gs.reveng.MbZielobjZusatz"" , id ) ; if ( instance == null ) { log . debug ( ""get successful, no instance found"" ) ; } else { log . debug ( ""get successful, instance found"" ) ; } return instance ; } catch ( RuntimeException re ) { log . error ( ""get failed"" , re ) ; throw re ; } } +" +565,"public MbZielobjZusatz findById ( sernet . gs . reveng . MbZielobjZusatzId id ) { log . debug ( ""getting MbZielobjZusatz instance with id: "" + id ) ; try { MbZielobjZusatz instance = ( MbZielobjZusatz ) sessionFactory . getCurrentSession ( ) . get ( ""sernet.gs.reveng.MbZielobjZusatz"" , id ) ; if ( instance == null ) { } else { log . debug ( ""get successful, instance found"" ) ; } return instance ; } catch ( RuntimeException re ) { log . error ( ""get failed"" , re ) ; throw re ; } } +","public MbZielobjZusatz findById ( sernet . gs . reveng . MbZielobjZusatzId id ) { log . debug ( ""getting MbZielobjZusatz instance with id: "" + id ) ; try { MbZielobjZusatz instance = ( MbZielobjZusatz ) sessionFactory . getCurrentSession ( ) . get ( ""sernet.gs.reveng.MbZielobjZusatz"" , id ) ; if ( instance == null ) { log . debug ( ""get successful, no instance found"" ) ; } else { log . debug ( ""get successful, instance found"" ) ; } return instance ; } catch ( RuntimeException re ) { log . error ( ""get failed"" , re ) ; throw re ; } } +" +566,"public MbZielobjZusatz findById ( sernet . gs . reveng . MbZielobjZusatzId id ) { log . debug ( ""getting MbZielobjZusatz instance with id: "" + id ) ; try { MbZielobjZusatz instance = ( MbZielobjZusatz ) sessionFactory . getCurrentSession ( ) . get ( ""sernet.gs.reveng.MbZielobjZusatz"" , id ) ; if ( instance == null ) { log . debug ( ""get successful, no instance found"" ) ; } else { } return instance ; } catch ( RuntimeException re ) { log . error ( ""get failed"" , re ) ; throw re ; } } +","public MbZielobjZusatz findById ( sernet . gs . reveng . MbZielobjZusatzId id ) { log . debug ( ""getting MbZielobjZusatz instance with id: "" + id ) ; try { MbZielobjZusatz instance = ( MbZielobjZusatz ) sessionFactory . getCurrentSession ( ) . 
get ( ""sernet.gs.reveng.MbZielobjZusatz"" , id ) ; if ( instance == null ) { log . debug ( ""get successful, no instance found"" ) ; } else { log . debug ( ""get successful, instance found"" ) ; } return instance ; } catch ( RuntimeException re ) { log . error ( ""get failed"" , re ) ; throw re ; } } +" +567,"public MbZielobjZusatz findById ( sernet . gs . reveng . MbZielobjZusatzId id ) { log . debug ( ""getting MbZielobjZusatz instance with id: "" + id ) ; try { MbZielobjZusatz instance = ( MbZielobjZusatz ) sessionFactory . getCurrentSession ( ) . get ( ""sernet.gs.reveng.MbZielobjZusatz"" , id ) ; if ( instance == null ) { log . debug ( ""get successful, no instance found"" ) ; } else { log . debug ( ""get successful, instance found"" ) ; } return instance ; } catch ( RuntimeException re ) { throw re ; } } +","public MbZielobjZusatz findById ( sernet . gs . reveng . MbZielobjZusatzId id ) { log . debug ( ""getting MbZielobjZusatz instance with id: "" + id ) ; try { MbZielobjZusatz instance = ( MbZielobjZusatz ) sessionFactory . getCurrentSession ( ) . get ( ""sernet.gs.reveng.MbZielobjZusatz"" , id ) ; if ( instance == null ) { log . debug ( ""get successful, no instance found"" ) ; } else { log . debug ( ""get successful, instance found"" ) ; } return instance ; } catch ( RuntimeException re ) { log . error ( ""get failed"" , re ) ; throw re ; } } +" +568,"public static void deleteHTable ( TableName tableName ) throws IOException { Admin admin = HBaseConnection . get ( KylinConfig . getInstanceFromEnv ( ) . getStorageUrl ( ) ) . getAdmin ( ) ; try { if ( admin . tableExists ( tableName ) ) { admin . disableTable ( tableName ) ; logger . info ( ""deleting hbase table "" + tableName ) ; admin . deleteTable ( tableName ) ; } } finally { IOUtils . closeQuietly ( admin ) ; } } +","public static void deleteHTable ( TableName tableName ) throws IOException { Admin admin = HBaseConnection . get ( KylinConfig . getInstanceFromEnv ( ) . getStorageUrl ( ) ) . getAdmin ( ) ; try { if ( admin . tableExists ( tableName ) ) { logger . info ( ""disabling hbase table "" + tableName ) ; admin . disableTable ( tableName ) ; logger . info ( ""deleting hbase table "" + tableName ) ; admin . deleteTable ( tableName ) ; } } finally { IOUtils . closeQuietly ( admin ) ; } } +" +569,"public static void deleteHTable ( TableName tableName ) throws IOException { Admin admin = HBaseConnection . get ( KylinConfig . getInstanceFromEnv ( ) . getStorageUrl ( ) ) . getAdmin ( ) ; try { if ( admin . tableExists ( tableName ) ) { logger . info ( ""disabling hbase table "" + tableName ) ; admin . disableTable ( tableName ) ; admin . deleteTable ( tableName ) ; } } finally { IOUtils . closeQuietly ( admin ) ; } } +","public static void deleteHTable ( TableName tableName ) throws IOException { Admin admin = HBaseConnection . get ( KylinConfig . getInstanceFromEnv ( ) . getStorageUrl ( ) ) . getAdmin ( ) ; try { if ( admin . tableExists ( tableName ) ) { logger . info ( ""disabling hbase table "" + tableName ) ; admin . disableTable ( tableName ) ; logger . info ( ""deleting hbase table "" + tableName ) ; admin . deleteTable ( tableName ) ; } } finally { IOUtils . closeQuietly ( admin ) ; } } +" +570,"public void visit ( FilterModelChange change ) { FilterModel model = change . getFilterModel ( ) ; Set < String > done = new HashSet < > ( ) ; if ( change . getKind ( ) == OpCode . ADD && model . isDynamic ( ) ) { for ( OsgiContextModel ocm : change . getContextModels ( ) ) { String contextPath = ocm . 
getContextPath ( ) ; if ( ! done . add ( contextPath ) ) { continue ; } OsgiContextModel highestRankedModel = null ; for ( OsgiContextModel cm : model . getContextModels ( ) ) { if ( cm . getContextPath ( ) . equals ( contextPath ) ) { highestRankedModel = cm ; break ; } } if ( highestRankedModel == null ) { highestRankedModel = ocm ; } PaxWebServletContextHandler sch = contextHandlers . get ( contextPath ) ; OsgiServletContext context = osgiServletContexts . get ( highestRankedModel ) ; ServletHandler servletHandler = sch . getServletHandler ( ) ; List < PaxWebFilterMapping > mapping = configureFilterMappings ( model ) ; PaxWebFilterHolder holder = new PaxWebFilterHolder ( model , context ) ; for ( PaxWebFilterMapping m : mapping ) { servletHandler . addFilter ( holder , m ) ; } } } } +","public void visit ( FilterModelChange change ) { FilterModel model = change . getFilterModel ( ) ; Set < String > done = new HashSet < > ( ) ; if ( change . getKind ( ) == OpCode . ADD && model . isDynamic ( ) ) { for ( OsgiContextModel ocm : change . getContextModels ( ) ) { String contextPath = ocm . getContextPath ( ) ; if ( ! done . add ( contextPath ) ) { continue ; } LOG . info ( ""Adding dynamic filter to context {}"" , contextPath ) ; OsgiContextModel highestRankedModel = null ; for ( OsgiContextModel cm : model . getContextModels ( ) ) { if ( cm . getContextPath ( ) . equals ( contextPath ) ) { highestRankedModel = cm ; break ; } } if ( highestRankedModel == null ) { highestRankedModel = ocm ; } PaxWebServletContextHandler sch = contextHandlers . get ( contextPath ) ; OsgiServletContext context = osgiServletContexts . get ( highestRankedModel ) ; ServletHandler servletHandler = sch . getServletHandler ( ) ; List < PaxWebFilterMapping > mapping = configureFilterMappings ( model ) ; PaxWebFilterHolder holder = new PaxWebFilterHolder ( model , context ) ; for ( PaxWebFilterMapping m : mapping ) { servletHandler . addFilter ( holder , m ) ; } } } } +" +571,"public RepositoryCreationResponse createSCMRepository ( String scmUrl , Boolean preBuildSyncEnabled , JobNotificationType jobType , Consumer < RepositoryCreated > consumer , Optional < BuildConfiguration > buildConfiguration ) { if ( StringUtils . isEmpty ( scmUrl ) ) throw new InvalidEntityException ( ""You must specify the SCM URL."" ) ; if ( scmUrl . contains ( config . getInternalScmAuthority ( ) ) ) { validateInternalRepository ( scmUrl ) ; validateRepositoryWithInternalURLDoesNotExist ( scmUrl , null ) ; SCMRepository scmRepository = createSCMRepositoryFromValues ( null , scmUrl , false ) ; consumer . accept ( new RepositoryCreated ( null , Integer . valueOf ( scmRepository . getId ( ) ) ) ) ; return new RepositoryCreationResponse ( scmRepository ) ; } else { validateRepositoryWithExternalURLDoesNotExist ( scmUrl , null ) ; boolean sync = preBuildSyncEnabled == null || preBuildSyncEnabled ; Integer taskId = startRCreationTask ( scmUrl , sync , jobType , consumer , buildConfiguration ) ; return new RepositoryCreationResponse ( taskId ) ; } } +","public RepositoryCreationResponse createSCMRepository ( String scmUrl , Boolean preBuildSyncEnabled , JobNotificationType jobType , Consumer < RepositoryCreated > consumer , Optional < BuildConfiguration > buildConfiguration ) { log . trace ( ""Received request to start RC creation with url autodetect: "" + scmUrl + "" (sync enabled? "" + preBuildSyncEnabled + "")"" ) ; if ( StringUtils . isEmpty ( scmUrl ) ) throw new InvalidEntityException ( ""You must specify the SCM URL."" ) ; if ( scmUrl . 
contains ( config . getInternalScmAuthority ( ) ) ) { validateInternalRepository ( scmUrl ) ; validateRepositoryWithInternalURLDoesNotExist ( scmUrl , null ) ; SCMRepository scmRepository = createSCMRepositoryFromValues ( null , scmUrl , false ) ; consumer . accept ( new RepositoryCreated ( null , Integer . valueOf ( scmRepository . getId ( ) ) ) ) ; return new RepositoryCreationResponse ( scmRepository ) ; } else { validateRepositoryWithExternalURLDoesNotExist ( scmUrl , null ) ; boolean sync = preBuildSyncEnabled == null || preBuildSyncEnabled ; Integer taskId = startRCreationTask ( scmUrl , sync , jobType , consumer , buildConfiguration ) ; return new RepositoryCreationResponse ( taskId ) ; } } +" +572,"public static CompilationUnitElement create ( IResource resource ) { try { IJavaProject project = getJavaProject ( resource ) ; if ( project == null ) { return null ; } if ( resource . getType ( ) == IResource . FILE ) { IJavaElement javaEl = JavaCore . create ( ( IFile ) resource ) ; if ( javaEl instanceof ICompilationUnit ) { return new CompilationUnitElement ( ( ICompilationUnit ) javaEl ) ; } } } catch ( Exception e ) { } return null ; } +","public static CompilationUnitElement create ( IResource resource ) { try { IJavaProject project = getJavaProject ( resource ) ; if ( project == null ) { return null ; } if ( resource . getType ( ) == IResource . FILE ) { IJavaElement javaEl = JavaCore . create ( ( IFile ) resource ) ; if ( javaEl instanceof ICompilationUnit ) { return new CompilationUnitElement ( ( ICompilationUnit ) javaEl ) ; } } } catch ( Exception e ) { Log . log ( e ) ; } return null ; } +" +573,"public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { while ( ! closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +","public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! 
closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +" +574,"public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +","public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . 
getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +" +575,"public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +","public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . 
setDaemon ( true ) ; parserThread . start ( ) ; } +" +576,"public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +","public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +" +577,"public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! 
closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +","public void start ( final ZigBeePort serialPort ) { this . serialPort = serialPort ; timeoutScheduler = ZigBeeExecutors . newSingleThreadScheduledExecutor ( ""TelegesisTimer"" ) ; parserThread = new Thread ( ""TelegesisFrameHandler"" ) { @ Override public void run ( ) { logger . debug ( ""TelegesisFrameHandler thread started"" ) ; while ( ! closeHandler ) { try { synchronized ( commandLock ) { if ( sentCommand == null ) { sendNextFrame ( ) ; } } int [ ] responseData = getPacket ( ) ; if ( responseData == null ) { continue ; } StringBuilder builder = new StringBuilder ( ) ; for ( int value : responseData ) { builder . append ( String . format ( ""%c"" , value ) ) ; } logger . debug ( ""RX Telegesis Data:{}"" , builder . toString ( ) ) ; TelegesisEvent event = TelegesisEventFactory . getTelegesisFrame ( responseData ) ; if ( event != null ) { notifyEventReceived ( event ) ; continue ; } synchronized ( commandLock ) { if ( sentCommand != null ) { boolean done ; try { done = sentCommand . deserialize ( responseData ) ; } catch ( Exception e ) { logger . debug ( ""Exception deserialising frame {}. Transaction will complete. "" , builder . toString ( ) , e ) ; done = true ; } if ( done ) { notifyTransactionComplete ( sentCommand ) ; sentCommand = null ; } } } } catch ( Exception e ) { logger . error ( ""TelegesisFrameHandler exception"" , e ) ; } } logger . debug ( ""TelegesisFrameHandler thread exited."" ) ; } } ; parserThread . setDaemon ( true ) ; parserThread . start ( ) ; } +" +578,"public Boolean setupServer ( String key ) throws IOException { try { com . trilead . ssh2 . Connection sshConnection = SSHCmdHelper . acquireAuthorizedConnection ( config . getAgentIp ( ) , config . getAgentSshUserName ( ) , config . getAgentSshPassword ( ) ) ; if ( sshConnection == null ) { throw new ConfigurationException ( String . format ( ""Unable to "" + ""connect to server(IP=%1$s, username=%2$s, "" + ""password=%3$s"" , config . getAgentIp ( ) , config . getAgentSshUserName ( ) , config . getAgentSshPassword ( ) ) ) ; } SCPClient scp = new SCPClient ( sshConnection ) ; String userDataScriptDir = ""scripts/vm/hypervisor/ovm3/"" ; String userDataScriptPath = Script . findScript ( """" , userDataScriptDir ) ; if ( userDataScriptPath == null ) { throw new ConfigurationException ( ""Can not find "" + userDataScriptDir ) ; } String mkdir = ""mkdir -p "" + config . getAgentScriptsDir ( ) ; if ( ! SSHCmdHelper . 
sshExecuteCmd ( sshConnection , mkdir ) ) { throw new ConfigurationException ( ""Failed "" + mkdir + "" on "" + config . getAgentHostname ( ) ) ; } for ( String script : config . getAgentScripts ( ) ) { script = userDataScriptPath + ""/"" + script ; scp . put ( script , config . getAgentScriptsDir ( ) , ""0755"" ) ; } String prepareCmd = String . format ( config . getAgentScriptsDir ( ) + ""/"" + config . getAgentScript ( ) + "" --ssl="" + c . getUseSsl ( ) + "" "" + ""--port="" + c . getPort ( ) ) ; if ( ! SSHCmdHelper . sshExecuteCmd ( sshConnection , prepareCmd ) ) { throw new ConfigurationException ( ""Failed to insert module on "" + config . getAgentHostname ( ) ) ; } else { Thread . sleep ( 5000 ) ; } CloudstackPlugin cSp = new CloudstackPlugin ( c ) ; cSp . ovsUploadSshKey ( config . getAgentSshKeyFileName ( ) , FileUtils . readFileToString ( getSystemVMKeyFile ( key ) ) ) ; cSp . dom0CheckStorageHealthCheck ( config . getAgentScriptsDir ( ) , config . getAgentCheckStorageScript ( ) , config . getCsHostGuid ( ) , config . getAgentStorageCheckTimeout ( ) , config . getAgentStorageCheckInterval ( ) ) ; } catch ( Exception es ) { LOGGER . error ( ""Unexpected exception "" , es ) ; String msg = ""Unable to install module in agent"" ; throw new CloudRuntimeException ( msg ) ; } return true ; } +","public Boolean setupServer ( String key ) throws IOException { LOGGER . debug ( ""Setup all bits on agent: "" + config . getAgentHostname ( ) ) ; try { com . trilead . ssh2 . Connection sshConnection = SSHCmdHelper . acquireAuthorizedConnection ( config . getAgentIp ( ) , config . getAgentSshUserName ( ) , config . getAgentSshPassword ( ) ) ; if ( sshConnection == null ) { throw new ConfigurationException ( String . format ( ""Unable to "" + ""connect to server(IP=%1$s, username=%2$s, "" + ""password=%3$s"" , config . getAgentIp ( ) , config . getAgentSshUserName ( ) , config . getAgentSshPassword ( ) ) ) ; } SCPClient scp = new SCPClient ( sshConnection ) ; String userDataScriptDir = ""scripts/vm/hypervisor/ovm3/"" ; String userDataScriptPath = Script . findScript ( """" , userDataScriptDir ) ; if ( userDataScriptPath == null ) { throw new ConfigurationException ( ""Can not find "" + userDataScriptDir ) ; } String mkdir = ""mkdir -p "" + config . getAgentScriptsDir ( ) ; if ( ! SSHCmdHelper . sshExecuteCmd ( sshConnection , mkdir ) ) { throw new ConfigurationException ( ""Failed "" + mkdir + "" on "" + config . getAgentHostname ( ) ) ; } for ( String script : config . getAgentScripts ( ) ) { script = userDataScriptPath + ""/"" + script ; scp . put ( script , config . getAgentScriptsDir ( ) , ""0755"" ) ; } String prepareCmd = String . format ( config . getAgentScriptsDir ( ) + ""/"" + config . getAgentScript ( ) + "" --ssl="" + c . getUseSsl ( ) + "" "" + ""--port="" + c . getPort ( ) ) ; if ( ! SSHCmdHelper . sshExecuteCmd ( sshConnection , prepareCmd ) ) { throw new ConfigurationException ( ""Failed to insert module on "" + config . getAgentHostname ( ) ) ; } else { Thread . sleep ( 5000 ) ; } CloudstackPlugin cSp = new CloudstackPlugin ( c ) ; cSp . ovsUploadSshKey ( config . getAgentSshKeyFileName ( ) , FileUtils . readFileToString ( getSystemVMKeyFile ( key ) ) ) ; cSp . dom0CheckStorageHealthCheck ( config . getAgentScriptsDir ( ) , config . getAgentCheckStorageScript ( ) , config . getCsHostGuid ( ) , config . getAgentStorageCheckTimeout ( ) , config . getAgentStorageCheckInterval ( ) ) ; } catch ( Exception es ) { LOGGER . 
error ( ""Unexpected exception "" , es ) ; String msg = ""Unable to install module in agent"" ; throw new CloudRuntimeException ( msg ) ; } return true ; } +" +579,"public Boolean setupServer ( String key ) throws IOException { LOGGER . debug ( ""Setup all bits on agent: "" + config . getAgentHostname ( ) ) ; try { com . trilead . ssh2 . Connection sshConnection = SSHCmdHelper . acquireAuthorizedConnection ( config . getAgentIp ( ) , config . getAgentSshUserName ( ) , config . getAgentSshPassword ( ) ) ; if ( sshConnection == null ) { throw new ConfigurationException ( String . format ( ""Unable to "" + ""connect to server(IP=%1$s, username=%2$s, "" + ""password=%3$s"" , config . getAgentIp ( ) , config . getAgentSshUserName ( ) , config . getAgentSshPassword ( ) ) ) ; } SCPClient scp = new SCPClient ( sshConnection ) ; String userDataScriptDir = ""scripts/vm/hypervisor/ovm3/"" ; String userDataScriptPath = Script . findScript ( """" , userDataScriptDir ) ; if ( userDataScriptPath == null ) { throw new ConfigurationException ( ""Can not find "" + userDataScriptDir ) ; } String mkdir = ""mkdir -p "" + config . getAgentScriptsDir ( ) ; if ( ! SSHCmdHelper . sshExecuteCmd ( sshConnection , mkdir ) ) { throw new ConfigurationException ( ""Failed "" + mkdir + "" on "" + config . getAgentHostname ( ) ) ; } for ( String script : config . getAgentScripts ( ) ) { script = userDataScriptPath + ""/"" + script ; scp . put ( script , config . getAgentScriptsDir ( ) , ""0755"" ) ; } String prepareCmd = String . format ( config . getAgentScriptsDir ( ) + ""/"" + config . getAgentScript ( ) + "" --ssl="" + c . getUseSsl ( ) + "" "" + ""--port="" + c . getPort ( ) ) ; if ( ! SSHCmdHelper . sshExecuteCmd ( sshConnection , prepareCmd ) ) { throw new ConfigurationException ( ""Failed to insert module on "" + config . getAgentHostname ( ) ) ; } else { Thread . sleep ( 5000 ) ; } CloudstackPlugin cSp = new CloudstackPlugin ( c ) ; cSp . ovsUploadSshKey ( config . getAgentSshKeyFileName ( ) , FileUtils . readFileToString ( getSystemVMKeyFile ( key ) ) ) ; cSp . dom0CheckStorageHealthCheck ( config . getAgentScriptsDir ( ) , config . getAgentCheckStorageScript ( ) , config . getCsHostGuid ( ) , config . getAgentStorageCheckTimeout ( ) , config . getAgentStorageCheckInterval ( ) ) ; } catch ( Exception es ) { String msg = ""Unable to install module in agent"" ; throw new CloudRuntimeException ( msg ) ; } return true ; } +","public Boolean setupServer ( String key ) throws IOException { LOGGER . debug ( ""Setup all bits on agent: "" + config . getAgentHostname ( ) ) ; try { com . trilead . ssh2 . Connection sshConnection = SSHCmdHelper . acquireAuthorizedConnection ( config . getAgentIp ( ) , config . getAgentSshUserName ( ) , config . getAgentSshPassword ( ) ) ; if ( sshConnection == null ) { throw new ConfigurationException ( String . format ( ""Unable to "" + ""connect to server(IP=%1$s, username=%2$s, "" + ""password=%3$s"" , config . getAgentIp ( ) , config . getAgentSshUserName ( ) , config . getAgentSshPassword ( ) ) ) ; } SCPClient scp = new SCPClient ( sshConnection ) ; String userDataScriptDir = ""scripts/vm/hypervisor/ovm3/"" ; String userDataScriptPath = Script . findScript ( """" , userDataScriptDir ) ; if ( userDataScriptPath == null ) { throw new ConfigurationException ( ""Can not find "" + userDataScriptDir ) ; } String mkdir = ""mkdir -p "" + config . getAgentScriptsDir ( ) ; if ( ! SSHCmdHelper . 
sshExecuteCmd ( sshConnection , mkdir ) ) { throw new ConfigurationException ( ""Failed "" + mkdir + "" on "" + config . getAgentHostname ( ) ) ; } for ( String script : config . getAgentScripts ( ) ) { script = userDataScriptPath + ""/"" + script ; scp . put ( script , config . getAgentScriptsDir ( ) , ""0755"" ) ; } String prepareCmd = String . format ( config . getAgentScriptsDir ( ) + ""/"" + config . getAgentScript ( ) + "" --ssl="" + c . getUseSsl ( ) + "" "" + ""--port="" + c . getPort ( ) ) ; if ( ! SSHCmdHelper . sshExecuteCmd ( sshConnection , prepareCmd ) ) { throw new ConfigurationException ( ""Failed to insert module on "" + config . getAgentHostname ( ) ) ; } else { Thread . sleep ( 5000 ) ; } CloudstackPlugin cSp = new CloudstackPlugin ( c ) ; cSp . ovsUploadSshKey ( config . getAgentSshKeyFileName ( ) , FileUtils . readFileToString ( getSystemVMKeyFile ( key ) ) ) ; cSp . dom0CheckStorageHealthCheck ( config . getAgentScriptsDir ( ) , config . getAgentCheckStorageScript ( ) , config . getCsHostGuid ( ) , config . getAgentStorageCheckTimeout ( ) , config . getAgentStorageCheckInterval ( ) ) ; } catch ( Exception es ) { LOGGER . error ( ""Unexpected exception "" , es ) ; String msg = ""Unable to install module in agent"" ; throw new CloudRuntimeException ( msg ) ; } return true ; } +" +580,"public byte [ ] loadRenderedOutput ( ReportRequest request ) { try { File outputFile = getReportOutputFile ( request ) ; if ( outputFile . exists ( ) ) { return ReportUtil . readByteArrayFromFile ( outputFile ) ; } } catch ( Exception e ) { log . warn ( ""Failed to load Rendered Output from disk for request "" + request + "" due to "" + e . getMessage ( ) ) ; } return null ; } +","public byte [ ] loadRenderedOutput ( ReportRequest request ) { log . debug ( ""Loading Rendered Output for ReportRequest"" ) ; try { File outputFile = getReportOutputFile ( request ) ; if ( outputFile . exists ( ) ) { return ReportUtil . readByteArrayFromFile ( outputFile ) ; } } catch ( Exception e ) { log . warn ( ""Failed to load Rendered Output from disk for request "" + request + "" due to "" + e . getMessage ( ) ) ; } return null ; } +" +581,"public byte [ ] loadRenderedOutput ( ReportRequest request ) { log . debug ( ""Loading Rendered Output for ReportRequest"" ) ; try { File outputFile = getReportOutputFile ( request ) ; if ( outputFile . exists ( ) ) { return ReportUtil . readByteArrayFromFile ( outputFile ) ; } } catch ( Exception e ) { } return null ; } +","public byte [ ] loadRenderedOutput ( ReportRequest request ) { log . debug ( ""Loading Rendered Output for ReportRequest"" ) ; try { File outputFile = getReportOutputFile ( request ) ; if ( outputFile . exists ( ) ) { return ReportUtil . readByteArrayFromFile ( outputFile ) ; } } catch ( Exception e ) { log . warn ( ""Failed to load Rendered Output from disk for request "" + request + "" due to "" + e . getMessage ( ) ) ; } return null ; } +" +582,"public void deletePortletFileEntry ( long fileEntryId ) throws PortalException { try { LocalRepository localRepository = _repositoryProvider . getFileEntryLocalRepository ( fileEntryId ) ; if ( _isAttachment ( localRepository . getFileEntry ( fileEntryId ) ) ) { _run ( FileEntry . class , ( ) -> { localRepository . deleteFileEntry ( fileEntryId ) ; return null ; } ) ; } else { _run ( ( ) -> { localRepository . deleteFileEntry ( fileEntryId ) ; return null ; } ) ; } } catch ( NoSuchFileEntryException noSuchFileEntryException ) { if ( _log . 
isWarnEnabled ( ) ) { } } } +","public void deletePortletFileEntry ( long fileEntryId ) throws PortalException { try { LocalRepository localRepository = _repositoryProvider . getFileEntryLocalRepository ( fileEntryId ) ; if ( _isAttachment ( localRepository . getFileEntry ( fileEntryId ) ) ) { _run ( FileEntry . class , ( ) -> { localRepository . deleteFileEntry ( fileEntryId ) ; return null ; } ) ; } else { _run ( ( ) -> { localRepository . deleteFileEntry ( fileEntryId ) ; return null ; } ) ; } } catch ( NoSuchFileEntryException noSuchFileEntryException ) { if ( _log . isWarnEnabled ( ) ) { _log . warn ( noSuchFileEntryException , noSuchFileEntryException ) ; } } } +" +583,"public void logError ( Throwable e ) { ILogger logger = getLogger ( ) ; if ( e instanceof PartitionStateVersionMismatchException ) { if ( logger . isFineEnabled ( ) ) { logger . fine ( e . getMessage ( ) , e ) ; } else { } return ; } if ( ! nodeStartCompleted && e instanceof IllegalStateException ) { logger . warning ( e . getMessage ( ) ) ; if ( logger . isFineEnabled ( ) ) { logger . fine ( e ) ; } return ; } super . logError ( e ) ; } +","public void logError ( Throwable e ) { ILogger logger = getLogger ( ) ; if ( e instanceof PartitionStateVersionMismatchException ) { if ( logger . isFineEnabled ( ) ) { logger . fine ( e . getMessage ( ) , e ) ; } else { logger . info ( e . getMessage ( ) ) ; } return ; } if ( ! nodeStartCompleted && e instanceof IllegalStateException ) { logger . warning ( e . getMessage ( ) ) ; if ( logger . isFineEnabled ( ) ) { logger . fine ( e ) ; } return ; } super . logError ( e ) ; } +" +584,"public void logError ( Throwable e ) { ILogger logger = getLogger ( ) ; if ( e instanceof PartitionStateVersionMismatchException ) { if ( logger . isFineEnabled ( ) ) { logger . fine ( e . getMessage ( ) , e ) ; } else { logger . info ( e . getMessage ( ) ) ; } return ; } if ( ! nodeStartCompleted && e instanceof IllegalStateException ) { if ( logger . isFineEnabled ( ) ) { logger . fine ( e ) ; } return ; } super . logError ( e ) ; } +","public void logError ( Throwable e ) { ILogger logger = getLogger ( ) ; if ( e instanceof PartitionStateVersionMismatchException ) { if ( logger . isFineEnabled ( ) ) { logger . fine ( e . getMessage ( ) , e ) ; } else { logger . info ( e . getMessage ( ) ) ; } return ; } if ( ! nodeStartCompleted && e instanceof IllegalStateException ) { logger . warning ( e . getMessage ( ) ) ; if ( logger . isFineEnabled ( ) ) { logger . fine ( e ) ; } return ; } super . logError ( e ) ; } +" +585,"protected void doPost ( final HttpServletRequest httpServletRequest , final HttpServletResponse httpServletResponse ) throws ServletException { try { if ( Math . random ( ) > 0.7d ) { httpServletResponse . setStatus ( 404 ) ; httpServletResponse . getWriter ( ) . println ( ""The chaos monkey strikes again!"" ) ; httpServletResponse . flushBuffer ( ) ; } else if ( Math . random ( ) < 0.3d ) { httpServletResponse . setStatus ( 401 ) ; httpServletResponse . getWriter ( ) . println ( ""The chaos monkey strikes again!"" ) ; httpServletResponse . flushBuffer ( ) ; } else { super . doPost ( httpServletRequest , httpServletResponse ) ; } } catch ( IOException ioe ) { } } +","protected void doPost ( final HttpServletRequest httpServletRequest , final HttpServletResponse httpServletResponse ) throws ServletException { try { if ( Math . random ( ) > 0.7d ) { httpServletResponse . setStatus ( 404 ) ; httpServletResponse . getWriter ( ) . 
println ( ""The chaos monkey strikes again!"" ) ; httpServletResponse . flushBuffer ( ) ; } else if ( Math . random ( ) < 0.3d ) { httpServletResponse . setStatus ( 401 ) ; httpServletResponse . getWriter ( ) . println ( ""The chaos monkey strikes again!"" ) ; httpServletResponse . flushBuffer ( ) ; } else { super . doPost ( httpServletRequest , httpServletResponse ) ; } } catch ( IOException ioe ) { log . debug ( ""Chaos Monkey ran into problem"" , ioe ) ; } } +" +586,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . 
format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +587,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +588,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . 
info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +589,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . 
format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +590,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . 
info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +591,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . 
format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +592,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . 
format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +593,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +594,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . 
format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +595,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . 
format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +596,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . 
info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +597,"private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { } } +","private void printSummary ( long processedCosmicLines , long ignoredCosmicLines ) { NumberFormat formatter = NumberFormat . getInstance ( ) ; logger . info ( """" ) ; logger . info ( ""Summary"" ) ; logger . info ( ""======="" ) ; logger . info ( ""Processed "" + formatter . format ( processedCosmicLines ) + "" cosmic lines"" ) ; logger . info ( ""Serialized "" + formatter . format ( processedCosmicLines - ignoredCosmicLines ) + "" cosmic objects"" ) ; logger . info ( formatter . format ( ignoredCosmicLines ) + "" cosmic lines ignored: "" ) ; if ( invalidPositionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidPositionLines ) + "" lines by invalid position"" ) ; } if ( invalidSubstitutionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidSubstitutionLines ) + "" lines by invalid substitution CDS"" ) ; } if ( invalidInsertionLines > 0 ) { logger . info ( ""\t-"" + formatter . 
format ( invalidInsertionLines ) + "" lines by invalid insertion CDS"" ) ; } if ( invalidDeletionLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDeletionLines ) + "" lines by invalid deletion CDS"" ) ; } if ( invalidDuplicationLines > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidDuplicationLines ) + "" lines because mutation CDS is a duplication"" ) ; } if ( invalidMutationCDSOtherReason > 0 ) { logger . info ( ""\t-"" + formatter . format ( invalidMutationCDSOtherReason ) + "" lines because mutation CDS is invalid for other reasons"" ) ; } } +" +598,"@ RequestMapping ( value = Constants . WHITELIST_DELETE , method = RequestMethod . DELETE ) public void deleteWhitelist ( @ PathVariable ( value = ""id"" ) String id ) { whitelistService . delete ( Long . valueOf ( id ) , GtasSecurityUtils . fetchLoggedInUserId ( ) ) ; } +","@ RequestMapping ( value = Constants . WHITELIST_DELETE , method = RequestMethod . DELETE ) public void deleteWhitelist ( @ PathVariable ( value = ""id"" ) String id ) { logger . info ( ""delete a whitelist."" ) ; whitelistService . delete ( Long . valueOf ( id ) , GtasSecurityUtils . fetchLoggedInUserId ( ) ) ; } +" +599,"public void onClick ( final AjaxRequestTarget target , final UserTO ignore ) { model . setObject ( restClient . read ( model . getObject ( ) . getKey ( ) ) ) ; altDefaultModal . header ( new Model < > ( getString ( ""auditHistory.title"" , new Model < > ( new AnyWrapper < > ( model . getObject ( ) ) ) ) ) ) ; altDefaultModal . show ( true ) ; } +","public void onClick ( final AjaxRequestTarget target , final UserTO ignore ) { model . setObject ( restClient . read ( model . getObject ( ) . getKey ( ) ) ) ; target . add ( altDefaultModal . setContent ( new AuditHistoryModal < UserTO > ( altDefaultModal , AuditElements . EventCategoryType . LOGIC , ""UserLogic"" , model . getObject ( ) , IdRepoEntitlement . USER_UPDATE , pageRef ) { private static final long serialVersionUID = 959378158400669867L ; @ Override protected void restore ( final String json , final AjaxRequestTarget target ) { UserTO original = model . getObject ( ) ; try { UserTO updated = MAPPER . readValue ( json , UserTO . class ) ; UserUR updateReq = AnyOperations . diff ( updated , original , false ) ; updateReq . setPassword ( null ) ; updateReq . setSecurityAnswer ( null ) ; ProvisioningResult < UserTO > result = restClient . update ( original . getETagValue ( ) , updateReq ) ; model . getObject ( ) . setLastChangeDate ( result . getEntity ( ) . getLastChangeDate ( ) ) ; SyncopeConsoleSession . get ( ) . success ( getString ( Constants . OPERATION_SUCCEEDED ) ) ; target . add ( container ) ; } catch ( Exception e ) { LOG . error ( ""While restoring user {}"" , model . getObject ( ) . getKey ( ) , e ) ; SyncopeConsoleSession . get ( ) . onException ( e ) ; } ( ( BasePage ) pageRef . getPage ( ) ) . getNotificationPanel ( ) . refresh ( target ) ; } } ) ) ; altDefaultModal . header ( new Model < > ( getString ( ""auditHistory.title"" , new Model < > ( new AnyWrapper < > ( model . getObject ( ) ) ) ) ) ) ; altDefaultModal . show ( true ) ; } +" +600,"public void onClick ( final AjaxRequestTarget target , final UserTO ignore ) { model . setObject ( restClient . read ( model . getObject ( ) . getKey ( ) ) ) ; target . add ( altDefaultModal . setContent ( new AuditHistoryModal < UserTO > ( altDefaultModal , AuditElements . EventCategoryType . LOGIC , ""UserLogic"" , model . getObject ( ) , IdRepoEntitlement . 
USER_UPDATE , pageRef ) { private static final long serialVersionUID = 959378158400669867L ; @ Override protected void restore ( final String json , final AjaxRequestTarget target ) { UserTO original = model . getObject ( ) ; try { UserTO updated = MAPPER . readValue ( json , UserTO . class ) ; UserUR updateReq = AnyOperations . diff ( updated , original , false ) ; updateReq . setPassword ( null ) ; updateReq . setSecurityAnswer ( null ) ; ProvisioningResult < UserTO > result = restClient . update ( original . getETagValue ( ) , updateReq ) ; model . getObject ( ) . setLastChangeDate ( result . getEntity ( ) . getLastChangeDate ( ) ) ; SyncopeConsoleSession . get ( ) . success ( getString ( Constants . OPERATION_SUCCEEDED ) ) ; target . add ( container ) ; } catch ( Exception e ) { SyncopeConsoleSession . get ( ) . onException ( e ) ; } ( ( BasePage ) pageRef . getPage ( ) ) . getNotificationPanel ( ) . refresh ( target ) ; } } ) ) ; altDefaultModal . header ( new Model < > ( getString ( ""auditHistory.title"" , new Model < > ( new AnyWrapper < > ( model . getObject ( ) ) ) ) ) ) ; altDefaultModal . show ( true ) ; } +","public void onClick ( final AjaxRequestTarget target , final UserTO ignore ) { model . setObject ( restClient . read ( model . getObject ( ) . getKey ( ) ) ) ; target . add ( altDefaultModal . setContent ( new AuditHistoryModal < UserTO > ( altDefaultModal , AuditElements . EventCategoryType . LOGIC , ""UserLogic"" , model . getObject ( ) , IdRepoEntitlement . USER_UPDATE , pageRef ) { private static final long serialVersionUID = 959378158400669867L ; @ Override protected void restore ( final String json , final AjaxRequestTarget target ) { UserTO original = model . getObject ( ) ; try { UserTO updated = MAPPER . readValue ( json , UserTO . class ) ; UserUR updateReq = AnyOperations . diff ( updated , original , false ) ; updateReq . setPassword ( null ) ; updateReq . setSecurityAnswer ( null ) ; ProvisioningResult < UserTO > result = restClient . update ( original . getETagValue ( ) , updateReq ) ; model . getObject ( ) . setLastChangeDate ( result . getEntity ( ) . getLastChangeDate ( ) ) ; SyncopeConsoleSession . get ( ) . success ( getString ( Constants . OPERATION_SUCCEEDED ) ) ; target . add ( container ) ; } catch ( Exception e ) { LOG . error ( ""While restoring user {}"" , model . getObject ( ) . getKey ( ) , e ) ; SyncopeConsoleSession . get ( ) . onException ( e ) ; } ( ( BasePage ) pageRef . getPage ( ) ) . getNotificationPanel ( ) . refresh ( target ) ; } } ) ) ; altDefaultModal . header ( new Model < > ( getString ( ""auditHistory.title"" , new Model < > ( new AnyWrapper < > ( model . getObject ( ) ) ) ) ) ) ; altDefaultModal . show ( true ) ; } +" +601,"private void GetPercentageFromErrorStream ( String input ) { Matcher matcher ; try { matcher = CACHE . get ( PATTERN_FFMPEG ) . matcher ( input ) ; if ( matcher . find ( ) ) { String dauer = matcher . group ( ) . trim ( ) ; String [ ] hms = dauer . split ( "":"" ) ; totalSecs = Integer . parseInt ( hms [ 0 ] ) * 3600 + Integer . parseInt ( hms [ 1 ] ) * 60 + Double . parseDouble ( hms [ 2 ] ) ; } matcher = CACHE . get ( PATTERN_SIZE ) . matcher ( input ) ; if ( matcher . find ( ) ) { String s = matcher . group ( ) . trim ( ) ; if ( ! s . isEmpty ( ) ) { try { final long aktSize = Integer . parseInt ( StringUtils . replace ( s , ""kB"" , """" ) ) ; mVFilmSize . setAktSize ( aktSize * 1_000 ) ; long akt = start . startZeit . diffInSekunden ( ) ; if ( oldSecs < akt - 5 ) { start . 
bandbreite = ( aktSize - oldSize ) * 1_000 / ( akt - oldSecs ) ; oldSecs = akt ; oldSize = aktSize ; } } catch ( NumberFormatException ignored ) { } } } matcher = CACHE . get ( PATTERN_TIME ) . matcher ( input ) ; if ( totalSecs > 0 && matcher . find ( ) ) { String zeit = matcher . group ( ) ; if ( zeit . contains ( "":"" ) ) { String [ ] hms = zeit . split ( "":"" ) ; final double aktSecs = Integer . parseInt ( hms [ 0 ] ) * 3600 + Integer . parseInt ( hms [ 1 ] ) * 60 + Double . parseDouble ( hms [ 2 ] ) ; double d = aktSecs / totalSecs * 100 ; meldenDouble ( d ) ; } else { double aktSecs = Double . parseDouble ( zeit ) ; double d = aktSecs / totalSecs * 100 ; meldenDouble ( d ) ; } } } catch ( Exception ex ) { MessageBus . getMessageBus ( ) . publishAsync ( new DownloadProgressChangedEvent ( ) ) ; } } +","private void GetPercentageFromErrorStream ( String input ) { Matcher matcher ; try { matcher = CACHE . get ( PATTERN_FFMPEG ) . matcher ( input ) ; if ( matcher . find ( ) ) { String dauer = matcher . group ( ) . trim ( ) ; String [ ] hms = dauer . split ( "":"" ) ; totalSecs = Integer . parseInt ( hms [ 0 ] ) * 3600 + Integer . parseInt ( hms [ 1 ] ) * 60 + Double . parseDouble ( hms [ 2 ] ) ; } matcher = CACHE . get ( PATTERN_SIZE ) . matcher ( input ) ; if ( matcher . find ( ) ) { String s = matcher . group ( ) . trim ( ) ; if ( ! s . isEmpty ( ) ) { try { final long aktSize = Integer . parseInt ( StringUtils . replace ( s , ""kB"" , """" ) ) ; mVFilmSize . setAktSize ( aktSize * 1_000 ) ; long akt = start . startZeit . diffInSekunden ( ) ; if ( oldSecs < akt - 5 ) { start . bandbreite = ( aktSize - oldSize ) * 1_000 / ( akt - oldSecs ) ; oldSecs = akt ; oldSize = aktSize ; } } catch ( NumberFormatException ignored ) { } } } matcher = CACHE . get ( PATTERN_TIME ) . matcher ( input ) ; if ( totalSecs > 0 && matcher . find ( ) ) { String zeit = matcher . group ( ) ; if ( zeit . contains ( "":"" ) ) { String [ ] hms = zeit . split ( "":"" ) ; final double aktSecs = Integer . parseInt ( hms [ 0 ] ) * 3600 + Integer . parseInt ( hms [ 1 ] ) * 60 + Double . parseDouble ( hms [ 2 ] ) ; double d = aktSecs / totalSecs * 100 ; meldenDouble ( d ) ; } else { double aktSecs = Double . parseDouble ( zeit ) ; double d = aktSecs / totalSecs * 100 ; meldenDouble ( d ) ; } } } catch ( Exception ex ) { MessageBus . getMessageBus ( ) . publishAsync ( new DownloadProgressChangedEvent ( ) ) ; logger . error ( ""GetPercentageFromErrorStream(): {}"" , input ) ; } } +" +602,"public TSStatus executeNonQueryPlan ( PhysicalPlan plan ) { TSStatus result ; long startTime = Timer . Statistic . META_GROUP_MEMBER_EXECUTE_NON_QUERY . getOperationStartTime ( ) ; if ( PartitionUtils . isGlobalMetaPlan ( plan ) ) { result = processNonPartitionedMetaPlan ( plan ) ; } else { logger . warn ( ""receive a plan {} could not be processed in local"" , plan ) ; result = StatusUtils . UNSUPPORTED_OPERATION ; } Timer . Statistic . META_GROUP_MEMBER_EXECUTE_NON_QUERY . calOperationCostTimeFromStart ( startTime ) ; return result ; } +","public TSStatus executeNonQueryPlan ( PhysicalPlan plan ) { TSStatus result ; long startTime = Timer . Statistic . META_GROUP_MEMBER_EXECUTE_NON_QUERY . getOperationStartTime ( ) ; if ( PartitionUtils . isGlobalMetaPlan ( plan ) ) { logger . debug ( ""receive a global meta plan {}"" , plan ) ; result = processNonPartitionedMetaPlan ( plan ) ; } else { logger . warn ( ""receive a plan {} could not be processed in local"" , plan ) ; result = StatusUtils . UNSUPPORTED_OPERATION ; } Timer . Statistic . 
META_GROUP_MEMBER_EXECUTE_NON_QUERY . calOperationCostTimeFromStart ( startTime ) ; return result ; } +" +603,"public TSStatus executeNonQueryPlan ( PhysicalPlan plan ) { TSStatus result ; long startTime = Timer . Statistic . META_GROUP_MEMBER_EXECUTE_NON_QUERY . getOperationStartTime ( ) ; if ( PartitionUtils . isGlobalMetaPlan ( plan ) ) { logger . debug ( ""receive a global meta plan {}"" , plan ) ; result = processNonPartitionedMetaPlan ( plan ) ; } else { result = StatusUtils . UNSUPPORTED_OPERATION ; } Timer . Statistic . META_GROUP_MEMBER_EXECUTE_NON_QUERY . calOperationCostTimeFromStart ( startTime ) ; return result ; } +","public TSStatus executeNonQueryPlan ( PhysicalPlan plan ) { TSStatus result ; long startTime = Timer . Statistic . META_GROUP_MEMBER_EXECUTE_NON_QUERY . getOperationStartTime ( ) ; if ( PartitionUtils . isGlobalMetaPlan ( plan ) ) { logger . debug ( ""receive a global meta plan {}"" , plan ) ; result = processNonPartitionedMetaPlan ( plan ) ; } else { logger . warn ( ""receive a plan {} could not be processed in local"" , plan ) ; result = StatusUtils . UNSUPPORTED_OPERATION ; } Timer . Statistic . META_GROUP_MEMBER_EXECUTE_NON_QUERY . calOperationCostTimeFromStart ( startTime ) ; return result ; } +" +604,"public void onMessage ( final Message message ) { try { configureAuthentication ( ) ; updateService . updateDocumentElement ( ( DocumentElement ) ( ( ObjectMessage ) message ) . getObject ( ) ) ; } catch ( final JMSException e ) { } finally { clearAuthentication ( ) ; } } +","public void onMessage ( final Message message ) { try { configureAuthentication ( ) ; updateService . updateDocumentElement ( ( DocumentElement ) ( ( ObjectMessage ) message ) . getObject ( ) ) ; } catch ( final JMSException e ) { LOGGER . warn ( ""Error loading riksdagen document"" , e ) ; } finally { clearAuthentication ( ) ; } } +" +605,"private Map < String , List < String > > validationMessages ( Set < ConstraintViolation < ? > > failures ) { Map < String , List < String > > mp = new HashMap < > ( ) ; for ( ConstraintViolation < ? > failure : failures ) { String property = failure . getPropertyPath ( ) . toString ( ) ; if ( mp . containsKey ( property ) ) { mp . get ( failure . getPropertyPath ( ) . toString ( ) ) . add ( failure . getMessage ( ) ) ; } else { List < String > list = new ArrayList < > ( ) ; list . add ( failure . getMessage ( ) ) ; mp . put ( property , list ) ; } } return mp ; } +","private Map < String , List < String > > validationMessages ( Set < ConstraintViolation < ? > > failures ) { Map < String , List < String > > mp = new HashMap < > ( ) ; for ( ConstraintViolation < ? > failure : failures ) { logger . debug ( failure . getPropertyPath ( ) . toString ( ) + "": "" + failure . getMessage ( ) ) ; String property = failure . getPropertyPath ( ) . toString ( ) ; if ( mp . containsKey ( property ) ) { mp . get ( failure . getPropertyPath ( ) . toString ( ) ) . add ( failure . getMessage ( ) ) ; } else { List < String > list = new ArrayList < > ( ) ; list . add ( failure . getMessage ( ) ) ; mp . put ( property , list ) ; } } return mp ; } +" +606,"protected boolean generate ( ByteBufferPool . Lease lease ) { int dataRemaining = getDataBytesRemaining ( ) ; int sessionSendWindow = getSendWindow ( ) ; int streamSendWindow = stream . updateSendWindow ( 0 ) ; int window = Math . min ( streamSendWindow , sessionSendWindow ) ; if ( window <= 0 && dataRemaining > 0 ) return false ; int length = Math . 
min ( dataRemaining , window ) ; DataFrame dataFrame = ( DataFrame ) frame ; int frameBytes = generator . data ( lease , dataFrame , length ) ; this . frameBytes += frameBytes ; this . frameRemaining += frameBytes ; int dataBytes = frameBytes - Frame . HEADER_LENGTH ; this . dataBytes += dataBytes ; this . dataRemaining -= dataBytes ; if ( LOG . isDebugEnabled ( ) ) flowControl . onDataSending ( stream , dataBytes ) ; stream . updateClose ( dataFrame . isEndStream ( ) , CloseState . Event . BEFORE_SEND ) ; return true ; } +","protected boolean generate ( ByteBufferPool . Lease lease ) { int dataRemaining = getDataBytesRemaining ( ) ; int sessionSendWindow = getSendWindow ( ) ; int streamSendWindow = stream . updateSendWindow ( 0 ) ; int window = Math . min ( streamSendWindow , sessionSendWindow ) ; if ( window <= 0 && dataRemaining > 0 ) return false ; int length = Math . min ( dataRemaining , window ) ; DataFrame dataFrame = ( DataFrame ) frame ; int frameBytes = generator . data ( lease , dataFrame , length ) ; this . frameBytes += frameBytes ; this . frameRemaining += frameBytes ; int dataBytes = frameBytes - Frame . HEADER_LENGTH ; this . dataBytes += dataBytes ; this . dataRemaining -= dataBytes ; if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Generated {}, length/window/data={}/{}/{}"" , dataFrame , dataBytes , window , dataRemaining ) ; flowControl . onDataSending ( stream , dataBytes ) ; stream . updateClose ( dataFrame . isEndStream ( ) , CloseState . Event . BEFORE_SEND ) ; return true ; } +" +607,"private List < GluuSimplePerson > loadSourceServerEntries ( CacheRefreshConfiguration cacheRefreshConfiguration , LdapServerConnection [ ] sourceServerConnections ) throws SearchException { Filter customFilter = cacheRefreshService . createFilter ( cacheRefreshConfiguration . getCustomLdapFilter ( ) ) ; String [ ] keyAttributes = getCompoundKeyAttributes ( cacheRefreshConfiguration ) ; String [ ] keyAttributesWithoutValues = getCompoundKeyAttributesWithoutValues ( cacheRefreshConfiguration ) ; String [ ] keyObjectClasses = getCompoundKeyObjectClasses ( cacheRefreshConfiguration ) ; String [ ] sourceAttributes = getSourceAttributes ( cacheRefreshConfiguration ) ; String [ ] twoLettersArray = createTwoLettersArray ( ) ; String [ ] returnAttributes = ArrayHelper . arrayMerge ( keyAttributesWithoutValues , sourceAttributes ) ; Set < String > addedDns = new HashSet < String > ( ) ; List < GluuSimplePerson > sourcePersons = new ArrayList < GluuSimplePerson > ( ) ; for ( LdapServerConnection sourceServerConnection : sourceServerConnections ) { String sourceServerName = sourceServerConnection . getSourceServerName ( ) ; PersistenceEntryManager sourcePersistenceEntryManager = sourceServerConnection . getPersistenceEntryManager ( ) ; String [ ] baseDns = sourceServerConnection . getBaseDns ( ) ; for ( String keyAttributeStart : twoLettersArray ) { Filter filter = cacheRefreshService . createFilter ( keyAttributes , keyObjectClasses , keyAttributeStart , customFilter ) ; if ( log . isDebugEnabled ( ) ) { } for ( String baseDn : baseDns ) { List < GluuSimplePerson > currentSourcePersons = sourcePersistenceEntryManager . findEntries ( baseDn , GluuSimplePerson . class , filter , SearchScope . SUB , returnAttributes , null , 0 , 0 , cacheRefreshConfiguration . getLdapSearchSizeLimit ( ) ) ; for ( GluuSimplePerson currentSourcePerson : currentSourcePersons ) { currentSourcePerson . setSourceServerName ( sourceServerName ) ; String currentSourcePersonDn = currentSourcePerson . getDn ( ) . 
toLowerCase ( ) ; if ( ! addedDns . contains ( currentSourcePersonDn ) ) { sourcePersons . add ( currentSourcePerson ) ; addedDns . add ( currentSourcePersonDn ) ; } } } } } return sourcePersons ; } +","private List < GluuSimplePerson > loadSourceServerEntries ( CacheRefreshConfiguration cacheRefreshConfiguration , LdapServerConnection [ ] sourceServerConnections ) throws SearchException { Filter customFilter = cacheRefreshService . createFilter ( cacheRefreshConfiguration . getCustomLdapFilter ( ) ) ; String [ ] keyAttributes = getCompoundKeyAttributes ( cacheRefreshConfiguration ) ; String [ ] keyAttributesWithoutValues = getCompoundKeyAttributesWithoutValues ( cacheRefreshConfiguration ) ; String [ ] keyObjectClasses = getCompoundKeyObjectClasses ( cacheRefreshConfiguration ) ; String [ ] sourceAttributes = getSourceAttributes ( cacheRefreshConfiguration ) ; String [ ] twoLettersArray = createTwoLettersArray ( ) ; String [ ] returnAttributes = ArrayHelper . arrayMerge ( keyAttributesWithoutValues , sourceAttributes ) ; Set < String > addedDns = new HashSet < String > ( ) ; List < GluuSimplePerson > sourcePersons = new ArrayList < GluuSimplePerson > ( ) ; for ( LdapServerConnection sourceServerConnection : sourceServerConnections ) { String sourceServerName = sourceServerConnection . getSourceServerName ( ) ; PersistenceEntryManager sourcePersistenceEntryManager = sourceServerConnection . getPersistenceEntryManager ( ) ; String [ ] baseDns = sourceServerConnection . getBaseDns ( ) ; for ( String keyAttributeStart : twoLettersArray ) { Filter filter = cacheRefreshService . createFilter ( keyAttributes , keyObjectClasses , keyAttributeStart , customFilter ) ; if ( log . isDebugEnabled ( ) ) { log . trace ( ""Using next filter to load entris from source server: {}"" , filter ) ; } for ( String baseDn : baseDns ) { List < GluuSimplePerson > currentSourcePersons = sourcePersistenceEntryManager . findEntries ( baseDn , GluuSimplePerson . class , filter , SearchScope . SUB , returnAttributes , null , 0 , 0 , cacheRefreshConfiguration . getLdapSearchSizeLimit ( ) ) ; for ( GluuSimplePerson currentSourcePerson : currentSourcePersons ) { currentSourcePerson . setSourceServerName ( sourceServerName ) ; String currentSourcePersonDn = currentSourcePerson . getDn ( ) . toLowerCase ( ) ; if ( ! addedDns . contains ( currentSourcePersonDn ) ) { sourcePersons . add ( currentSourcePerson ) ; addedDns . add ( currentSourcePersonDn ) ; } } } } } return sourcePersons ; } +" +608,"@ SuppressFBWarnings ( ""BC_UNCONFIRMED_CAST_OF_RETURN_VALUE"" ) protected NetconfServerSession getSession ( NetconfServerSessionListener sessionListener , Channel channel , NetconfHelloMessage message ) { Optional < NetconfHelloMessageAdditionalHeader > additionalHeader = message . getAdditionalHeader ( ) ; NetconfHelloMessageAdditionalHeader parsedHeader ; if ( additionalHeader . isPresent ( ) ) { parsedHeader = additionalHeader . get ( ) ; } else { parsedHeader = new NetconfHelloMessageAdditionalHeader ( UNKNOWN , getHostName ( channel . localAddress ( ) ) . getValue ( ) , getHostName ( channel . localAddress ( ) ) . getKey ( ) , ""tcp"" , ""client"" ) ; } return new NetconfServerSession ( sessionListener , channel , getSessionPreferences ( ) . 
getSessionId ( ) , parsedHeader ) ; } +","@ SuppressFBWarnings ( ""BC_UNCONFIRMED_CAST_OF_RETURN_VALUE"" ) protected NetconfServerSession getSession ( NetconfServerSessionListener sessionListener , Channel channel , NetconfHelloMessage message ) { Optional < NetconfHelloMessageAdditionalHeader > additionalHeader = message . getAdditionalHeader ( ) ; NetconfHelloMessageAdditionalHeader parsedHeader ; if ( additionalHeader . isPresent ( ) ) { parsedHeader = additionalHeader . get ( ) ; } else { parsedHeader = new NetconfHelloMessageAdditionalHeader ( UNKNOWN , getHostName ( channel . localAddress ( ) ) . getValue ( ) , getHostName ( channel . localAddress ( ) ) . getKey ( ) , ""tcp"" , ""client"" ) ; } LOG . debug ( ""Additional header from hello parsed as {} from {}"" , parsedHeader , additionalHeader ) ; return new NetconfServerSession ( sessionListener , channel , getSessionPreferences ( ) . getSessionId ( ) , parsedHeader ) ; } +" +609,"public PublicKey getPublicKey ( ) { if ( publicKey != null ) { return publicKey ; } Path keyPath = securityConfig . getKeyLocation ( component ) ; if ( OzoneSecurityUtil . checkIfFileExist ( keyPath , securityConfig . getPublicKeyFileName ( ) ) ) { try { publicKey = keyCodec . readPublicKey ( ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException | IOException e ) { } } return publicKey ; } +","public PublicKey getPublicKey ( ) { if ( publicKey != null ) { return publicKey ; } Path keyPath = securityConfig . getKeyLocation ( component ) ; if ( OzoneSecurityUtil . checkIfFileExist ( keyPath , securityConfig . getPublicKeyFileName ( ) ) ) { try { publicKey = keyCodec . readPublicKey ( ) ; } catch ( InvalidKeySpecException | NoSuchAlgorithmException | IOException e ) { getLogger ( ) . error ( ""Error while getting public key."" , e ) ; } } return publicKey ; } +" +610,"public JSONArray getDDMFormFieldTypesJSONArray ( ) throws PortalException { List < DDMFormFieldType > availableDDMFormFieldTypes = _removeDDMFormFieldTypesOutOfScope ( _ddmFormFieldTypeServicesTracker . getDDMFormFieldTypes ( ) ) ; String serializedFormFieldTypes = serialize ( availableDDMFormFieldTypes ) ; JSONArray jsonArray = jsonFactory . createJSONArray ( serializedFormFieldTypes ) ; HttpServletRequest httpServletRequest = formAdminRequestHelper . getRequest ( ) ; HttpServletResponse httpServletResponse = PortalUtil . getHttpServletResponse ( renderResponse ) ; for ( int i = 0 ; i < jsonArray . length ( ) ; i ++ ) { DDMFormFieldType ddmFormFieldType = availableDDMFormFieldTypes . get ( i ) ; JSONObject jsonObject = jsonArray . getJSONObject ( i ) ; Class < ? > ddmFormFieldTypeSettings = ddmFormFieldType . getDDMFormFieldTypeSettings ( ) ; DDMForm ddmForm = DDMFormFactory . create ( ddmFormFieldTypeSettings ) ; DDMFormLayout ddmFormLayout = DDMFormLayoutFactory . create ( ddmFormFieldTypeSettings ) ; DDMFormRenderingContext ddmFormRenderingContext = new DDMFormRenderingContext ( ) ; ddmFormRenderingContext . setHttpServletRequest ( httpServletRequest ) ; ddmFormRenderingContext . setHttpServletResponse ( httpServletResponse ) ; ddmFormRenderingContext . setContainerId ( ""settings"" ) ; ddmFormRenderingContext . setLocale ( LocaleUtil . fromLanguageId ( getDefaultLanguageId ( ) ) ) ; ddmFormRenderingContext . setPortletNamespace ( renderResponse . getNamespace ( ) ) ; ddmFormRenderingContext . setViewMode ( true ) ; try { Map < String , Object > ddmFormTemplateContext = _ddmFormTemplateContextFactory . create ( ddmForm , ddmFormLayout , ddmFormRenderingContext ) ; jsonObject . 
put ( ""settingsContext"" , jsonFactory . createJSONObject ( jsonFactory . looseSerializeDeep ( ddmFormTemplateContext ) ) ) ; ThemeDisplay themeDisplay = ( ThemeDisplay ) httpServletRequest . getAttribute ( WebKeys . THEME_DISPLAY ) ; if ( ( themeDisplay != null ) && StringUtil . equals ( ddmFormFieldType . getName ( ) , ""rich_text"" ) ) { EditorConfiguration editorConfiguration = EditorConfigurationFactoryUtil . getEditorConfiguration ( StringPool . BLANK , ddmFormFieldType . getName ( ) , ""ckeditor_classic"" , new HashMap < String , Object > ( ) , themeDisplay , RequestBackedPortletURLFactoryUtil . create ( httpServletRequest ) ) ; Map < String , Object > editorConfigurationData = editorConfiguration . getData ( ) ; jsonObject . put ( ""editorConfig"" , editorConfigurationData . get ( ""editorConfig"" ) ) ; } } catch ( PortalException portalException ) { } } return jsonArray ; } +","public JSONArray getDDMFormFieldTypesJSONArray ( ) throws PortalException { List < DDMFormFieldType > availableDDMFormFieldTypes = _removeDDMFormFieldTypesOutOfScope ( _ddmFormFieldTypeServicesTracker . getDDMFormFieldTypes ( ) ) ; String serializedFormFieldTypes = serialize ( availableDDMFormFieldTypes ) ; JSONArray jsonArray = jsonFactory . createJSONArray ( serializedFormFieldTypes ) ; HttpServletRequest httpServletRequest = formAdminRequestHelper . getRequest ( ) ; HttpServletResponse httpServletResponse = PortalUtil . getHttpServletResponse ( renderResponse ) ; for ( int i = 0 ; i < jsonArray . length ( ) ; i ++ ) { DDMFormFieldType ddmFormFieldType = availableDDMFormFieldTypes . get ( i ) ; JSONObject jsonObject = jsonArray . getJSONObject ( i ) ; Class < ? > ddmFormFieldTypeSettings = ddmFormFieldType . getDDMFormFieldTypeSettings ( ) ; DDMForm ddmForm = DDMFormFactory . create ( ddmFormFieldTypeSettings ) ; DDMFormLayout ddmFormLayout = DDMFormLayoutFactory . create ( ddmFormFieldTypeSettings ) ; DDMFormRenderingContext ddmFormRenderingContext = new DDMFormRenderingContext ( ) ; ddmFormRenderingContext . setHttpServletRequest ( httpServletRequest ) ; ddmFormRenderingContext . setHttpServletResponse ( httpServletResponse ) ; ddmFormRenderingContext . setContainerId ( ""settings"" ) ; ddmFormRenderingContext . setLocale ( LocaleUtil . fromLanguageId ( getDefaultLanguageId ( ) ) ) ; ddmFormRenderingContext . setPortletNamespace ( renderResponse . getNamespace ( ) ) ; ddmFormRenderingContext . setViewMode ( true ) ; try { Map < String , Object > ddmFormTemplateContext = _ddmFormTemplateContextFactory . create ( ddmForm , ddmFormLayout , ddmFormRenderingContext ) ; jsonObject . put ( ""settingsContext"" , jsonFactory . createJSONObject ( jsonFactory . looseSerializeDeep ( ddmFormTemplateContext ) ) ) ; ThemeDisplay themeDisplay = ( ThemeDisplay ) httpServletRequest . getAttribute ( WebKeys . THEME_DISPLAY ) ; if ( ( themeDisplay != null ) && StringUtil . equals ( ddmFormFieldType . getName ( ) , ""rich_text"" ) ) { EditorConfiguration editorConfiguration = EditorConfigurationFactoryUtil . getEditorConfiguration ( StringPool . BLANK , ddmFormFieldType . getName ( ) , ""ckeditor_classic"" , new HashMap < String , Object > ( ) , themeDisplay , RequestBackedPortletURLFactoryUtil . create ( httpServletRequest ) ) ; Map < String , Object > editorConfigurationData = editorConfiguration . getData ( ) ; jsonObject . put ( ""editorConfig"" , editorConfigurationData . get ( ""editorConfig"" ) ) ; } } catch ( PortalException portalException ) { _log . 
error ( portalException , portalException ) ; } } return jsonArray ; } +" +611,"public String getLimitClause ( ) { String ret = limit == 0 ? """" : concat ( LIMIT , limit . toString ( ) , OFFSET , offset . toString ( ) ) ; return ret ; } +","public String getLimitClause ( ) { String ret = limit == 0 ? """" : concat ( LIMIT , limit . toString ( ) , OFFSET , offset . toString ( ) ) ; LOGGER . debug ( concat ( ""getLimitClause() returns: "" , ret ) ) ; return ret ; } +" +612,"public Detecting . Detection detect ( ByteBuffer buffer ) { if ( LOG . isDebugEnabled ( ) ) boolean needMoreBytes = true ; for ( Detecting detectingConnectionFactory : _detectingConnectionFactories ) { Detecting . Detection detection = detectingConnectionFactory . detect ( buffer ) ; if ( detection == Detecting . Detection . RECOGNIZED ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} recognized bytes using {}"" , getProtocol ( ) , detection ) ; return Detecting . Detection . RECOGNIZED ; } needMoreBytes &= detection == Detection . NEED_MORE_BYTES ; } if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} {}"" , getProtocol ( ) , ( needMoreBytes ? ""requires more bytes"" : ""failed to recognize bytes"" ) ) ; return needMoreBytes ? Detection . NEED_MORE_BYTES : Detection . NOT_RECOGNIZED ; } +","public Detecting . Detection detect ( ByteBuffer buffer ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} detecting from buffer {} using {}"" , getProtocol ( ) , BufferUtil . toHexString ( buffer ) , _detectingConnectionFactories ) ; boolean needMoreBytes = true ; for ( Detecting detectingConnectionFactory : _detectingConnectionFactories ) { Detecting . Detection detection = detectingConnectionFactory . detect ( buffer ) ; if ( detection == Detecting . Detection . RECOGNIZED ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} recognized bytes using {}"" , getProtocol ( ) , detection ) ; return Detecting . Detection . RECOGNIZED ; } needMoreBytes &= detection == Detection . NEED_MORE_BYTES ; } if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} {}"" , getProtocol ( ) , ( needMoreBytes ? ""requires more bytes"" : ""failed to recognize bytes"" ) ) ; return needMoreBytes ? Detection . NEED_MORE_BYTES : Detection . NOT_RECOGNIZED ; } +" +613,"public Detecting . Detection detect ( ByteBuffer buffer ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} detecting from buffer {} using {}"" , getProtocol ( ) , BufferUtil . toHexString ( buffer ) , _detectingConnectionFactories ) ; boolean needMoreBytes = true ; for ( Detecting detectingConnectionFactory : _detectingConnectionFactories ) { Detecting . Detection detection = detectingConnectionFactory . detect ( buffer ) ; if ( detection == Detecting . Detection . RECOGNIZED ) { if ( LOG . isDebugEnabled ( ) ) return Detecting . Detection . RECOGNIZED ; } needMoreBytes &= detection == Detection . NEED_MORE_BYTES ; } if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} {}"" , getProtocol ( ) , ( needMoreBytes ? ""requires more bytes"" : ""failed to recognize bytes"" ) ) ; return needMoreBytes ? Detection . NEED_MORE_BYTES : Detection . NOT_RECOGNIZED ; } +","public Detecting . Detection detect ( ByteBuffer buffer ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} detecting from buffer {} using {}"" , getProtocol ( ) , BufferUtil . toHexString ( buffer ) , _detectingConnectionFactories ) ; boolean needMoreBytes = true ; for ( Detecting detectingConnectionFactory : _detectingConnectionFactories ) { Detecting . 
Detection detection = detectingConnectionFactory . detect ( buffer ) ; if ( detection == Detecting . Detection . RECOGNIZED ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} recognized bytes using {}"" , getProtocol ( ) , detection ) ; return Detecting . Detection . RECOGNIZED ; } needMoreBytes &= detection == Detection . NEED_MORE_BYTES ; } if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} {}"" , getProtocol ( ) , ( needMoreBytes ? ""requires more bytes"" : ""failed to recognize bytes"" ) ) ; return needMoreBytes ? Detection . NEED_MORE_BYTES : Detection . NOT_RECOGNIZED ; } +" +614,"public Detecting . Detection detect ( ByteBuffer buffer ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} detecting from buffer {} using {}"" , getProtocol ( ) , BufferUtil . toHexString ( buffer ) , _detectingConnectionFactories ) ; boolean needMoreBytes = true ; for ( Detecting detectingConnectionFactory : _detectingConnectionFactories ) { Detecting . Detection detection = detectingConnectionFactory . detect ( buffer ) ; if ( detection == Detecting . Detection . RECOGNIZED ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} recognized bytes using {}"" , getProtocol ( ) , detection ) ; return Detecting . Detection . RECOGNIZED ; } needMoreBytes &= detection == Detection . NEED_MORE_BYTES ; } if ( LOG . isDebugEnabled ( ) ) return needMoreBytes ? Detection . NEED_MORE_BYTES : Detection . NOT_RECOGNIZED ; } +","public Detecting . Detection detect ( ByteBuffer buffer ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} detecting from buffer {} using {}"" , getProtocol ( ) , BufferUtil . toHexString ( buffer ) , _detectingConnectionFactories ) ; boolean needMoreBytes = true ; for ( Detecting detectingConnectionFactory : _detectingConnectionFactories ) { Detecting . Detection detection = detectingConnectionFactory . detect ( buffer ) ; if ( detection == Detecting . Detection . RECOGNIZED ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} recognized bytes using {}"" , getProtocol ( ) , detection ) ; return Detecting . Detection . RECOGNIZED ; } needMoreBytes &= detection == Detection . NEED_MORE_BYTES ; } if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""Detector {} {}"" , getProtocol ( ) , ( needMoreBytes ? ""requires more bytes"" : ""failed to recognize bytes"" ) ) ; return needMoreBytes ? Detection . NEED_MORE_BYTES : Detection . NOT_RECOGNIZED ; } +" +615,"public void startTimedTask ( ) { } +","public void startTimedTask ( ) { executorService . scheduleWithFixedDelay ( ( ) -> { try { syncAll ( ) ; } catch ( Exception e ) { logger . error ( ""Sync failed"" , e ) ; } } , SyncConstant . SYNC_PROCESS_DELAY , SyncConstant . SYNC_PROCESS_PERIOD , TimeUnit . SECONDS ) ; } +" +616,"public void startTimedTask ( ) { executorService . scheduleWithFixedDelay ( ( ) -> { try { syncAll ( ) ; } catch ( Exception e ) { } } , SyncConstant . SYNC_PROCESS_DELAY , SyncConstant . SYNC_PROCESS_PERIOD , TimeUnit . SECONDS ) ; } +","public void startTimedTask ( ) { executorService . scheduleWithFixedDelay ( ( ) -> { try { syncAll ( ) ; } catch ( Exception e ) { logger . error ( ""Sync failed"" , e ) ; } } , SyncConstant . SYNC_PROCESS_DELAY , SyncConstant . SYNC_PROCESS_PERIOD , TimeUnit . SECONDS ) ; } +" +617,"public static void deleteOrganization ( long organizationId ) throws RemoteException { try { OrganizationServiceUtil . deleteOrganization ( organizationId ) ; } catch ( Exception exception ) { throw new RemoteException ( exception . 
getMessage ( ) ) ; } } +","public static void deleteOrganization ( long organizationId ) throws RemoteException { try { OrganizationServiceUtil . deleteOrganization ( organizationId ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } } +" +618,"public static String getSchema ( ) { Connection conn = null ; PreparedStatement pstmt = null ; ResultSet resultSet = null ; try { conn = dataSource . getConnection ( ) ; pstmt = conn . prepareStatement ( ""select current_schema()"" ) ; resultSet = pstmt . executeQuery ( ) ; while ( resultSet . next ( ) ) { if ( resultSet . isFirst ( ) ) { return resultSet . getString ( 1 ) ; } } } catch ( SQLException e ) { } finally { ConnectionUtils . releaseResource ( resultSet , pstmt , conn ) ; } return """" ; } +","public static String getSchema ( ) { Connection conn = null ; PreparedStatement pstmt = null ; ResultSet resultSet = null ; try { conn = dataSource . getConnection ( ) ; pstmt = conn . prepareStatement ( ""select current_schema()"" ) ; resultSet = pstmt . executeQuery ( ) ; while ( resultSet . next ( ) ) { if ( resultSet . isFirst ( ) ) { return resultSet . getString ( 1 ) ; } } } catch ( SQLException e ) { logger . error ( e . getMessage ( ) , e ) ; } finally { ConnectionUtils . releaseResource ( resultSet , pstmt , conn ) ; } return """" ; } +" +619,"public void onProcessUnstructuredSSRequest ( ProcessUnstructuredSSRequest procUnstrReqInd ) { TestEvent te = TestEvent . createReceivedEvent ( EventType . ProcessUnstructuredSSRequestIndication , procUnstrReqInd , sequence ++ ) ; this . observerdEvents . add ( te ) ; } +","public void onProcessUnstructuredSSRequest ( ProcessUnstructuredSSRequest procUnstrReqInd ) { this . logger . debug ( ""onProcessUnstructuredSSRequest"" ) ; TestEvent te = TestEvent . createReceivedEvent ( EventType . ProcessUnstructuredSSRequestIndication , procUnstrReqInd , sequence ++ ) ; this . observerdEvents . add ( te ) ; } +" +620,"private boolean downloadBuild ( String fileName , URL downloadLink ) throws IOException { try ( ReadableByteChannel readableByteChannel = Channels . newChannel ( downloadLink . openStream ( ) ) ; FileOutputStream fos = new FileOutputStream ( fileName ) ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { Thread . interrupted ( ) ; } fos . getChannel ( ) . transferFrom ( readableByteChannel , 0 , Long . MAX_VALUE ) ; LOGGER . info ( ""Successfully Transferred..."" ) ; return false ; } catch ( ClosedByInterruptException ie1 ) { LOGGER . info ( ""Retrying...."" ) ; LOGGER . error ( ""Getting Error: "" + ie1 . getMessage ( ) , ie1 ) ; return true ; } } +","private boolean downloadBuild ( String fileName , URL downloadLink ) throws IOException { try ( ReadableByteChannel readableByteChannel = Channels . newChannel ( downloadLink . openStream ( ) ) ; FileOutputStream fos = new FileOutputStream ( fileName ) ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { LOGGER . debug ( String . format ( ""Current Thread (%s) is interrupted, clearing interruption."" , Thread . currentThread ( ) . getId ( ) ) ) ; Thread . interrupted ( ) ; } fos . getChannel ( ) . transferFrom ( readableByteChannel , 0 , Long . MAX_VALUE ) ; LOGGER . info ( ""Successfully Transferred..."" ) ; return false ; } catch ( ClosedByInterruptException ie1 ) { LOGGER . info ( ""Retrying...."" ) ; LOGGER . error ( ""Getting Error: "" + ie1 . 
getMessage ( ) , ie1 ) ; return true ; } } +" +621,"private boolean downloadBuild ( String fileName , URL downloadLink ) throws IOException { try ( ReadableByteChannel readableByteChannel = Channels . newChannel ( downloadLink . openStream ( ) ) ; FileOutputStream fos = new FileOutputStream ( fileName ) ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { LOGGER . debug ( String . format ( ""Current Thread (%s) is interrupted, clearing interruption."" , Thread . currentThread ( ) . getId ( ) ) ) ; Thread . interrupted ( ) ; } fos . getChannel ( ) . transferFrom ( readableByteChannel , 0 , Long . MAX_VALUE ) ; return false ; } catch ( ClosedByInterruptException ie1 ) { LOGGER . info ( ""Retrying...."" ) ; LOGGER . error ( ""Getting Error: "" + ie1 . getMessage ( ) , ie1 ) ; return true ; } } +","private boolean downloadBuild ( String fileName , URL downloadLink ) throws IOException { try ( ReadableByteChannel readableByteChannel = Channels . newChannel ( downloadLink . openStream ( ) ) ; FileOutputStream fos = new FileOutputStream ( fileName ) ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { LOGGER . debug ( String . format ( ""Current Thread (%s) is interrupted, clearing interruption."" , Thread . currentThread ( ) . getId ( ) ) ) ; Thread . interrupted ( ) ; } fos . getChannel ( ) . transferFrom ( readableByteChannel , 0 , Long . MAX_VALUE ) ; LOGGER . info ( ""Successfully Transferred..."" ) ; return false ; } catch ( ClosedByInterruptException ie1 ) { LOGGER . info ( ""Retrying...."" ) ; LOGGER . error ( ""Getting Error: "" + ie1 . getMessage ( ) , ie1 ) ; return true ; } } +" +622,"private boolean downloadBuild ( String fileName , URL downloadLink ) throws IOException { try ( ReadableByteChannel readableByteChannel = Channels . newChannel ( downloadLink . openStream ( ) ) ; FileOutputStream fos = new FileOutputStream ( fileName ) ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { LOGGER . debug ( String . format ( ""Current Thread (%s) is interrupted, clearing interruption."" , Thread . currentThread ( ) . getId ( ) ) ) ; Thread . interrupted ( ) ; } fos . getChannel ( ) . transferFrom ( readableByteChannel , 0 , Long . MAX_VALUE ) ; LOGGER . info ( ""Successfully Transferred..."" ) ; return false ; } catch ( ClosedByInterruptException ie1 ) { LOGGER . error ( ""Getting Error: "" + ie1 . getMessage ( ) , ie1 ) ; return true ; } } +","private boolean downloadBuild ( String fileName , URL downloadLink ) throws IOException { try ( ReadableByteChannel readableByteChannel = Channels . newChannel ( downloadLink . openStream ( ) ) ; FileOutputStream fos = new FileOutputStream ( fileName ) ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { LOGGER . debug ( String . format ( ""Current Thread (%s) is interrupted, clearing interruption."" , Thread . currentThread ( ) . getId ( ) ) ) ; Thread . interrupted ( ) ; } fos . getChannel ( ) . transferFrom ( readableByteChannel , 0 , Long . MAX_VALUE ) ; LOGGER . info ( ""Successfully Transferred..."" ) ; return false ; } catch ( ClosedByInterruptException ie1 ) { LOGGER . info ( ""Retrying...."" ) ; LOGGER . error ( ""Getting Error: "" + ie1 . getMessage ( ) , ie1 ) ; return true ; } } +" +623,"private boolean downloadBuild ( String fileName , URL downloadLink ) throws IOException { try ( ReadableByteChannel readableByteChannel = Channels . newChannel ( downloadLink . openStream ( ) ) ; FileOutputStream fos = new FileOutputStream ( fileName ) ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { LOGGER . 
debug ( String . format ( ""Current Thread (%s) is interrupted, clearing interruption."" , Thread . currentThread ( ) . getId ( ) ) ) ; Thread . interrupted ( ) ; } fos . getChannel ( ) . transferFrom ( readableByteChannel , 0 , Long . MAX_VALUE ) ; LOGGER . info ( ""Successfully Transferred..."" ) ; return false ; } catch ( ClosedByInterruptException ie1 ) { LOGGER . info ( ""Retrying...."" ) ; return true ; } } +","private boolean downloadBuild ( String fileName , URL downloadLink ) throws IOException { try ( ReadableByteChannel readableByteChannel = Channels . newChannel ( downloadLink . openStream ( ) ) ; FileOutputStream fos = new FileOutputStream ( fileName ) ) { if ( Thread . currentThread ( ) . isInterrupted ( ) ) { LOGGER . debug ( String . format ( ""Current Thread (%s) is interrupted, clearing interruption."" , Thread . currentThread ( ) . getId ( ) ) ) ; Thread . interrupted ( ) ; } fos . getChannel ( ) . transferFrom ( readableByteChannel , 0 , Long . MAX_VALUE ) ; LOGGER . info ( ""Successfully Transferred..."" ) ; return false ; } catch ( ClosedByInterruptException ie1 ) { LOGGER . info ( ""Retrying...."" ) ; LOGGER . error ( ""Getting Error: "" + ie1 . getMessage ( ) , ie1 ) ; return true ; } } +" +624,"public UberfireRestResponse updateGroupPermissions ( final String groupName , final UpdateSettingRequest permissionsRequest ) { UberfireRestResponse response = new UberfireRestResponse ( ) ; try { Group group = groupManagerService . get ( groupName ) ; AuthorizationPolicy authzPolicy = permissionManager . getAuthorizationPolicy ( ) ; if ( permissionsRequest . getHomePage ( ) != null && permissionValidator . isValidResourceType ( ActivityResourceType . PERSPECTIVE , permissionsRequest . getHomePage ( ) ) ) { authzPolicy . setHomePerspective ( group , permissionsRequest . getHomePage ( ) ) ; } if ( permissionsRequest . getPriority ( ) != null ) { authzPolicy . setPriority ( group , permissionsRequest . getPriority ( ) ) ; } PermissionCollection pc = authzPolicy . getPermissions ( group ) ; generatePermissionCollection ( pc , permissionsRequest ) ; authzPolicy . setPermissions ( group , pc ) ; authorizationService . savePolicy ( authzPolicy ) ; response . setStatus ( Response . Status . OK ) ; response . setMessage ( ""Group "" + groupName + "" permissions are updated successfully."" ) ; } catch ( GroupNotFoundException e ) { response . setStatus ( Response . Status . BAD_REQUEST ) ; response . setMessage ( ""Group with name "" + groupName + ""doesn't exists"" ) ; } catch ( Exception e ) { String errMsg = e . getClass ( ) . getSimpleName ( ) + "" thrown when trying to update permissions for '"" + groupName + ""': "" + e . getMessage ( ) ; response . setStatus ( Response . Status . INTERNAL_SERVER_ERROR ) ; response . setMessage ( errMsg ) ; } return response ; } +","public UberfireRestResponse updateGroupPermissions ( final String groupName , final UpdateSettingRequest permissionsRequest ) { UberfireRestResponse response = new UberfireRestResponse ( ) ; try { Group group = groupManagerService . get ( groupName ) ; AuthorizationPolicy authzPolicy = permissionManager . getAuthorizationPolicy ( ) ; if ( permissionsRequest . getHomePage ( ) != null && permissionValidator . isValidResourceType ( ActivityResourceType . PERSPECTIVE , permissionsRequest . getHomePage ( ) ) ) { authzPolicy . setHomePerspective ( group , permissionsRequest . getHomePage ( ) ) ; } if ( permissionsRequest . getPriority ( ) != null ) { authzPolicy . setPriority ( group , permissionsRequest . 
getPriority ( ) ) ; } PermissionCollection pc = authzPolicy . getPermissions ( group ) ; generatePermissionCollection ( pc , permissionsRequest ) ; authzPolicy . setPermissions ( group , pc ) ; authorizationService . savePolicy ( authzPolicy ) ; response . setStatus ( Response . Status . OK ) ; response . setMessage ( ""Group "" + groupName + "" permissions are updated successfully."" ) ; } catch ( GroupNotFoundException e ) { response . setStatus ( Response . Status . BAD_REQUEST ) ; response . setMessage ( ""Group with name "" + groupName + ""doesn't exists"" ) ; } catch ( Exception e ) { String errMsg = e . getClass ( ) . getSimpleName ( ) + "" thrown when trying to update permissions for '"" + groupName + ""': "" + e . getMessage ( ) ; logger . error ( errMsg , e ) ; response . setStatus ( Response . Status . INTERNAL_SERVER_ERROR ) ; response . setMessage ( errMsg ) ; } return response ; } +" +625,"public void exec ( final Agent agent ) throws IOException { final XML before = new StrictXML ( Deck . UPGRADE . transform ( new XMLDocument ( new File ( this . path ) ) ) , Deck . SCHEMA ) ; final XML after = new XMLDocument ( new Xembler ( agent . exec ( before ) ) . applyQuietly ( before . node ( ) ) ) ; FileUtils . write ( new File ( this . path ) , new StrictXML ( after , Deck . SCHEMA ) . toString ( ) , CharEncoding . UTF_8 ) ; } +","public void exec ( final Agent agent ) throws IOException { final XML before = new StrictXML ( Deck . UPGRADE . transform ( new XMLDocument ( new File ( this . path ) ) ) , Deck . SCHEMA ) ; final XML after = new XMLDocument ( new Xembler ( agent . exec ( before ) ) . applyQuietly ( before . node ( ) ) ) ; FileUtils . write ( new File ( this . path ) , new StrictXML ( after , Deck . SCHEMA ) . toString ( ) , CharEncoding . UTF_8 ) ; Logger . info ( this , ""deck saved to %s (%d bytes):\n%s"" , this . path , new File ( this . path ) . length ( ) , after ) ; } +" +626,"protected Locale resolveLocale ( SiteContext siteContext , HttpServletRequest request ) { if ( isNotEmpty ( request . getHeader ( headerName ) ) ) { Enumeration < Locale > locales = request . getLocales ( ) ; while ( locales . hasMoreElements ( ) ) { Locale locale = locales . nextElement ( ) ; if ( isSupported ( locale ) ) { return locale ; } else { logger . debug ( ""Locale '{}' requested by the client is not supported, will be skipped"" , locale ) ; } } } else { logger . debug ( ""The request doesn't include a '{}' header, will be skipped"" , headerName ) ; } return null ; } +","protected Locale resolveLocale ( SiteContext siteContext , HttpServletRequest request ) { if ( isNotEmpty ( request . getHeader ( headerName ) ) ) { Enumeration < Locale > locales = request . getLocales ( ) ; while ( locales . hasMoreElements ( ) ) { Locale locale = locales . nextElement ( ) ; if ( isSupported ( locale ) ) { logger . debug ( ""Found supported locale '{}' requested by the client"" , locale ) ; return locale ; } else { logger . debug ( ""Locale '{}' requested by the client is not supported, will be skipped"" , locale ) ; } } } else { logger . debug ( ""The request doesn't include a '{}' header, will be skipped"" , headerName ) ; } return null ; } +" +627,"protected Locale resolveLocale ( SiteContext siteContext , HttpServletRequest request ) { if ( isNotEmpty ( request . getHeader ( headerName ) ) ) { Enumeration < Locale > locales = request . getLocales ( ) ; while ( locales . hasMoreElements ( ) ) { Locale locale = locales . nextElement ( ) ; if ( isSupported ( locale ) ) { logger . 
debug ( ""Found supported locale '{}' requested by the client"" , locale ) ; return locale ; } else { } } } else { logger . debug ( ""The request doesn't include a '{}' header, will be skipped"" , headerName ) ; } return null ; } +","protected Locale resolveLocale ( SiteContext siteContext , HttpServletRequest request ) { if ( isNotEmpty ( request . getHeader ( headerName ) ) ) { Enumeration < Locale > locales = request . getLocales ( ) ; while ( locales . hasMoreElements ( ) ) { Locale locale = locales . nextElement ( ) ; if ( isSupported ( locale ) ) { logger . debug ( ""Found supported locale '{}' requested by the client"" , locale ) ; return locale ; } else { logger . debug ( ""Locale '{}' requested by the client is not supported, will be skipped"" , locale ) ; } } } else { logger . debug ( ""The request doesn't include a '{}' header, will be skipped"" , headerName ) ; } return null ; } +" +628,"protected Locale resolveLocale ( SiteContext siteContext , HttpServletRequest request ) { if ( isNotEmpty ( request . getHeader ( headerName ) ) ) { Enumeration < Locale > locales = request . getLocales ( ) ; while ( locales . hasMoreElements ( ) ) { Locale locale = locales . nextElement ( ) ; if ( isSupported ( locale ) ) { logger . debug ( ""Found supported locale '{}' requested by the client"" , locale ) ; return locale ; } else { logger . debug ( ""Locale '{}' requested by the client is not supported, will be skipped"" , locale ) ; } } } else { } return null ; } +","protected Locale resolveLocale ( SiteContext siteContext , HttpServletRequest request ) { if ( isNotEmpty ( request . getHeader ( headerName ) ) ) { Enumeration < Locale > locales = request . getLocales ( ) ; while ( locales . hasMoreElements ( ) ) { Locale locale = locales . nextElement ( ) ; if ( isSupported ( locale ) ) { logger . debug ( ""Found supported locale '{}' requested by the client"" , locale ) ; return locale ; } else { logger . debug ( ""Locale '{}' requested by the client is not supported, will be skipped"" , locale ) ; } } } else { logger . debug ( ""The request doesn't include a '{}' header, will be skipped"" , headerName ) ; } return null ; } +" +629,"public void setIdleTime ( String idleTime ) { this . idleTime = idleTime ; } +","public void setIdleTime ( String idleTime ) { log . warn ( ""Parameter 'idleTime' is not supported anymore. "" + ""Please use 'maxIdleTime' in the repository configuration."" ) ; this . idleTime = idleTime ; } +" +630,"public boolean supports ( AuthenticationToken token ) { boolean supported = token != null && token . getCredentials ( ) != null && token instanceof SAMLAuthenticationToken ; if ( supported ) { } else if ( token != null ) { LOGGER . debug ( ""Token {} is not supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else { LOGGER . debug ( ""The supplied authentication token is null. Sending back not supported."" ) ; } return supported ; } +","public boolean supports ( AuthenticationToken token ) { boolean supported = token != null && token . getCredentials ( ) != null && token instanceof SAMLAuthenticationToken ; if ( supported ) { LOGGER . debug ( ""Token {} is supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else if ( token != null ) { LOGGER . debug ( ""Token {} is not supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else { LOGGER . debug ( ""The supplied authentication token is null. 
Sending back not supported."" ) ; } return supported ; } +" +631,"public boolean supports ( AuthenticationToken token ) { boolean supported = token != null && token . getCredentials ( ) != null && token instanceof SAMLAuthenticationToken ; if ( supported ) { LOGGER . debug ( ""Token {} is supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else if ( token != null ) { } else { LOGGER . debug ( ""The supplied authentication token is null. Sending back not supported."" ) ; } return supported ; } +","public boolean supports ( AuthenticationToken token ) { boolean supported = token != null && token . getCredentials ( ) != null && token instanceof SAMLAuthenticationToken ; if ( supported ) { LOGGER . debug ( ""Token {} is supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else if ( token != null ) { LOGGER . debug ( ""Token {} is not supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else { LOGGER . debug ( ""The supplied authentication token is null. Sending back not supported."" ) ; } return supported ; } +" +632,"public boolean supports ( AuthenticationToken token ) { boolean supported = token != null && token . getCredentials ( ) != null && token instanceof SAMLAuthenticationToken ; if ( supported ) { LOGGER . debug ( ""Token {} is supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else if ( token != null ) { LOGGER . debug ( ""Token {} is not supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else { } return supported ; } +","public boolean supports ( AuthenticationToken token ) { boolean supported = token != null && token . getCredentials ( ) != null && token instanceof SAMLAuthenticationToken ; if ( supported ) { LOGGER . debug ( ""Token {} is supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else if ( token != null ) { LOGGER . debug ( ""Token {} is not supported by {}."" , token . getClass ( ) , SamlRealm . class . getName ( ) ) ; } else { LOGGER . debug ( ""The supplied authentication token is null. Sending back not supported."" ) ; } return supported ; } +" +633,"public static Stream < String > getTopLevelClassesInClasspath ( Class < ? > clazz ) { ClassLoader classLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; String packageName = clazz . getPackage ( ) . getName ( ) ; String path = packageName . replace ( '.' , '/' ) ; Enumeration < URL > resources = null ; try { resources = classLoader . getResources ( path ) ; } catch ( IOException e ) { } List < File > directories = new ArrayList < > ( ) ; while ( Objects . requireNonNull ( resources ) . hasMoreElements ( ) ) { URL resource = resources . nextElement ( ) ; try { directories . add ( new File ( resource . toURI ( ) ) ) ; } catch ( URISyntaxException e ) { LOG . error ( ""Unable to get "" + e . getMessage ( ) ) ; } } List < String > classes = new ArrayList < > ( ) ; for ( File directory : directories ) { classes . addAll ( findClasses ( directory , packageName ) ) ; } return classes . stream ( ) ; } +","public static Stream < String > getTopLevelClassesInClasspath ( Class < ? > clazz ) { ClassLoader classLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; String packageName = clazz . getPackage ( ) . getName ( ) ; String path = packageName . replace ( '.' , '/' ) ; Enumeration < URL > resources = null ; try { resources = classLoader . getResources ( path ) ; } catch ( IOException e ) { LOG . 
error ( ""Unable to fetch Resources in package "" + e . getMessage ( ) ) ; } List < File > directories = new ArrayList < > ( ) ; while ( Objects . requireNonNull ( resources ) . hasMoreElements ( ) ) { URL resource = resources . nextElement ( ) ; try { directories . add ( new File ( resource . toURI ( ) ) ) ; } catch ( URISyntaxException e ) { LOG . error ( ""Unable to get "" + e . getMessage ( ) ) ; } } List < String > classes = new ArrayList < > ( ) ; for ( File directory : directories ) { classes . addAll ( findClasses ( directory , packageName ) ) ; } return classes . stream ( ) ; } +" +634,"public static Stream < String > getTopLevelClassesInClasspath ( Class < ? > clazz ) { ClassLoader classLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; String packageName = clazz . getPackage ( ) . getName ( ) ; String path = packageName . replace ( '.' , '/' ) ; Enumeration < URL > resources = null ; try { resources = classLoader . getResources ( path ) ; } catch ( IOException e ) { LOG . error ( ""Unable to fetch Resources in package "" + e . getMessage ( ) ) ; } List < File > directories = new ArrayList < > ( ) ; while ( Objects . requireNonNull ( resources ) . hasMoreElements ( ) ) { URL resource = resources . nextElement ( ) ; try { directories . add ( new File ( resource . toURI ( ) ) ) ; } catch ( URISyntaxException e ) { } } List < String > classes = new ArrayList < > ( ) ; for ( File directory : directories ) { classes . addAll ( findClasses ( directory , packageName ) ) ; } return classes . stream ( ) ; } +","public static Stream < String > getTopLevelClassesInClasspath ( Class < ? > clazz ) { ClassLoader classLoader = Thread . currentThread ( ) . getContextClassLoader ( ) ; String packageName = clazz . getPackage ( ) . getName ( ) ; String path = packageName . replace ( '.' , '/' ) ; Enumeration < URL > resources = null ; try { resources = classLoader . getResources ( path ) ; } catch ( IOException e ) { LOG . error ( ""Unable to fetch Resources in package "" + e . getMessage ( ) ) ; } List < File > directories = new ArrayList < > ( ) ; while ( Objects . requireNonNull ( resources ) . hasMoreElements ( ) ) { URL resource = resources . nextElement ( ) ; try { directories . add ( new File ( resource . toURI ( ) ) ) ; } catch ( URISyntaxException e ) { LOG . error ( ""Unable to get "" + e . getMessage ( ) ) ; } } List < String > classes = new ArrayList < > ( ) ; for ( File directory : directories ) { classes . addAll ( findClasses ( directory , packageName ) ) ; } return classes . stream ( ) ; } +" +635,"@ PostConstruct public void init ( ) { logger = LoggerFactory . getLogger ( getClass ( ) ) ; attributes = Attributes . getComponentAttributesFromFacesConfig ( UIDragIndicator . class , getClass ( ) ) ; attributes . setAttribute ( ""rendered"" , true ) ; } +","@ PostConstruct public void init ( ) { logger = LoggerFactory . getLogger ( getClass ( ) ) ; logger . debug ( ""initializing bean "" + getClass ( ) . getName ( ) ) ; attributes = Attributes . getComponentAttributesFromFacesConfig ( UIDragIndicator . class , getClass ( ) ) ; attributes . setAttribute ( ""rendered"" , true ) ; } +" +636,"@ RestAccessControl ( permission = Permission . SUPERUSER ) @ RequestMapping ( value = ""/directory"" , method = RequestMethod . DELETE , produces = MediaType . APPLICATION_JSON_VALUE ) public ResponseEntity < RestResponse < Map , Map > > deleteDirectory ( @ RequestParam String currentPath , @ RequestParam Boolean protectedFolder ) { this . getFileBrowserService ( ) . 
deleteDirectory ( currentPath , protectedFolder ) ; return this . executeDirectoryRespose ( currentPath , protectedFolder ) ; } +","@ RestAccessControl ( permission = Permission . SUPERUSER ) @ RequestMapping ( value = ""/directory"" , method = RequestMethod . DELETE , produces = MediaType . APPLICATION_JSON_VALUE ) public ResponseEntity < RestResponse < Map , Map > > deleteDirectory ( @ RequestParam String currentPath , @ RequestParam Boolean protectedFolder ) { logger . debug ( ""delete directory {} - protected {}"" , currentPath , protectedFolder ) ; this . getFileBrowserService ( ) . deleteDirectory ( currentPath , protectedFolder ) ; return this . executeDirectoryRespose ( currentPath , protectedFolder ) ; } +" +637,"void put ( Source < KeyValueSlice > source , long size ) throws IOException , InterruptedException { long t0 = - 1 ; if ( LOG . isDebugEnabled ( ) ) { t0 = System . currentTimeMillis ( ) ; } File file = createTemporaryFile ( ) ; boolean success = false ; try { try ( DataOutputStream output = createBlockFileOutput ( file ) ) { while ( source . next ( ) ) { KeyValueSlice slice = source . get ( ) ; output . writeInt ( slice . getKeyLength ( ) ) ; output . writeInt ( slice . getValueLength ( ) ) ; output . write ( slice . getBytes ( ) , slice . getSliceOffset ( ) , slice . getSliceLength ( ) ) ; } output . writeInt ( - 1 ) ; } files . add ( file ) ; success = true ; } finally { if ( success == false ) { deleteTemporaryFile ( file ) ; } } if ( LOG . isDebugEnabled ( ) ) { long t1 = System . currentTimeMillis ( ) ; } totalSize += size ; } +","void put ( Source < KeyValueSlice > source , long size ) throws IOException , InterruptedException { long t0 = - 1 ; if ( LOG . isDebugEnabled ( ) ) { t0 = System . currentTimeMillis ( ) ; } File file = createTemporaryFile ( ) ; boolean success = false ; try { try ( DataOutputStream output = createBlockFileOutput ( file ) ) { while ( source . next ( ) ) { KeyValueSlice slice = source . get ( ) ; output . writeInt ( slice . getKeyLength ( ) ) ; output . writeInt ( slice . getValueLength ( ) ) ; output . write ( slice . getBytes ( ) , slice . getSliceOffset ( ) , slice . getSliceLength ( ) ) ; } output . writeInt ( - 1 ) ; } files . add ( file ) ; success = true ; } finally { if ( success == false ) { deleteTemporaryFile ( file ) ; } } if ( LOG . isDebugEnabled ( ) ) { long t1 = System . currentTimeMillis ( ) ; LOG . debug ( MessageFormat . format ( ""saved block file: {0} (data={1}->{2}bytes, compress={3}, elapsed={4}ms)"" , file , size , file . length ( ) , compress , t1 - t0 ) ) ; } totalSize += size ; } +" +638,"public Collection < InputFile > next ( SizeValidator validator ) { int size = inputs . size ( ) ; if ( size == 0 ) return Collections . EMPTY_SET ; Collection < InputFile > list = new HashSet < > ( ) ; if ( size < fc . getMaxFlags ( ) ) { list . addAll ( inputs ) ; inputs . clear ( ) ; } else { int count = 0 ; Iterator < InputFile > it = inputs . iterator ( ) ; while ( it . hasNext ( ) && ( count < fc . getMaxFlags ( ) ) ) { InputFile inFile = it . next ( ) ; int maps = inFile . getMaps ( ) ; if ( maps > fc . getMaxFlags ( ) ) { } count += maps ; if ( count > fc . getMaxFlags ( ) ) { break ; } list . add ( inFile ) ; if ( validator . isValidSize ( fc , list ) || ( list . size ( ) == 1 ) ) { it . remove ( ) ; } else { list . remove ( inFile ) ; break ; } } } return list ; } +","public Collection < InputFile > next ( SizeValidator validator ) { int size = inputs . size ( ) ; if ( size == 0 ) return Collections . 
EMPTY_SET ; Collection < InputFile > list = new HashSet < > ( ) ; if ( size < fc . getMaxFlags ( ) ) { list . addAll ( inputs ) ; inputs . clear ( ) ; } else { int count = 0 ; Iterator < InputFile > it = inputs . iterator ( ) ; while ( it . hasNext ( ) && ( count < fc . getMaxFlags ( ) ) ) { InputFile inFile = it . next ( ) ; int maps = inFile . getMaps ( ) ; if ( maps > fc . getMaxFlags ( ) ) { log . warn ( ""Estimated map count ({}) for file exceeds maxFlags ({}). Consider increasing maxFlags to accommodate larger files, or split this file into smaller chunks. File: {}"" , maps , fc . getMaxFlags ( ) , inFile . getFileName ( ) ) ; } count += maps ; if ( count > fc . getMaxFlags ( ) ) { break ; } list . add ( inFile ) ; if ( validator . isValidSize ( fc , list ) || ( list . size ( ) == 1 ) ) { it . remove ( ) ; } else { list . remove ( inFile ) ; break ; } } } return list ; } +" +639,"public void invoke ( List < TwitterListener > listeners ) throws TwitterException { ResponseList < Status > statuses = twitter . getRetweets ( statusId ) ; for ( TwitterListener listener : listeners ) { try { listener . gotRetweets ( statuses ) ; } catch ( Exception e ) { } } } +","public void invoke ( List < TwitterListener > listeners ) throws TwitterException { ResponseList < Status > statuses = twitter . getRetweets ( statusId ) ; for ( TwitterListener listener : listeners ) { try { listener . gotRetweets ( statuses ) ; } catch ( Exception e ) { logger . warn ( ""Exception at getRetweets"" , e ) ; } } } +" +640,"private void cancelRefreshJob ( ) { synchronized ( this ) { if ( refreshJob != null ) { refreshJob . cancel ( true ) ; refreshJob = null ; } } } +","private void cancelRefreshJob ( ) { synchronized ( this ) { if ( refreshJob != null ) { logger . debug ( ""Cancelling refresh job"" ) ; refreshJob . cancel ( true ) ; refreshJob = null ; } } } +" +641,"public void update ( EventBean [ ] arg0 , EventBean [ ] arg1 ) { for ( EventBean bean : arg0 ) { try { beanForwardQueue . put ( bean ) ; } catch ( InterruptedException e ) { return ; } } } +","public void update ( EventBean [ ] arg0 , EventBean [ ] arg1 ) { for ( EventBean bean : arg0 ) { try { beanForwardQueue . put ( bean ) ; } catch ( InterruptedException e ) { log . error ( ""Could not update due to '{}', exception '{}'"" , e . getMessage ( ) , e ) ; return ; } } } +" +642,"protected final synchronized void enterState ( final DestState current , final DestState next ) throws IllegalStateException { verifyState ( current ) ; state = next ; } +","protected final synchronized void enterState ( final DestState current , final DestState next ) throws IllegalStateException { verifyState ( current ) ; LOG . debug ( ""{}: entering state {}"" , this , next ) ; state = next ; } +" +643,"private void invokeListener ( final Runnable call ) { try { call . run ( ) ; } catch ( final RuntimeException ex ) { } } +","private void invokeListener ( final Runnable call ) { try { call . run ( ) ; } catch ( final RuntimeException ex ) { LOG . log ( Level . WARNING , ""Got RuntimeException when invoking listener for path "" + pathToWatch , ex ) ; } } +" +644,"private void readOldEntries ( ) { try ( InputStream is = Files . newInputStream ( historyFilePath ) ; InputStreamReader isr = new InputStreamReader ( is ) ; LineNumberReader in = new LineNumberReader ( isr ) ) { String entryLine ; while ( ( entryLine = in . readLine ( ) ) != null ) { MVUsedUrl oldHistoryEntry = MVUsedUrl . getUrlAusZeile ( entryLine ) ; final var url = oldHistoryEntry . getUrl ( ) ; if ( url . 
startsWith ( ""rtmp:"" ) ) { continue ; } var okHttpUrl = HttpUrl . parse ( url ) ; if ( okHttpUrl == null ) { continue ; } if ( oldHistoryEntry . getDatum ( ) . isBlank ( ) ) continue ; historyEntries . add ( oldHistoryEntry ) ; } } catch ( Exception ex ) { logger . error ( ""readOldEntries()"" , ex ) ; } logger . trace ( ""historyEntries size: {}"" , historyEntries . size ( ) ) ; } +","private void readOldEntries ( ) { logger . trace ( ""Reading old entries"" ) ; try ( InputStream is = Files . newInputStream ( historyFilePath ) ; InputStreamReader isr = new InputStreamReader ( is ) ; LineNumberReader in = new LineNumberReader ( isr ) ) { String entryLine ; while ( ( entryLine = in . readLine ( ) ) != null ) { MVUsedUrl oldHistoryEntry = MVUsedUrl . getUrlAusZeile ( entryLine ) ; final var url = oldHistoryEntry . getUrl ( ) ; if ( url . startsWith ( ""rtmp:"" ) ) { continue ; } var okHttpUrl = HttpUrl . parse ( url ) ; if ( okHttpUrl == null ) { continue ; } if ( oldHistoryEntry . getDatum ( ) . isBlank ( ) ) continue ; historyEntries . add ( oldHistoryEntry ) ; } } catch ( Exception ex ) { logger . error ( ""readOldEntries()"" , ex ) ; } logger . trace ( ""historyEntries size: {}"" , historyEntries . size ( ) ) ; } +" +645,"private void readOldEntries ( ) { logger . trace ( ""Reading old entries"" ) ; try ( InputStream is = Files . newInputStream ( historyFilePath ) ; InputStreamReader isr = new InputStreamReader ( is ) ; LineNumberReader in = new LineNumberReader ( isr ) ) { String entryLine ; while ( ( entryLine = in . readLine ( ) ) != null ) { MVUsedUrl oldHistoryEntry = MVUsedUrl . getUrlAusZeile ( entryLine ) ; final var url = oldHistoryEntry . getUrl ( ) ; if ( url . startsWith ( ""rtmp:"" ) ) { continue ; } var okHttpUrl = HttpUrl . parse ( url ) ; if ( okHttpUrl == null ) { continue ; } if ( oldHistoryEntry . getDatum ( ) . isBlank ( ) ) continue ; historyEntries . add ( oldHistoryEntry ) ; } } catch ( Exception ex ) { } logger . trace ( ""historyEntries size: {}"" , historyEntries . size ( ) ) ; } +","private void readOldEntries ( ) { logger . trace ( ""Reading old entries"" ) ; try ( InputStream is = Files . newInputStream ( historyFilePath ) ; InputStreamReader isr = new InputStreamReader ( is ) ; LineNumberReader in = new LineNumberReader ( isr ) ) { String entryLine ; while ( ( entryLine = in . readLine ( ) ) != null ) { MVUsedUrl oldHistoryEntry = MVUsedUrl . getUrlAusZeile ( entryLine ) ; final var url = oldHistoryEntry . getUrl ( ) ; if ( url . startsWith ( ""rtmp:"" ) ) { continue ; } var okHttpUrl = HttpUrl . parse ( url ) ; if ( okHttpUrl == null ) { continue ; } if ( oldHistoryEntry . getDatum ( ) . isBlank ( ) ) continue ; historyEntries . add ( oldHistoryEntry ) ; } } catch ( Exception ex ) { logger . error ( ""readOldEntries()"" , ex ) ; } logger . trace ( ""historyEntries size: {}"" , historyEntries . size ( ) ) ; } +" +646,"private void readOldEntries ( ) { logger . trace ( ""Reading old entries"" ) ; try ( InputStream is = Files . newInputStream ( historyFilePath ) ; InputStreamReader isr = new InputStreamReader ( is ) ; LineNumberReader in = new LineNumberReader ( isr ) ) { String entryLine ; while ( ( entryLine = in . readLine ( ) ) != null ) { MVUsedUrl oldHistoryEntry = MVUsedUrl . getUrlAusZeile ( entryLine ) ; final var url = oldHistoryEntry . getUrl ( ) ; if ( url . startsWith ( ""rtmp:"" ) ) { continue ; } var okHttpUrl = HttpUrl . parse ( url ) ; if ( okHttpUrl == null ) { continue ; } if ( oldHistoryEntry . getDatum ( ) . 
isBlank ( ) ) continue ; historyEntries . add ( oldHistoryEntry ) ; } } catch ( Exception ex ) { logger . error ( ""readOldEntries()"" , ex ) ; } } +","private void readOldEntries ( ) { logger . trace ( ""Reading old entries"" ) ; try ( InputStream is = Files . newInputStream ( historyFilePath ) ; InputStreamReader isr = new InputStreamReader ( is ) ; LineNumberReader in = new LineNumberReader ( isr ) ) { String entryLine ; while ( ( entryLine = in . readLine ( ) ) != null ) { MVUsedUrl oldHistoryEntry = MVUsedUrl . getUrlAusZeile ( entryLine ) ; final var url = oldHistoryEntry . getUrl ( ) ; if ( url . startsWith ( ""rtmp:"" ) ) { continue ; } var okHttpUrl = HttpUrl . parse ( url ) ; if ( okHttpUrl == null ) { continue ; } if ( oldHistoryEntry . getDatum ( ) . isBlank ( ) ) continue ; historyEntries . add ( oldHistoryEntry ) ; } } catch ( Exception ex ) { logger . error ( ""readOldEntries()"" , ex ) ; } logger . trace ( ""historyEntries size: {}"" , historyEntries . size ( ) ) ; } +" +647,"private Class < ? > determineExpectedClass ( final Node node , final Class < ? > fallbackType ) { final NamedNodeMap attributes = node . getAttributes ( ) ; if ( attributes != null ) { final Node attribute = attributes . getNamedItem ( ""class"" ) ; if ( attribute != null ) { final String className = attribute . getTextContent ( ) ; if ( ! StringUtils . isNullOrEmpty ( className ) ) { try { return Class . forName ( className ) ; } catch ( final ClassNotFoundException e ) { } } } } return fallbackType ; } +","private Class < ? > determineExpectedClass ( final Node node , final Class < ? > fallbackType ) { final NamedNodeMap attributes = node . getAttributes ( ) ; if ( attributes != null ) { final Node attribute = attributes . getNamedItem ( ""class"" ) ; if ( attribute != null ) { final String className = attribute . getTextContent ( ) ; if ( ! StringUtils . isNullOrEmpty ( className ) ) { try { return Class . forName ( className ) ; } catch ( final ClassNotFoundException e ) { logger . error ( ""Could not load class: "" + className + "". Falling back to String type."" , e ) ; } } } } return fallbackType ; } +" +648,"private ListenableFuture < ResultSet > rollupOneFromChildren ( int rollupLevel , String agentRollupId , String gaugeName , List < String > childAgentRollupIds , long captureTime , int adjustedTTL ) throws Exception { List < ListenableFuture < ResultSet > > futures = new ArrayList < > ( ) ; for ( String childAgentRollupId : childAgentRollupIds ) { BoundStatement boundStatement = readValueForRollupFromChildPS . bind ( ) ; int i = 0 ; boundStatement . setString ( i ++ , childAgentRollupId ) ; boundStatement . setString ( i ++ , gaugeName ) ; boundStatement . setTimestamp ( i ++ , new Date ( captureTime ) ) ; futures . add ( session . executeAsync ( boundStatement ) ) ; } return Futures . transformAsync ( Futures . allAsList ( futures ) , new AsyncFunction < List < ResultSet > , ResultSet > ( ) { @ Override public ListenableFuture < ResultSet > apply ( @ Nullable List < ResultSet > results ) throws Exception { checkNotNull ( results ) ; List < Row > rows = new ArrayList < > ( ) ; for ( int i = 0 ; i < results . size ( ) ; i ++ ) { Row row = results . get ( i ) . one ( ) ; if ( row == null ) { } else { rows . add ( row ) ; } } if ( rows . isEmpty ( ) ) { return Futures . immediateFuture ( DummyResultSet . INSTANCE ) ; } return rollupOneFromRows ( rollupLevel , agentRollupId , gaugeName , captureTime , adjustedTTL , rows ) ; } } , MoreExecutors . 
directExecutor ( ) ) ; } +","private ListenableFuture < ResultSet > rollupOneFromChildren ( int rollupLevel , String agentRollupId , String gaugeName , List < String > childAgentRollupIds , long captureTime , int adjustedTTL ) throws Exception { List < ListenableFuture < ResultSet > > futures = new ArrayList < > ( ) ; for ( String childAgentRollupId : childAgentRollupIds ) { BoundStatement boundStatement = readValueForRollupFromChildPS . bind ( ) ; int i = 0 ; boundStatement . setString ( i ++ , childAgentRollupId ) ; boundStatement . setString ( i ++ , gaugeName ) ; boundStatement . setTimestamp ( i ++ , new Date ( captureTime ) ) ; futures . add ( session . executeAsync ( boundStatement ) ) ; } return Futures . transformAsync ( Futures . allAsList ( futures ) , new AsyncFunction < List < ResultSet > , ResultSet > ( ) { @ Override public ListenableFuture < ResultSet > apply ( @ Nullable List < ResultSet > results ) throws Exception { checkNotNull ( results ) ; List < Row > rows = new ArrayList < > ( ) ; for ( int i = 0 ; i < results . size ( ) ; i ++ ) { Row row = results . get ( i ) . one ( ) ; if ( row == null ) { logger . warn ( ""no gauge value table records found for agentRollupId={},"" + "" gaugeName={}, captureTime={}, level={}"" , childAgentRollupIds . get ( i ) , gaugeName , captureTime , rollupLevel ) ; } else { rows . add ( row ) ; } } if ( rows . isEmpty ( ) ) { return Futures . immediateFuture ( DummyResultSet . INSTANCE ) ; } return rollupOneFromRows ( rollupLevel , agentRollupId , gaugeName , captureTime , adjustedTTL , rows ) ; } } , MoreExecutors . directExecutor ( ) ) ; } +" +649,"public boolean clientConnected ( Client client ) { if ( ! clients . containsKey ( client . getClientId ( ) ) ) { clients . putIfAbsent ( client . getClientId ( ) , ( IpPortBasedClient ) client ) ; } return true ; } +","public boolean clientConnected ( Client client ) { Loggers . SRV_LOG . info ( ""Client connection {} connect"" , client . getClientId ( ) ) ; if ( ! clients . containsKey ( client . getClientId ( ) ) ) { clients . putIfAbsent ( client . getClientId ( ) , ( IpPortBasedClient ) client ) ; } return true ; } +" +650,"public Map < String , Class > mapAllClasses ( String uri ) throws IOException , ClassNotFoundException { Map < String , String > map = mapAllStrings ( uri ) ; Map < String , Class > classes = new HashMap < > ( map != null ? map . size ( ) : 0 ) ; if ( map == null ) { throw new IllegalStateException ( ""Null strings map produced for uri: "" + uri ) ; } for ( Map . Entry < String , String > entry : map . entrySet ( ) ) { String string = entry . getKey ( ) ; String className = entry . getValue ( ) ; Class clazz = classLoaderInterface . loadClass ( className ) ; classes . put ( string , clazz ) ; } return classes ; } +","public Map < String , Class > mapAllClasses ( String uri ) throws IOException , ClassNotFoundException { Map < String , String > map = mapAllStrings ( uri ) ; Map < String , Class > classes = new HashMap < > ( map != null ? map . size ( ) : 0 ) ; if ( map == null ) { LOG . trace ( ""Null strings map for [{}], should not be possible!"" , uri ) ; throw new IllegalStateException ( ""Null strings map produced for uri: "" + uri ) ; } for ( Map . Entry < String , String > entry : map . entrySet ( ) ) { String string = entry . getKey ( ) ; String className = entry . getValue ( ) ; Class clazz = classLoaderInterface . loadClass ( className ) ; classes . 
put ( string , clazz ) ; } return classes ; } +" +651,"public void warn ( Object msg ) { try { } catch ( Exception exception ) { printMsg ( msg ) ; } } +","public void warn ( Object msg ) { try { _log . warn ( msg ) ; } catch ( Exception exception ) { printMsg ( msg ) ; } } +" +652,"@ RequestMapping ( value = ""/getAllCrisisTypes.action"" , method = { RequestMethod . GET } ) @ ResponseBody public Map < String , Object > getAllCrisis ( ) { try { return getUIWrapper ( taggerService . getAllCrisisTypes ( ) , true ) ; } catch ( AidrException e ) { return getUIWrapper ( false , e . getMessage ( ) ) ; } } +","@ RequestMapping ( value = ""/getAllCrisisTypes.action"" , method = { RequestMethod . GET } ) @ ResponseBody public Map < String , Object > getAllCrisis ( ) { try { return getUIWrapper ( taggerService . getAllCrisisTypes ( ) , true ) ; } catch ( AidrException e ) { logger . error ( ""Error while fetching all crisisTypes"" , e ) ; return getUIWrapper ( false , e . getMessage ( ) ) ; } } +" +653,"public Void call ( ) throws ChunkException { if ( ! myInitialized ) { throw new IllegalStateException ( ""Not initialized."" ) ; } myChunkProvider = VideoChunkProviderFactory . getInstance ( ) . createProvider ( myStream , myStreamSpan . getStart ( ) , myApproxSizeMS , myFeedName ) ; boolean success = myChunkProvider . provideChunks ( this , myNonVideoContentHandler ) ; if ( ! success ) { myChunkProvider = VideoChunkProviderFactory . getInstance ( ) . createFailOverProvider ( myChunkProvider , myFeedName ) ; if ( myChunkProvider != null ) { myChunkProvider . provideChunks ( this , myNonVideoContentHandler ) ; } else { LOGGER . error ( ""Failed to transcode stream "" + myFeedName ) ; } } return null ; } +","public Void call ( ) throws ChunkException { if ( ! myInitialized ) { throw new IllegalStateException ( ""Not initialized."" ) ; } myChunkProvider = VideoChunkProviderFactory . getInstance ( ) . createProvider ( myStream , myStreamSpan . getStart ( ) , myApproxSizeMS , myFeedName ) ; boolean success = myChunkProvider . provideChunks ( this , myNonVideoContentHandler ) ; if ( ! success ) { LOGGER . warn ( ""Failed to re-encode the stream "" + myFeedName + "", attempting to transcode the stream."" ) ; myChunkProvider = VideoChunkProviderFactory . getInstance ( ) . createFailOverProvider ( myChunkProvider , myFeedName ) ; if ( myChunkProvider != null ) { myChunkProvider . provideChunks ( this , myNonVideoContentHandler ) ; } else { LOGGER . error ( ""Failed to transcode stream "" + myFeedName ) ; } } return null ; } +" +654,"public Void call ( ) throws ChunkException { if ( ! myInitialized ) { throw new IllegalStateException ( ""Not initialized."" ) ; } myChunkProvider = VideoChunkProviderFactory . getInstance ( ) . createProvider ( myStream , myStreamSpan . getStart ( ) , myApproxSizeMS , myFeedName ) ; boolean success = myChunkProvider . provideChunks ( this , myNonVideoContentHandler ) ; if ( ! success ) { LOGGER . warn ( ""Failed to re-encode the stream "" + myFeedName + "", attempting to transcode the stream."" ) ; myChunkProvider = VideoChunkProviderFactory . getInstance ( ) . createFailOverProvider ( myChunkProvider , myFeedName ) ; if ( myChunkProvider != null ) { myChunkProvider . provideChunks ( this , myNonVideoContentHandler ) ; } else { } } return null ; } +","public Void call ( ) throws ChunkException { if ( ! myInitialized ) { throw new IllegalStateException ( ""Not initialized."" ) ; } myChunkProvider = VideoChunkProviderFactory . getInstance ( ) . 
createProvider ( myStream , myStreamSpan . getStart ( ) , myApproxSizeMS , myFeedName ) ; boolean success = myChunkProvider . provideChunks ( this , myNonVideoContentHandler ) ; if ( ! success ) { LOGGER . warn ( ""Failed to re-encode the stream "" + myFeedName + "", attempting to transcode the stream."" ) ; myChunkProvider = VideoChunkProviderFactory . getInstance ( ) . createFailOverProvider ( myChunkProvider , myFeedName ) ; if ( myChunkProvider != null ) { myChunkProvider . provideChunks ( this , myNonVideoContentHandler ) ; } else { LOGGER . error ( ""Failed to transcode stream "" + myFeedName ) ; } } return null ; } +" +655,"public WonMessage process ( final WonMessage message ) throws WonMessageProcessingException { WonMessageType type = message . getMessageType ( ) ; if ( type == WonMessageType . SUCCESS_RESPONSE ) { type = message . getRespondingToMessageType ( ) ; } URI webId = message . getRecipientAtomURI ( ) ; if ( type . isConnectionSpecificMessage ( ) ) { Optional < URI > connectionURI = WonLinkedDataUtils . getConnectionURIForIncomingMessage ( message , linkedDataSource ) ; if ( connectionURI . isPresent ( ) ) { try { URI messageContainerUri = WonRelativeUriHelper . createMessageContainerURIForConnection ( connectionURI . get ( ) ) ; invalidate ( messageContainerUri , webId ) ; if ( type . causesConnectionStateChange ( ) ) { invalidate ( connectionURI . get ( ) , webId ) ; } } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } } if ( type . causesNewConnection ( ) ) { logger . debug ( ""invalidating connections list for atom "" + message . getRecipientAtomURI ( ) ) ; try { URI connectionsListUri = WonRelativeUriHelper . createConnectionContainerURIForAtom ( message . getRecipientAtomURI ( ) ) ; invalidate ( connectionsListUri , webId ) ; } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } if ( type . causesAtomStateChange ( ) ) { invalidate ( message . getRecipientAtomURI ( ) , webId ) ; } return message ; } +","public WonMessage process ( final WonMessage message ) throws WonMessageProcessingException { WonMessageType type = message . getMessageType ( ) ; if ( type == WonMessageType . SUCCESS_RESPONSE ) { type = message . getRespondingToMessageType ( ) ; } URI webId = message . getRecipientAtomURI ( ) ; if ( type . isConnectionSpecificMessage ( ) ) { Optional < URI > connectionURI = WonLinkedDataUtils . getConnectionURIForIncomingMessage ( message , linkedDataSource ) ; if ( connectionURI . isPresent ( ) ) { try { logger . debug ( ""invalidating events list for atom "" + message . getRecipientAtomURI ( ) + "" for connection "" + connectionURI . get ( ) ) ; URI messageContainerUri = WonRelativeUriHelper . createMessageContainerURIForConnection ( connectionURI . get ( ) ) ; invalidate ( messageContainerUri , webId ) ; if ( type . causesConnectionStateChange ( ) ) { invalidate ( connectionURI . get ( ) , webId ) ; } } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } } if ( type . causesNewConnection ( ) ) { logger . debug ( ""invalidating connections list for atom "" + message . getRecipientAtomURI ( ) ) ; try { URI connectionsListUri = WonRelativeUriHelper . createConnectionContainerURIForAtom ( message . 
getRecipientAtomURI ( ) ) ; invalidate ( connectionsListUri , webId ) ; } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } if ( type . causesAtomStateChange ( ) ) { invalidate ( message . getRecipientAtomURI ( ) , webId ) ; } return message ; } +" +656,"public WonMessage process ( final WonMessage message ) throws WonMessageProcessingException { WonMessageType type = message . getMessageType ( ) ; if ( type == WonMessageType . SUCCESS_RESPONSE ) { type = message . getRespondingToMessageType ( ) ; } URI webId = message . getRecipientAtomURI ( ) ; if ( type . isConnectionSpecificMessage ( ) ) { Optional < URI > connectionURI = WonLinkedDataUtils . getConnectionURIForIncomingMessage ( message , linkedDataSource ) ; if ( connectionURI . isPresent ( ) ) { try { logger . debug ( ""invalidating events list for atom "" + message . getRecipientAtomURI ( ) + "" for connection "" + connectionURI . get ( ) ) ; URI messageContainerUri = WonRelativeUriHelper . createMessageContainerURIForConnection ( connectionURI . get ( ) ) ; invalidate ( messageContainerUri , webId ) ; if ( type . causesConnectionStateChange ( ) ) { invalidate ( connectionURI . get ( ) , webId ) ; } } catch ( Exception e ) { } } } if ( type . causesNewConnection ( ) ) { logger . debug ( ""invalidating connections list for atom "" + message . getRecipientAtomURI ( ) ) ; try { URI connectionsListUri = WonRelativeUriHelper . createConnectionContainerURIForAtom ( message . getRecipientAtomURI ( ) ) ; invalidate ( connectionsListUri , webId ) ; } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } if ( type . causesAtomStateChange ( ) ) { invalidate ( message . getRecipientAtomURI ( ) , webId ) ; } return message ; } +","public WonMessage process ( final WonMessage message ) throws WonMessageProcessingException { WonMessageType type = message . getMessageType ( ) ; if ( type == WonMessageType . SUCCESS_RESPONSE ) { type = message . getRespondingToMessageType ( ) ; } URI webId = message . getRecipientAtomURI ( ) ; if ( type . isConnectionSpecificMessage ( ) ) { Optional < URI > connectionURI = WonLinkedDataUtils . getConnectionURIForIncomingMessage ( message , linkedDataSource ) ; if ( connectionURI . isPresent ( ) ) { try { logger . debug ( ""invalidating events list for atom "" + message . getRecipientAtomURI ( ) + "" for connection "" + connectionURI . get ( ) ) ; URI messageContainerUri = WonRelativeUriHelper . createMessageContainerURIForConnection ( connectionURI . get ( ) ) ; invalidate ( messageContainerUri , webId ) ; if ( type . causesConnectionStateChange ( ) ) { invalidate ( connectionURI . get ( ) , webId ) ; } } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } } if ( type . causesNewConnection ( ) ) { logger . debug ( ""invalidating connections list for atom "" + message . getRecipientAtomURI ( ) ) ; try { URI connectionsListUri = WonRelativeUriHelper . createConnectionContainerURIForAtom ( message . getRecipientAtomURI ( ) ) ; invalidate ( connectionsListUri , webId ) ; } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } if ( type . 
causesAtomStateChange ( ) ) { invalidate ( message . getRecipientAtomURI ( ) , webId ) ; } return message ; } +" +657,"public WonMessage process ( final WonMessage message ) throws WonMessageProcessingException { WonMessageType type = message . getMessageType ( ) ; if ( type == WonMessageType . SUCCESS_RESPONSE ) { type = message . getRespondingToMessageType ( ) ; } URI webId = message . getRecipientAtomURI ( ) ; if ( type . isConnectionSpecificMessage ( ) ) { Optional < URI > connectionURI = WonLinkedDataUtils . getConnectionURIForIncomingMessage ( message , linkedDataSource ) ; if ( connectionURI . isPresent ( ) ) { try { logger . debug ( ""invalidating events list for atom "" + message . getRecipientAtomURI ( ) + "" for connection "" + connectionURI . get ( ) ) ; URI messageContainerUri = WonRelativeUriHelper . createMessageContainerURIForConnection ( connectionURI . get ( ) ) ; invalidate ( messageContainerUri , webId ) ; if ( type . causesConnectionStateChange ( ) ) { invalidate ( connectionURI . get ( ) , webId ) ; } } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } } if ( type . causesNewConnection ( ) ) { try { URI connectionsListUri = WonRelativeUriHelper . createConnectionContainerURIForAtom ( message . getRecipientAtomURI ( ) ) ; invalidate ( connectionsListUri , webId ) ; } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } if ( type . causesAtomStateChange ( ) ) { invalidate ( message . getRecipientAtomURI ( ) , webId ) ; } return message ; } +","public WonMessage process ( final WonMessage message ) throws WonMessageProcessingException { WonMessageType type = message . getMessageType ( ) ; if ( type == WonMessageType . SUCCESS_RESPONSE ) { type = message . getRespondingToMessageType ( ) ; } URI webId = message . getRecipientAtomURI ( ) ; if ( type . isConnectionSpecificMessage ( ) ) { Optional < URI > connectionURI = WonLinkedDataUtils . getConnectionURIForIncomingMessage ( message , linkedDataSource ) ; if ( connectionURI . isPresent ( ) ) { try { logger . debug ( ""invalidating events list for atom "" + message . getRecipientAtomURI ( ) + "" for connection "" + connectionURI . get ( ) ) ; URI messageContainerUri = WonRelativeUriHelper . createMessageContainerURIForConnection ( connectionURI . get ( ) ) ; invalidate ( messageContainerUri , webId ) ; if ( type . causesConnectionStateChange ( ) ) { invalidate ( connectionURI . get ( ) , webId ) ; } } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } } if ( type . causesNewConnection ( ) ) { logger . debug ( ""invalidating connections list for atom "" + message . getRecipientAtomURI ( ) ) ; try { URI connectionsListUri = WonRelativeUriHelper . createConnectionContainerURIForAtom ( message . getRecipientAtomURI ( ) ) ; invalidate ( connectionsListUri , webId ) ; } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } if ( type . causesAtomStateChange ( ) ) { invalidate ( message . getRecipientAtomURI ( ) , webId ) ; } return message ; } +" +658,"public WonMessage process ( final WonMessage message ) throws WonMessageProcessingException { WonMessageType type = message . 
getMessageType ( ) ; if ( type == WonMessageType . SUCCESS_RESPONSE ) { type = message . getRespondingToMessageType ( ) ; } URI webId = message . getRecipientAtomURI ( ) ; if ( type . isConnectionSpecificMessage ( ) ) { Optional < URI > connectionURI = WonLinkedDataUtils . getConnectionURIForIncomingMessage ( message , linkedDataSource ) ; if ( connectionURI . isPresent ( ) ) { try { logger . debug ( ""invalidating events list for atom "" + message . getRecipientAtomURI ( ) + "" for connection "" + connectionURI . get ( ) ) ; URI messageContainerUri = WonRelativeUriHelper . createMessageContainerURIForConnection ( connectionURI . get ( ) ) ; invalidate ( messageContainerUri , webId ) ; if ( type . causesConnectionStateChange ( ) ) { invalidate ( connectionURI . get ( ) , webId ) ; } } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } } if ( type . causesNewConnection ( ) ) { logger . debug ( ""invalidating connections list for atom "" + message . getRecipientAtomURI ( ) ) ; try { URI connectionsListUri = WonRelativeUriHelper . createConnectionContainerURIForAtom ( message . getRecipientAtomURI ( ) ) ; invalidate ( connectionsListUri , webId ) ; } catch ( Exception e ) { } } if ( type . causesAtomStateChange ( ) ) { invalidate ( message . getRecipientAtomURI ( ) , webId ) ; } return message ; } +","public WonMessage process ( final WonMessage message ) throws WonMessageProcessingException { WonMessageType type = message . getMessageType ( ) ; if ( type == WonMessageType . SUCCESS_RESPONSE ) { type = message . getRespondingToMessageType ( ) ; } URI webId = message . getRecipientAtomURI ( ) ; if ( type . isConnectionSpecificMessage ( ) ) { Optional < URI > connectionURI = WonLinkedDataUtils . getConnectionURIForIncomingMessage ( message , linkedDataSource ) ; if ( connectionURI . isPresent ( ) ) { try { logger . debug ( ""invalidating events list for atom "" + message . getRecipientAtomURI ( ) + "" for connection "" + connectionURI . get ( ) ) ; URI messageContainerUri = WonRelativeUriHelper . createMessageContainerURIForConnection ( connectionURI . get ( ) ) ; invalidate ( messageContainerUri , webId ) ; if ( type . causesConnectionStateChange ( ) ) { invalidate ( connectionURI . get ( ) , webId ) ; } } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } } if ( type . causesNewConnection ( ) ) { logger . debug ( ""invalidating connections list for atom "" + message . getRecipientAtomURI ( ) ) ; try { URI connectionsListUri = WonRelativeUriHelper . createConnectionContainerURIForAtom ( message . getRecipientAtomURI ( ) ) ; invalidate ( connectionsListUri , webId ) ; } catch ( Exception e ) { logger . info ( ""Error occurred while trying to invalidate cache for {}: {}"" , message . getRecipientAtomURI ( ) , e . getMessage ( ) ) ; } } if ( type . causesAtomStateChange ( ) ) { invalidate ( message . getRecipientAtomURI ( ) , webId ) ; } return message ; } +" +659,"public void visitValue ( PValue value , TransformHierarchy . Node producer ) { if ( ! isStreaming ) { if ( value instanceof PCollection && ( ( PCollection ) value ) . isBounded ( ) == PCollection . IsBounded . UNBOUNDED ) { isStreaming = true ; } } } +","public void visitValue ( PValue value , TransformHierarchy . Node producer ) { if ( ! 
isStreaming ) { if ( value instanceof PCollection && ( ( PCollection ) value ) . isBounded ( ) == PCollection . IsBounded . UNBOUNDED ) { LOG . info ( ""Found unbounded PCollection {}. Switching to streaming execution."" , value . getName ( ) ) ; isStreaming = true ; } } } +" +660,"public synchronized void beginTransaction ( ) { if ( supportsOptimisticConcurrency ) { lockManager . lock ( ) ; LOG . info ( ""Transaction started"" ) ; } } +","public synchronized void beginTransaction ( ) { if ( supportsOptimisticConcurrency ) { LOG . info ( ""Transaction starting without a transaction owner"" ) ; lockManager . lock ( ) ; LOG . info ( ""Transaction started"" ) ; } } +" +661,"public synchronized void beginTransaction ( ) { if ( supportsOptimisticConcurrency ) { LOG . info ( ""Transaction starting without a transaction owner"" ) ; lockManager . lock ( ) ; } } +","public synchronized void beginTransaction ( ) { if ( supportsOptimisticConcurrency ) { LOG . info ( ""Transaction starting without a transaction owner"" ) ; lockManager . lock ( ) ; LOG . info ( ""Transaction started"" ) ; } } +" +662,"private TChannelMessage decodeCallFrame ( ChannelHandlerContext ctx , CallFrame frame ) { ArgScheme scheme ; if ( frame . getType ( ) == FrameType . CallRequest ) { scheme = ArgScheme . toScheme ( ( ( CallRequestFrame ) frame ) . getHeaders ( ) . get ( TransportHeaders . ARG_SCHEME_KEY ) ) ; } else { scheme = ArgScheme . toScheme ( ( ( CallResponseFrame ) frame ) . getHeaders ( ) . get ( TransportHeaders . ARG_SCHEME_KEY ) ) ; } if ( ! ArgScheme . isSupported ( scheme ) ) { if ( frame . getType ( ) == FrameType . CallRequest ) { sendError ( ErrorType . BadRequest , ""Arg Scheme not specified or unsupported"" , frame . getId ( ) , ctx ) ; } else { } return null ; } List < CallFrame > frames = new ArrayList < > ( ) ; frames . add ( frame ) ; frame . retain ( ) ; final TChannelMessage result ; boolean release = true ; try { if ( ! hasMore ( frame ) ) { result = MessageCodec . decodeCallFrames ( frames ) ; } else { callFrames . put ( frame . getId ( ) , frames ) ; result = null ; } release = false ; } finally { if ( release ) { frame . release ( ) ; } } if ( result != null ) { result . touch ( ""finished MessageDefragmenter.decodeCallFrame"" ) ; } return result ; } +","private TChannelMessage decodeCallFrame ( ChannelHandlerContext ctx , CallFrame frame ) { ArgScheme scheme ; if ( frame . getType ( ) == FrameType . CallRequest ) { scheme = ArgScheme . toScheme ( ( ( CallRequestFrame ) frame ) . getHeaders ( ) . get ( TransportHeaders . ARG_SCHEME_KEY ) ) ; } else { scheme = ArgScheme . toScheme ( ( ( CallResponseFrame ) frame ) . getHeaders ( ) . get ( TransportHeaders . ARG_SCHEME_KEY ) ) ; } if ( ! ArgScheme . isSupported ( scheme ) ) { if ( frame . getType ( ) == FrameType . CallRequest ) { sendError ( ErrorType . BadRequest , ""Arg Scheme not specified or unsupported"" , frame . getId ( ) , ctx ) ; } else { logger . error ( ""Arg Scheme not specified or unsupported: {}"" , scheme ) ; } return null ; } List < CallFrame > frames = new ArrayList < > ( ) ; frames . add ( frame ) ; frame . retain ( ) ; final TChannelMessage result ; boolean release = true ; try { if ( ! hasMore ( frame ) ) { result = MessageCodec . decodeCallFrames ( frames ) ; } else { callFrames . put ( frame . getId ( ) , frames ) ; result = null ; } release = false ; } finally { if ( release ) { frame . release ( ) ; } } if ( result != null ) { result . 
touch ( ""finished MessageDefragmenter.decodeCallFrame"" ) ; } return result ; } +" +663,"@ Test ( groups = { ""TestScaleService"" } ) public void testFindSwapDisk ( ) { DiskEntity swapDisk = scaleService . findSwapDisk ( nodeEntity ) ; Assert . assertTrue ( swapDisk . getName ( ) == SWAP_DISK_NAME , ""swap disk should be "" + SWAP_DISK_NAME ) ; } +","@ Test ( groups = { ""TestScaleService"" } ) public void testFindSwapDisk ( ) { logger . info ( ""test findSwapDisk"" ) ; DiskEntity swapDisk = scaleService . findSwapDisk ( nodeEntity ) ; Assert . assertTrue ( swapDisk . getName ( ) == SWAP_DISK_NAME , ""swap disk should be "" + SWAP_DISK_NAME ) ; } +" +664,"@ Benchmark public void fastLogger_infoLogStatement_lambda_benchmark_02 ( ) { } +","@ Benchmark public void fastLogger_infoLogStatement_lambda_benchmark_02 ( ) { fastLogger . info ( ( ) -> message ) ; } +" +665,"public StgMbBaustTxt merge ( StgMbBaustTxt detachedInstance ) { try { StgMbBaustTxt result = ( StgMbBaustTxt ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +","public StgMbBaustTxt merge ( StgMbBaustTxt detachedInstance ) { log . debug ( ""merging StgMbBaustTxt instance"" ) ; try { StgMbBaustTxt result = ( StgMbBaustTxt ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +" +666,"public StgMbBaustTxt merge ( StgMbBaustTxt detachedInstance ) { log . debug ( ""merging StgMbBaustTxt instance"" ) ; try { StgMbBaustTxt result = ( StgMbBaustTxt ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +","public StgMbBaustTxt merge ( StgMbBaustTxt detachedInstance ) { log . debug ( ""merging StgMbBaustTxt instance"" ) ; try { StgMbBaustTxt result = ( StgMbBaustTxt ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +" +667,"public StgMbBaustTxt merge ( StgMbBaustTxt detachedInstance ) { log . debug ( ""merging StgMbBaustTxt instance"" ) ; try { StgMbBaustTxt result = ( StgMbBaustTxt ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { throw re ; } } +","public StgMbBaustTxt merge ( StgMbBaustTxt detachedInstance ) { log . debug ( ""merging StgMbBaustTxt instance"" ) ; try { StgMbBaustTxt result = ( StgMbBaustTxt ) sessionFactory . getCurrentSession ( ) . merge ( detachedInstance ) ; log . debug ( ""merge successful"" ) ; return result ; } catch ( RuntimeException re ) { log . error ( ""merge failed"" , re ) ; throw re ; } } +" +668,"public static InetAddress getLocalHost ( Properties props ) { String addr = props . getProperty ( ""jcifs.smb.client.laddr"" ) ; if ( addr != null ) { try { return InetAddress . getByName ( addr ) ; } catch ( UnknownHostException uhe ) { } } return null ; } +","public static InetAddress getLocalHost ( Properties props ) { String addr = props . getProperty ( ""jcifs.smb.client.laddr"" ) ; if ( addr != null ) { try { return InetAddress . 
getByName ( addr ) ; } catch ( UnknownHostException uhe ) { log . error ( ""Ignoring jcifs.smb.client.laddr address: "" + addr , uhe ) ; } } return null ; } +" +669,"public boolean awaitTermination ( long timeout , @ NotNull TimeUnit unit ) throws InterruptedException { if ( ! executor . isShutdown ( ) ) throw new IllegalStateException ( String . format ( ""AsyncProcessor{%s} hasn't been shut down yet"" , name ) ) ; if ( executor . awaitTermination ( timeout , unit ) ) { return true ; } else { return false ; } } +","public boolean awaitTermination ( long timeout , @ NotNull TimeUnit unit ) throws InterruptedException { if ( ! executor . isShutdown ( ) ) throw new IllegalStateException ( String . format ( ""AsyncProcessor{%s} hasn't been shut down yet"" , name ) ) ; if ( executor . awaitTermination ( timeout , unit ) ) { LOGGER . trace ( ""AsyncProcessor{{}} is shut down"" , name ) ; return true ; } else { return false ; } } +" +670,"public void setGiverRelationship ( com . secretpal . model . SPPerson value ) { if ( _SPSecretPal . LOG . isDebugEnabled ( ) ) { } if ( er . extensions . eof . ERXGenericRecord . InverseRelationshipUpdater . updateInverseRelationships ( ) ) { setGiver ( value ) ; } else if ( value == null ) { com . secretpal . model . SPPerson oldValue = giver ( ) ; if ( oldValue != null ) { removeObjectFromBothSidesOfRelationshipWithKey ( oldValue , _SPSecretPal . GIVER_KEY ) ; } } else { addObjectToBothSidesOfRelationshipWithKey ( value , _SPSecretPal . GIVER_KEY ) ; } } +","public void setGiverRelationship ( com . secretpal . model . SPPerson value ) { if ( _SPSecretPal . LOG . isDebugEnabled ( ) ) { _SPSecretPal . LOG . debug ( ""updating giver from "" + giver ( ) + "" to "" + value ) ; } if ( er . extensions . eof . ERXGenericRecord . InverseRelationshipUpdater . updateInverseRelationships ( ) ) { setGiver ( value ) ; } else if ( value == null ) { com . secretpal . model . SPPerson oldValue = giver ( ) ; if ( oldValue != null ) { removeObjectFromBothSidesOfRelationshipWithKey ( oldValue , _SPSecretPal . GIVER_KEY ) ; } } else { addObjectToBothSidesOfRelationshipWithKey ( value , _SPSecretPal . GIVER_KEY ) ; } } +" +671,"protected void initSystemZNode ( ) { try { persist ( getMasterZNodeParentPath ( ) , """" ) ; persist ( getWorkerZNodeParentPath ( ) , """" ) ; persist ( getDeadZNodeParentPath ( ) , """" ) ; } catch ( Exception e ) { logger . error ( ""init system znode failed"" , e ) ; } } +","protected void initSystemZNode ( ) { try { persist ( getMasterZNodeParentPath ( ) , """" ) ; persist ( getWorkerZNodeParentPath ( ) , """" ) ; persist ( getDeadZNodeParentPath ( ) , """" ) ; logger . info ( ""initialize server nodes success."" ) ; } catch ( Exception e ) { logger . error ( ""init system znode failed"" , e ) ; } } +" +672,"protected void initSystemZNode ( ) { try { persist ( getMasterZNodeParentPath ( ) , """" ) ; persist ( getWorkerZNodeParentPath ( ) , """" ) ; persist ( getDeadZNodeParentPath ( ) , """" ) ; logger . info ( ""initialize server nodes success."" ) ; } catch ( Exception e ) { } } +","protected void initSystemZNode ( ) { try { persist ( getMasterZNodeParentPath ( ) , """" ) ; persist ( getWorkerZNodeParentPath ( ) , """" ) ; persist ( getDeadZNodeParentPath ( ) , """" ) ; logger . info ( ""initialize server nodes success."" ) ; } catch ( Exception e ) { logger . error ( ""init system znode failed"" , e ) ; } } +" +673,"public void onError ( Throwable throwable ) { try { channel . close ( ) ; } catch ( IOException e ) { } resultFuture . 
completeExceptionally ( throwable ) ; } +","public void onError ( Throwable throwable ) { try { channel . close ( ) ; } catch ( IOException e ) { LOGGER . log ( Level . WARNING , ""Received an onError"" , e ) ; } resultFuture . completeExceptionally ( throwable ) ; } +" +674,"public void refresh ( ) { try { TokenRefreshResponse refreshResponse = tokenRefresher . getNewToken ( refreshToken ) ; if ( refreshResponse == null ) { throw new InvalidOauthTokenException ( ) ; } apiToken = refreshResponse . getAccessToken ( ) ; tokenExpiration = new TokenExpiration ( refreshResponse . getExpiresIn ( ) ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } } +","public void refresh ( ) { try { TokenRefreshResponse refreshResponse = tokenRefresher . getNewToken ( refreshToken ) ; if ( refreshResponse == null ) { throw new InvalidOauthTokenException ( ) ; } apiToken = refreshResponse . getAccessToken ( ) ; tokenExpiration = new TokenExpiration ( refreshResponse . getExpiresIn ( ) ) ; } catch ( IOException e ) { LOG . error ( ""Exception while attempting to refresh access token"" ) ; throw new RuntimeException ( e ) ; } } +" +675,"private void handleLinkButtonNotPressed ( LinkButtonException ex ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_ERROR , ""@text/offline.conf-error-press-pairing-button"" ) ; } +","private void handleLinkButtonNotPressed ( LinkButtonException ex ) { logger . debug ( ""Failed creating new user on Hue bridge: {}"" , ex . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_ERROR , ""@text/offline.conf-error-press-pairing-button"" ) ; } +" +676,"public static void tryDeleteFile ( @ Nullable File file ) { if ( file != null ) { if ( ! file . delete ( ) ) { file . deleteOnExit ( ) ; } } } +","public static void tryDeleteFile ( @ Nullable File file ) { if ( file != null ) { if ( ! file . delete ( ) ) { log . warn ( ""unable to remove file {}, marked for delete on exit"" , file . getAbsolutePath ( ) ) ; file . deleteOnExit ( ) ; } } } +" +677,"private static Value min ( Value value ) { if ( value . type == ValueType . LIST ) { Value min = null ; for ( Value v : value . getList ( ) ) { if ( min == null ) { min = v ; } else if ( min . type . canBeLong ( ) && v . type . canBeLong ( ) ) { min = min . getLong ( ) <= v . getLong ( ) ? min : v ; } else if ( min . type . canBeDouble ( ) && v . type . canBeDouble ( ) ) { min = min . getDouble ( ) <= v . getDouble ( ) ? min : v ; } else { throw new IllegalArgumentException ( ""min("" + value + "")"" ) ; } } if ( min == null ) { throw new IllegalArgumentException ( ""min of 0 values"" ) ; } return min ; } else { return value ; } } +","private static Value min ( Value value ) { if ( value . type == ValueType . LIST ) { Value min = null ; for ( Value v : value . getList ( ) ) { if ( min == null ) { min = v ; } else if ( min . type . canBeLong ( ) && v . type . canBeLong ( ) ) { min = min . getLong ( ) <= v . getLong ( ) ? min : v ; } else if ( min . type . canBeDouble ( ) && v . type . canBeDouble ( ) ) { min = min . getDouble ( ) <= v . getDouble ( ) ? min : v ; } else { throw new IllegalArgumentException ( ""min("" + value + "")"" ) ; } } if ( min == null ) { throw new IllegalArgumentException ( ""min of 0 values"" ) ; } return min ; } else { log . warn ( ""Computing min from single value"" ) ; return value ; } } +" +678,"public void processReadyCommand ( final Command cmd ) { final ReadyCommand ready = ( ReadyCommand ) cmd ; Boolean humanReadable = ready . 
getEnableHumanReadableSizes ( ) ; if ( humanReadable != null ) { NumbersUtil . enableHumanReadableSizes = humanReadable ; } if ( ready . getHostId ( ) != null ) { setId ( ready . getHostId ( ) ) ; } processManagementServerList ( ready . getMsHostList ( ) , ready . getLbAlgorithm ( ) , ready . getLbCheckInterval ( ) ) ; s_logger . info ( ""Ready command is processed for agent id = "" + getId ( ) ) ; } +","public void processReadyCommand ( final Command cmd ) { final ReadyCommand ready = ( ReadyCommand ) cmd ; Boolean humanReadable = ready . getEnableHumanReadableSizes ( ) ; if ( humanReadable != null ) { NumbersUtil . enableHumanReadableSizes = humanReadable ; } s_logger . info ( ""Processing agent ready command, agent id = "" + ready . getHostId ( ) ) ; if ( ready . getHostId ( ) != null ) { setId ( ready . getHostId ( ) ) ; } processManagementServerList ( ready . getMsHostList ( ) , ready . getLbAlgorithm ( ) , ready . getLbCheckInterval ( ) ) ; s_logger . info ( ""Ready command is processed for agent id = "" + getId ( ) ) ; } +" +679,"public void processReadyCommand ( final Command cmd ) { final ReadyCommand ready = ( ReadyCommand ) cmd ; Boolean humanReadable = ready . getEnableHumanReadableSizes ( ) ; if ( humanReadable != null ) { NumbersUtil . enableHumanReadableSizes = humanReadable ; } s_logger . info ( ""Processing agent ready command, agent id = "" + ready . getHostId ( ) ) ; if ( ready . getHostId ( ) != null ) { setId ( ready . getHostId ( ) ) ; } processManagementServerList ( ready . getMsHostList ( ) , ready . getLbAlgorithm ( ) , ready . getLbCheckInterval ( ) ) ; } +","public void processReadyCommand ( final Command cmd ) { final ReadyCommand ready = ( ReadyCommand ) cmd ; Boolean humanReadable = ready . getEnableHumanReadableSizes ( ) ; if ( humanReadable != null ) { NumbersUtil . enableHumanReadableSizes = humanReadable ; } s_logger . info ( ""Processing agent ready command, agent id = "" + ready . getHostId ( ) ) ; if ( ready . getHostId ( ) != null ) { setId ( ready . getHostId ( ) ) ; } processManagementServerList ( ready . getMsHostList ( ) , ready . getLbAlgorithm ( ) , ready . getLbCheckInterval ( ) ) ; s_logger . info ( ""Ready command is processed for agent id = "" + getId ( ) ) ; } +" +680,"public static com . liferay . layout . page . template . model . LayoutPageTemplateCollection addLayoutPageTemplateCollection ( HttpPrincipal httpPrincipal , long groupId , String name , String description , com . liferay . portal . kernel . service . ServiceContext serviceContext ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( LayoutPageTemplateCollectionServiceUtil . class , ""addLayoutPageTemplateCollection"" , _addLayoutPageTemplateCollectionParameterTypes0 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , groupId , name , description , serviceContext ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( com . liferay . layout . page . template . model . LayoutPageTemplateCollection ) returnObj ; } catch ( com . liferay . portal . kernel . exception . 
SystemException systemException ) { throw systemException ; } } +","public static com . liferay . layout . page . template . model . LayoutPageTemplateCollection addLayoutPageTemplateCollection ( HttpPrincipal httpPrincipal , long groupId , String name , String description , com . liferay . portal . kernel . service . ServiceContext serviceContext ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( LayoutPageTemplateCollectionServiceUtil . class , ""addLayoutPageTemplateCollection"" , _addLayoutPageTemplateCollectionParameterTypes0 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , groupId , name , description , serviceContext ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( com . liferay . layout . page . template . model . LayoutPageTemplateCollection ) returnObj ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { _log . error ( systemException , systemException ) ; throw systemException ; } } +" +681,"OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document jobDocument ) throws CatalogDBException { long tmpStartTime = startQuery ( ) ; String jobId = jobDocument . getString ( QueryParams . ID . key ( ) ) ; long jobUid = jobDocument . getLong ( PRIVATE_UID ) ; long studyUid = jobDocument . getLong ( PRIVATE_STUDY_UID ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new Status ( Status . DELETED ) , ""status"" ) , jobDocument ) ; Bson query = new Document ( ) . append ( QueryParams . ID . key ( ) , jobId ) . append ( PRIVATE_STUDY_UID , studyUid ) ; deletedJobCollection . update ( clientSession , query , new Document ( ""$set"" , jobDocument ) , new QueryOptions ( MongoDBCollection . UPSERT , true ) ) ; query = new Document ( ) . append ( PRIVATE_UID , jobUid ) . append ( PRIVATE_STUDY_UID , studyUid ) ; DataResult remove = jobCollection . remove ( clientSession , query , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""Job "" + jobId + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""Job "" + jobId + "" could not be deleted"" ) ; } logger . debug ( ""Job {} successfully deleted"" , jobId ) ; return endWrite ( tmpStartTime , 1 , 0 , 0 , 1 , null ) ; } +","OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document jobDocument ) throws CatalogDBException { long tmpStartTime = startQuery ( ) ; String jobId = jobDocument . getString ( QueryParams . ID . key ( ) ) ; long jobUid = jobDocument . getLong ( PRIVATE_UID ) ; long studyUid = jobDocument . getLong ( PRIVATE_STUDY_UID ) ; logger . debug ( ""Deleting job {} ({})"" , jobId , jobUid ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new Status ( Status . DELETED ) , ""status"" ) , jobDocument ) ; Bson query = new Document ( ) . append ( QueryParams . ID . key ( ) , jobId ) . append ( PRIVATE_STUDY_UID , studyUid ) ; deletedJobCollection . update ( clientSession , query , new Document ( ""$set"" , jobDocument ) , new QueryOptions ( MongoDBCollection . 
UPSERT , true ) ) ; query = new Document ( ) . append ( PRIVATE_UID , jobUid ) . append ( PRIVATE_STUDY_UID , studyUid ) ; DataResult remove = jobCollection . remove ( clientSession , query , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""Job "" + jobId + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""Job "" + jobId + "" could not be deleted"" ) ; } logger . debug ( ""Job {} successfully deleted"" , jobId ) ; return endWrite ( tmpStartTime , 1 , 0 , 0 , 1 , null ) ; } +" +682,"OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document jobDocument ) throws CatalogDBException { long tmpStartTime = startQuery ( ) ; String jobId = jobDocument . getString ( QueryParams . ID . key ( ) ) ; long jobUid = jobDocument . getLong ( PRIVATE_UID ) ; long studyUid = jobDocument . getLong ( PRIVATE_STUDY_UID ) ; logger . debug ( ""Deleting job {} ({})"" , jobId , jobUid ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new Status ( Status . DELETED ) , ""status"" ) , jobDocument ) ; Bson query = new Document ( ) . append ( QueryParams . ID . key ( ) , jobId ) . append ( PRIVATE_STUDY_UID , studyUid ) ; deletedJobCollection . update ( clientSession , query , new Document ( ""$set"" , jobDocument ) , new QueryOptions ( MongoDBCollection . UPSERT , true ) ) ; query = new Document ( ) . append ( PRIVATE_UID , jobUid ) . append ( PRIVATE_STUDY_UID , studyUid ) ; DataResult remove = jobCollection . remove ( clientSession , query , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""Job "" + jobId + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""Job "" + jobId + "" could not be deleted"" ) ; } return endWrite ( tmpStartTime , 1 , 0 , 0 , 1 , null ) ; } +","OpenCGAResult < Object > privateDelete ( ClientSession clientSession , Document jobDocument ) throws CatalogDBException { long tmpStartTime = startQuery ( ) ; String jobId = jobDocument . getString ( QueryParams . ID . key ( ) ) ; long jobUid = jobDocument . getLong ( PRIVATE_UID ) ; long studyUid = jobDocument . getLong ( PRIVATE_STUDY_UID ) ; logger . debug ( ""Deleting job {} ({})"" , jobId , jobUid ) ; nestedPut ( QueryParams . INTERNAL_STATUS . key ( ) , getMongoDBDocument ( new Status ( Status . DELETED ) , ""status"" ) , jobDocument ) ; Bson query = new Document ( ) . append ( QueryParams . ID . key ( ) , jobId ) . append ( PRIVATE_STUDY_UID , studyUid ) ; deletedJobCollection . update ( clientSession , query , new Document ( ""$set"" , jobDocument ) , new QueryOptions ( MongoDBCollection . UPSERT , true ) ) ; query = new Document ( ) . append ( PRIVATE_UID , jobUid ) . append ( PRIVATE_STUDY_UID , studyUid ) ; DataResult remove = jobCollection . remove ( clientSession , query , null ) ; if ( remove . getNumMatches ( ) == 0 ) { throw new CatalogDBException ( ""Job "" + jobId + "" not found"" ) ; } if ( remove . getNumDeleted ( ) == 0 ) { throw new CatalogDBException ( ""Job "" + jobId + "" could not be deleted"" ) ; } logger . debug ( ""Job {} successfully deleted"" , jobId ) ; return endWrite ( tmpStartTime , 1 , 0 , 0 , 1 , null ) ; } +" +683,"protected < T > T deserialize ( String content , Class < T > type ) { if ( content == null || content . isEmpty ( ) ) { return null ; } try { return marshaller . 
unmarshall ( content , type ) ; } catch ( MarshallingException e ) { throw new KieServicesException ( ""Error while deserializing data received from server!"" , e ) ; } } +","protected < T > T deserialize ( String content , Class < T > type ) { logger . debug ( ""About to deserialize content: \n '{}' \n into type: '{}'"" , content , type ) ; if ( content == null || content . isEmpty ( ) ) { return null ; } try { return marshaller . unmarshall ( content , type ) ; } catch ( MarshallingException e ) { throw new KieServicesException ( ""Error while deserializing data received from server!"" , e ) ; } } +" +684,"private void takeAndProcessTask ( ) { while ( true ) { try { } catch ( InterruptedException e ) { log . warn ( ""BuildCoordinator thread interrupted. Possibly the system is being shut down"" , e ) ; break ; } } } +","private void takeAndProcessTask ( ) { while ( true ) { try { buildQueue . take ( task -> { log . info ( ""Build task: "" + task + "", will pick up next task"" ) ; processBuildTask ( task ) ; } ) ; } catch ( InterruptedException e ) { log . warn ( ""BuildCoordinator thread interrupted. Possibly the system is being shut down"" , e ) ; break ; } } } +" +685,"private void takeAndProcessTask ( ) { while ( true ) { try { buildQueue . take ( task -> { processBuildTask ( task ) ; } ) ; } catch ( InterruptedException e ) { log . warn ( ""BuildCoordinator thread interrupted. Possibly the system is being shut down"" , e ) ; break ; } } } +","private void takeAndProcessTask ( ) { while ( true ) { try { buildQueue . take ( task -> { log . info ( ""Build task: "" + task + "", will pick up next task"" ) ; processBuildTask ( task ) ; } ) ; } catch ( InterruptedException e ) { log . warn ( ""BuildCoordinator thread interrupted. Possibly the system is being shut down"" , e ) ; break ; } } } +" +686,"private void takeAndProcessTask ( ) { while ( true ) { try { buildQueue . take ( task -> { log . info ( ""Build task: "" + task + "", will pick up next task"" ) ; processBuildTask ( task ) ; } ) ; } catch ( InterruptedException e ) { break ; } } } +","private void takeAndProcessTask ( ) { while ( true ) { try { buildQueue . take ( task -> { log . info ( ""Build task: "" + task + "", will pick up next task"" ) ; processBuildTask ( task ) ; } ) ; } catch ( InterruptedException e ) { log . warn ( ""BuildCoordinator thread interrupted. Possibly the system is being shut down"" , e ) ; break ; } } } +" +687,"public void doServeResource ( PortletWindow portletWindow , HttpServletRequest request , HttpServletResponse response ) throws PortletException , IOException , PortletContainerException { ensureInitialized ( ) ; debugWithName ( ""Resource request received for portlet: "" + portletWindow . getPortletDefinition ( ) . getPortletName ( ) ) ; PortletRequestContextService rcService = getContainerServices ( ) . getPortletRequestContextService ( ) ; PortletEnvironmentService envService = getContainerServices ( ) . getPortletEnvironmentService ( ) ; PortletInvokerService invoker = getContainerServices ( ) . getPortletInvokerService ( ) ; PortletResourceRequestContext requestContext = rcService . getPortletResourceRequestContext ( this , request , response , portletWindow ) ; PortletResourceResponseContext responseContext = rcService . getPortletResourceResponseContext ( this , request , response , portletWindow , requestContext ) ; responseContext . setPropsAllowed ( true ) ; ResourceRequest portletRequest = envService . 
createResourceRequest ( requestContext , responseContext ) ; ResourceResponse portletResponse = envService . createResourceResponse ( responseContext , requestContext . getCacheability ( ) ) ; requestContext . setResponse ( portletResponse ) ; FilterManager filterManager = filterInitialisation ( portletWindow , PortletRequest . RESOURCE_PHASE ) ; try { invoker . serveResource ( requestContext , portletRequest , portletResponse , filterManager ) ; } finally { if ( ! request . isAsyncSupported ( ) || ! request . isAsyncStarted ( ) ) { responseContext . close ( ) ; responseContext . release ( ) ; } else { } } debugWithName ( ""Portlet resource done for: "" + portletWindow . getPortletDefinition ( ) . getPortletName ( ) ) ; } +","public void doServeResource ( PortletWindow portletWindow , HttpServletRequest request , HttpServletResponse response ) throws PortletException , IOException , PortletContainerException { ensureInitialized ( ) ; debugWithName ( ""Resource request received for portlet: "" + portletWindow . getPortletDefinition ( ) . getPortletName ( ) ) ; PortletRequestContextService rcService = getContainerServices ( ) . getPortletRequestContextService ( ) ; PortletEnvironmentService envService = getContainerServices ( ) . getPortletEnvironmentService ( ) ; PortletInvokerService invoker = getContainerServices ( ) . getPortletInvokerService ( ) ; PortletResourceRequestContext requestContext = rcService . getPortletResourceRequestContext ( this , request , response , portletWindow ) ; PortletResourceResponseContext responseContext = rcService . getPortletResourceResponseContext ( this , request , response , portletWindow , requestContext ) ; responseContext . setPropsAllowed ( true ) ; ResourceRequest portletRequest = envService . createResourceRequest ( requestContext , responseContext ) ; ResourceResponse portletResponse = envService . createResourceResponse ( responseContext , requestContext . getCacheability ( ) ) ; requestContext . setResponse ( portletResponse ) ; FilterManager filterManager = filterInitialisation ( portletWindow , PortletRequest . RESOURCE_PHASE ) ; try { invoker . serveResource ( requestContext , portletRequest , portletResponse , filterManager ) ; } finally { if ( ! request . isAsyncSupported ( ) || ! request . isAsyncStarted ( ) ) { responseContext . close ( ) ; responseContext . release ( ) ; } else { LOG . debug ( ""Async started for resource request. responseContext not released."" ) ; } } debugWithName ( ""Portlet resource done for: "" + portletWindow . getPortletDefinition ( ) . getPortletName ( ) ) ; } +" +688,"public static com . liferay . push . notifications . model . PushNotificationsDeviceSoap deletePushNotificationsDevice ( String token ) throws RemoteException { try { com . liferay . push . notifications . model . PushNotificationsDevice returnValue = PushNotificationsDeviceServiceUtil . deletePushNotificationsDevice ( token ) ; return com . liferay . push . notifications . model . PushNotificationsDeviceSoap . toSoapModel ( returnValue ) ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } } +","public static com . liferay . push . notifications . model . PushNotificationsDeviceSoap deletePushNotificationsDevice ( String token ) throws RemoteException { try { com . liferay . push . notifications . model . PushNotificationsDevice returnValue = PushNotificationsDeviceServiceUtil . deletePushNotificationsDevice ( token ) ; return com . liferay . push . notifications . model . PushNotificationsDeviceSoap . 
toSoapModel ( returnValue ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } } +" +689,"@ Test public void testCounty ( ) throws Exception { String county = ""'clackamas'"" ; String query = GroupedNormalField . LARGE_COUNTIES . name ( ) + EQ_OP + county ; runTest ( query , query ) ; } +","@ Test public void testCounty ( ) throws Exception { log . info ( ""------ testCounty ------"" ) ; String county = ""'clackamas'"" ; String query = GroupedNormalField . LARGE_COUNTIES . name ( ) + EQ_OP + county ; runTest ( query , query ) ; } +" +690,"public String toString ( String tabs , double thresholdEntropy , double thresholdP , int thresholdCount ) { if ( getTotalCount ( ) == 0 ) return """" ; StringBuilder sb = new StringBuilder ( ) ; double p [ ] = p ( ) ; for ( int idx = 0 ; idx < 4 ; idx ++ ) { char base = BASES [ idx ] ; AcgtTree n = nodes [ idx ] ; if ( n != null ) { sb . append ( String . format ( ""%s%s%s: %d\te:%4.3f\tp:%4.2f\n"" , tabs , name , base , counts [ idx ] , n . entropy ( ) , p [ idx ] ) ) ; if ( ( ( n . entropy ( ) <= thresholdEntropy ) || ( p [ idx ] >= thresholdP ) ) && ( counts [ idx ] >= thresholdCount ) ) { sb . append ( n . toString ( tabs + ""\t"" , thresholdEntropy , thresholdP , thresholdCount ) ) ; } } } return sb . toString ( ) ; } +","public String toString ( String tabs , double thresholdEntropy , double thresholdP , int thresholdCount ) { if ( getTotalCount ( ) == 0 ) return """" ; StringBuilder sb = new StringBuilder ( ) ; double p [ ] = p ( ) ; for ( int idx = 0 ; idx < 4 ; idx ++ ) { char base = BASES [ idx ] ; AcgtTree n = nodes [ idx ] ; if ( n != null ) { sb . append ( String . format ( ""%s%s%s: %d\te:%4.3f\tp:%4.2f\n"" , tabs , name , base , counts [ idx ] , n . entropy ( ) , p [ idx ] ) ) ; if ( ( ( n . entropy ( ) <= thresholdEntropy ) || ( p [ idx ] >= thresholdP ) ) && ( counts [ idx ] >= thresholdCount ) ) { Log . debug ( ""Name:"" + n . name + ""\tIdx:"" + + idx + ""\tEntropy: "" + n . entropy ( ) + ""\tP:"" + p [ idx ] + ""\tCount:"" + counts [ idx ] ) ; sb . append ( n . toString ( tabs + ""\t"" , thresholdEntropy , thresholdP , thresholdCount ) ) ; } } } return sb . toString ( ) ; } +" +691,"public boolean verify ( byte [ ] document , Signature signature , Credential credential ) { Objects . requireNonNull ( document ) ; SigData sigData ; SignatureCredential signatureCredential ; signatureCredential = getSignatureCredential ( credential ) ; sigData = getSigData ( signature ) ; if ( log . isTraceEnabled ( ) ) { } sigData . setVerified ( Crypto . verify ( sigData . bytes ( ) , document , signatureCredential . getKey ( ) ) ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( ""#MULTI_SIG# verify isVerified={} signature={}"" , sigData . isVerified ( ) , sigData . getHexString ( ) ) ; } return sigData . isVerified ( ) ; } +","public boolean verify ( byte [ ] document , Signature signature , Credential credential ) { Objects . requireNonNull ( document ) ; SigData sigData ; SignatureCredential signatureCredential ; signatureCredential = getSignatureCredential ( credential ) ; sigData = getSigData ( signature ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( ""#MULTI_SIG# verify signature={} publicKey={} document={}"" , Convert . toHexString ( sigData . bytes ( ) ) , Convert . toHexString ( signatureCredential . getKey ( ) ) , Convert . toHexString ( document ) ) ; } sigData . setVerified ( Crypto . verify ( sigData . bytes ( ) , document , signatureCredential . 
getKey ( ) ) ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( ""#MULTI_SIG# verify isVerified={} signature={}"" , sigData . isVerified ( ) , sigData . getHexString ( ) ) ; } return sigData . isVerified ( ) ; } +" +692,"public boolean verify ( byte [ ] document , Signature signature , Credential credential ) { Objects . requireNonNull ( document ) ; SigData sigData ; SignatureCredential signatureCredential ; signatureCredential = getSignatureCredential ( credential ) ; sigData = getSigData ( signature ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( ""#MULTI_SIG# verify signature={} publicKey={} document={}"" , Convert . toHexString ( sigData . bytes ( ) ) , Convert . toHexString ( signatureCredential . getKey ( ) ) , Convert . toHexString ( document ) ) ; } sigData . setVerified ( Crypto . verify ( sigData . bytes ( ) , document , signatureCredential . getKey ( ) ) ) ; if ( log . isTraceEnabled ( ) ) { } return sigData . isVerified ( ) ; } +","public boolean verify ( byte [ ] document , Signature signature , Credential credential ) { Objects . requireNonNull ( document ) ; SigData sigData ; SignatureCredential signatureCredential ; signatureCredential = getSignatureCredential ( credential ) ; sigData = getSigData ( signature ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( ""#MULTI_SIG# verify signature={} publicKey={} document={}"" , Convert . toHexString ( sigData . bytes ( ) ) , Convert . toHexString ( signatureCredential . getKey ( ) ) , Convert . toHexString ( document ) ) ; } sigData . setVerified ( Crypto . verify ( sigData . bytes ( ) , document , signatureCredential . getKey ( ) ) ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( ""#MULTI_SIG# verify isVerified={} signature={}"" , sigData . isVerified ( ) , sigData . getHexString ( ) ) ; } return sigData . isVerified ( ) ; } +" +693,"public void start ( ) { try { Class . forName ( this . dbConsumerConfiguration . getDriver ( ) ) ; connection = DriverManager . getConnection ( this . dbConsumerConfiguration . getUrl ( ) , this . dbConsumerConfiguration . getUsername ( ) , this . dbConsumerConfiguration . getPassword ( ) ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } +","public void start ( ) { try { Class . forName ( this . dbConsumerConfiguration . getDriver ( ) ) ; connection = DriverManager . getConnection ( this . dbConsumerConfiguration . getUrl ( ) , this . dbConsumerConfiguration . getUsername ( ) , this . dbConsumerConfiguration . getPassword ( ) ) ; logger . info ( "" - Started embedded managed component [DbMessageProvider]"" ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } } +" +694,"public boolean isValidRequestState ( final WebContext context ) { final String state = context . getRequestParameter ( AuthorizeRequestParam . STATE ) ; final Object sessionState = context . getSessionAttribute ( getName ( ) + SESSION_STATE_PARAMETER ) ; logger . debug ( ""Session context state: '{}'"" , sessionState ) ; final boolean emptySessionState = StringHelper . isEmptyString ( sessionState ) ; if ( emptySessionState ) { return false ; } final boolean result = StringHelper . equals ( state , ( String ) sessionState ) ; logger . debug ( ""Is valid state: '{}'"" , result ) ; return result ; } +","public boolean isValidRequestState ( final WebContext context ) { final String state = context . getRequestParameter ( AuthorizeRequestParam . STATE ) ; logger . debug ( ""oxAuth request state: '{}'"" , state ) ; final Object sessionState = context . 
getSessionAttribute ( getName ( ) + SESSION_STATE_PARAMETER ) ; logger . debug ( ""Session context state: '{}'"" , sessionState ) ; final boolean emptySessionState = StringHelper . isEmptyString ( sessionState ) ; if ( emptySessionState ) { return false ; } final boolean result = StringHelper . equals ( state , ( String ) sessionState ) ; logger . debug ( ""Is valid state: '{}'"" , result ) ; return result ; } +" +695,"public boolean isValidRequestState ( final WebContext context ) { final String state = context . getRequestParameter ( AuthorizeRequestParam . STATE ) ; logger . debug ( ""oxAuth request state: '{}'"" , state ) ; final Object sessionState = context . getSessionAttribute ( getName ( ) + SESSION_STATE_PARAMETER ) ; final boolean emptySessionState = StringHelper . isEmptyString ( sessionState ) ; if ( emptySessionState ) { return false ; } final boolean result = StringHelper . equals ( state , ( String ) sessionState ) ; logger . debug ( ""Is valid state: '{}'"" , result ) ; return result ; } +","public boolean isValidRequestState ( final WebContext context ) { final String state = context . getRequestParameter ( AuthorizeRequestParam . STATE ) ; logger . debug ( ""oxAuth request state: '{}'"" , state ) ; final Object sessionState = context . getSessionAttribute ( getName ( ) + SESSION_STATE_PARAMETER ) ; logger . debug ( ""Session context state: '{}'"" , sessionState ) ; final boolean emptySessionState = StringHelper . isEmptyString ( sessionState ) ; if ( emptySessionState ) { return false ; } final boolean result = StringHelper . equals ( state , ( String ) sessionState ) ; logger . debug ( ""Is valid state: '{}'"" , result ) ; return result ; } +" +696,"public boolean isValidRequestState ( final WebContext context ) { final String state = context . getRequestParameter ( AuthorizeRequestParam . STATE ) ; logger . debug ( ""oxAuth request state: '{}'"" , state ) ; final Object sessionState = context . getSessionAttribute ( getName ( ) + SESSION_STATE_PARAMETER ) ; logger . debug ( ""Session context state: '{}'"" , sessionState ) ; final boolean emptySessionState = StringHelper . isEmptyString ( sessionState ) ; if ( emptySessionState ) { return false ; } final boolean result = StringHelper . equals ( state , ( String ) sessionState ) ; return result ; } +","public boolean isValidRequestState ( final WebContext context ) { final String state = context . getRequestParameter ( AuthorizeRequestParam . STATE ) ; logger . debug ( ""oxAuth request state: '{}'"" , state ) ; final Object sessionState = context . getSessionAttribute ( getName ( ) + SESSION_STATE_PARAMETER ) ; logger . debug ( ""Session context state: '{}'"" , sessionState ) ; final boolean emptySessionState = StringHelper . isEmptyString ( sessionState ) ; if ( emptySessionState ) { return false ; } final boolean result = StringHelper . equals ( state , ( String ) sessionState ) ; logger . debug ( ""Is valid state: '{}'"" , result ) ; return result ; } +" +697,"private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? 
> > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +","private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +" +698,"private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . 
getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +","private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +" +699,"private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . 
debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +","private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +" +700,"private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { if ( plugin . 
canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +","private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +" +701,"private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . 
add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +","private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +" +702,"private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . 
daCandidates = candidates ; } +","private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +" +703,"private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? > > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { } } chain . daCandidates = candidates ; } +","private void calculateBestImplementationDACandidates ( final AbstractNodeTemplate nodeTemplate , final List < AbstractNodeTypeImplementation > impls , final List < IPlanBuilderPrePhaseDAPlugin < ? 
> > plugins , final List < AbstractNodeTemplate > infraNodes , final OperationChain chain ) { final List < DANodeTypeImplCandidate > candidates = new ArrayList < > ( ) ; for ( final AbstractNodeTypeImplementation impl : impls ) { LOG . debug ( ""Checking DAs of NodeTypeImpl {} and NodeTemplate {}"" , impl . getName ( ) , nodeTemplate . getId ( ) ) ; final DANodeTypeImplCandidate candidate = new DANodeTypeImplCandidate ( nodeTemplate , impl ) ; final List < AbstractDeploymentArtifact > effectiveDAs = calculateEffectiveDAs ( nodeTemplate , impl ) ; for ( final AbstractDeploymentArtifact da : effectiveDAs ) { LOG . debug ( ""Checking whether DA {} can be deployed"" , da . getName ( ) ) ; for ( final AbstractNodeTemplate infraNode : infraNodes ) { LOG . debug ( ""Checking if DA {} can be deployed on InfraNode {}"" , da . getName ( ) , infraNode . getId ( ) ) ; for ( final IPlanBuilderPrePhaseDAPlugin plugin : plugins ) { LOG . debug ( ""Checking with Plugin {}"" , plugin . getID ( ) ) ; if ( plugin . canHandle ( da , infraNode . getType ( ) ) ) { LOG . debug ( ""Adding Plugin, can handle DA on InfraNode"" ) ; candidate . add ( da , infraNode , plugin ) ; } } } } if ( candidate . isValid ( ) ) { LOG . debug ( ""Generated Candidate was valid, adding to all Candidates"" ) ; candidates . add ( candidate ) ; } else { LOG . debug ( ""Generated Candidate was invalid, don't add to all Candidates"" ) ; } } chain . daCandidates = candidates ; } +" +704,"public static Integer getNullLabelID ( int attributeID ) { String sql = ""select nominalLabelID from nominal_label where nominalAttributeID="" + attributeID + "" and nominalLabelCode='null'"" ; Connection conn = null ; PreparedStatement query = null ; ResultSet result = null ; try { conn = getMySqlConnection ( ) ; query = conn . prepareStatement ( sql ) ; result = query . executeQuery ( ) ; if ( result . next ( ) ) { return result . getInt ( 1 ) ; } } catch ( SQLException ex ) { } finally { close ( result ) ; close ( query ) ; close ( conn ) ; } return null ; } +","public static Integer getNullLabelID ( int attributeID ) { String sql = ""select nominalLabelID from nominal_label where nominalAttributeID="" + attributeID + "" and nominalLabelCode='null'"" ; Connection conn = null ; PreparedStatement query = null ; ResultSet result = null ; try { conn = getMySqlConnection ( ) ; query = conn . prepareStatement ( sql ) ; result = query . executeQuery ( ) ; if ( result . next ( ) ) { return result . getInt ( 1 ) ; } } catch ( SQLException ex ) { logger . error ( ""Error in executing SQL statement: "" + sql , ex ) ; } finally { close ( result ) ; close ( query ) ; close ( conn ) ; } return null ; } +" +705,"public Void call ( Object arg ) throws Exception { return null ; } +","public Void call ( Object arg ) throws Exception { _logger . info ( ""Shutdown of asynchbase client complete."" ) ; return null ; } +" +706,"private void jsonError ( String message ) { setResponse ( ""{\""error\"":\"""" + message + ""\""}"" ) ; } +","private void jsonError ( String message ) { setResponse ( ""{\""error\"":\"""" + message + ""\""}"" ) ; LOG . debug ( ""Error message return from RPC call: {}"" , message ) ; } +" +707,"private CredentialsProvider determineCredentialsProvider ( ) { final CredentialsProvider provider = innerBuilder . 
getCredentialsProvider ( ) ; if ( provider == null ) { ensureNotNull ( ""cluster id"" , clusterId ) ; ensureNotNull ( ""client id"" , clientId ) ; ensureNotNull ( ""client secret"" , clientSecret ) ; final OAuthCredentialsProviderBuilder builder = new OAuthCredentialsProviderBuilder ( ) ; return builder . audience ( String . format ( ""%s.%s"" , clusterId , BASE_ADDRESS ) ) . clientId ( clientId ) . clientSecret ( clientSecret ) . authorizationServerUrl ( BASE_AUTH_URL ) . build ( ) ; } else { return provider ; } } +","private CredentialsProvider determineCredentialsProvider ( ) { final CredentialsProvider provider = innerBuilder . getCredentialsProvider ( ) ; if ( provider == null ) { ensureNotNull ( ""cluster id"" , clusterId ) ; ensureNotNull ( ""client id"" , clientId ) ; ensureNotNull ( ""client secret"" , clientSecret ) ; final OAuthCredentialsProviderBuilder builder = new OAuthCredentialsProviderBuilder ( ) ; return builder . audience ( String . format ( ""%s.%s"" , clusterId , BASE_ADDRESS ) ) . clientId ( clientId ) . clientSecret ( clientSecret ) . authorizationServerUrl ( BASE_AUTH_URL ) . build ( ) ; } else { Loggers . LOGGER . debug ( ""Expected to use 'cluster id', 'client id' and 'client secret' to set credentials provider in the client cloud builder, "" + ""but overwriting with explicitly defined credentials provider."" ) ; return provider ; } } +" +708,"public Maybe < Object > getConfigRaw ( ConfigKey < ? > key , boolean includeInherited ) { if ( ownConfig . containsKey ( key ) ) { return Maybe . of ( ownConfig . get ( key ) ) ; } for ( String deprecatedName : key . getDeprecatedNames ( ) ) { ConfigKey < ? > deprecatedKey = ConfigKeys . newConfigKeyRenamed ( deprecatedName , key ) ; if ( ownConfig . containsKey ( deprecatedKey ) ) { return Maybe . of ( ownConfig . get ( deprecatedKey ) ) ; } } if ( key instanceof AbstractStructuredConfigKey ) { Object result = ( ( AbstractStructuredConfigKey ) key ) . rawValue ( ownConfig ) ; if ( result instanceof Iterable ) { if ( ! ( ( Iterable ) result ) . iterator ( ) . hasNext ( ) ) return Maybe . absent ( ""No value for structured collection key "" + key ) ; } else if ( result instanceof Map ) { if ( ( ( Map ) result ) . isEmpty ( ) ) return Maybe . absent ( ""No value for structured map key "" + key ) ; } else { LOG . warn ( ""Unsupported structured config key "" + key + ""; may return default empty value if unset"" ) ; } return Maybe . ofDisallowingNull ( result ) ; } if ( ! includeInherited || getParent ( ) == null ) return Maybe . absent ( ) ; return getParentInternal ( ) . config ( ) . getInternalConfigMap ( ) . getConfigRaw ( key , includeInherited ) ; } +","public Maybe < Object > getConfigRaw ( ConfigKey < ? > key , boolean includeInherited ) { if ( ownConfig . containsKey ( key ) ) { return Maybe . of ( ownConfig . get ( key ) ) ; } for ( String deprecatedName : key . getDeprecatedNames ( ) ) { ConfigKey < ? > deprecatedKey = ConfigKeys . newConfigKeyRenamed ( deprecatedName , key ) ; if ( ownConfig . containsKey ( deprecatedKey ) ) { LOG . warn ( ""Retrieving value with deprecated config key name '"" + deprecatedName + ""' for key "" + key ) ; return Maybe . of ( ownConfig . get ( deprecatedKey ) ) ; } } if ( key instanceof AbstractStructuredConfigKey ) { Object result = ( ( AbstractStructuredConfigKey ) key ) . rawValue ( ownConfig ) ; if ( result instanceof Iterable ) { if ( ! ( ( Iterable ) result ) . iterator ( ) . hasNext ( ) ) return Maybe . 
absent ( ""No value for structured collection key "" + key ) ; } else if ( result instanceof Map ) { if ( ( ( Map ) result ) . isEmpty ( ) ) return Maybe . absent ( ""No value for structured map key "" + key ) ; } else { LOG . warn ( ""Unsupported structured config key "" + key + ""; may return default empty value if unset"" ) ; } return Maybe . ofDisallowingNull ( result ) ; } if ( ! includeInherited || getParent ( ) == null ) return Maybe . absent ( ) ; return getParentInternal ( ) . config ( ) . getInternalConfigMap ( ) . getConfigRaw ( key , includeInherited ) ; } +" +709,"public Maybe < Object > getConfigRaw ( ConfigKey < ? > key , boolean includeInherited ) { if ( ownConfig . containsKey ( key ) ) { return Maybe . of ( ownConfig . get ( key ) ) ; } for ( String deprecatedName : key . getDeprecatedNames ( ) ) { ConfigKey < ? > deprecatedKey = ConfigKeys . newConfigKeyRenamed ( deprecatedName , key ) ; if ( ownConfig . containsKey ( deprecatedKey ) ) { LOG . warn ( ""Retrieving value with deprecated config key name '"" + deprecatedName + ""' for key "" + key ) ; return Maybe . of ( ownConfig . get ( deprecatedKey ) ) ; } } if ( key instanceof AbstractStructuredConfigKey ) { Object result = ( ( AbstractStructuredConfigKey ) key ) . rawValue ( ownConfig ) ; if ( result instanceof Iterable ) { if ( ! ( ( Iterable ) result ) . iterator ( ) . hasNext ( ) ) return Maybe . absent ( ""No value for structured collection key "" + key ) ; } else if ( result instanceof Map ) { if ( ( ( Map ) result ) . isEmpty ( ) ) return Maybe . absent ( ""No value for structured map key "" + key ) ; } else { } return Maybe . ofDisallowingNull ( result ) ; } if ( ! includeInherited || getParent ( ) == null ) return Maybe . absent ( ) ; return getParentInternal ( ) . config ( ) . getInternalConfigMap ( ) . getConfigRaw ( key , includeInherited ) ; } +","public Maybe < Object > getConfigRaw ( ConfigKey < ? > key , boolean includeInherited ) { if ( ownConfig . containsKey ( key ) ) { return Maybe . of ( ownConfig . get ( key ) ) ; } for ( String deprecatedName : key . getDeprecatedNames ( ) ) { ConfigKey < ? > deprecatedKey = ConfigKeys . newConfigKeyRenamed ( deprecatedName , key ) ; if ( ownConfig . containsKey ( deprecatedKey ) ) { LOG . warn ( ""Retrieving value with deprecated config key name '"" + deprecatedName + ""' for key "" + key ) ; return Maybe . of ( ownConfig . get ( deprecatedKey ) ) ; } } if ( key instanceof AbstractStructuredConfigKey ) { Object result = ( ( AbstractStructuredConfigKey ) key ) . rawValue ( ownConfig ) ; if ( result instanceof Iterable ) { if ( ! ( ( Iterable ) result ) . iterator ( ) . hasNext ( ) ) return Maybe . absent ( ""No value for structured collection key "" + key ) ; } else if ( result instanceof Map ) { if ( ( ( Map ) result ) . isEmpty ( ) ) return Maybe . absent ( ""No value for structured map key "" + key ) ; } else { LOG . warn ( ""Unsupported structured config key "" + key + ""; may return default empty value if unset"" ) ; } return Maybe . ofDisallowingNull ( result ) ; } if ( ! includeInherited || getParent ( ) == null ) return Maybe . absent ( ) ; return getParentInternal ( ) . config ( ) . getInternalConfigMap ( ) . getConfigRaw ( key , includeInherited ) ; } +" +710,"public LayoutPageTemplateStructure remove ( Serializable primaryKey ) throws NoSuchPageTemplateStructureException { Session session = null ; try { session = openSession ( ) ; LayoutPageTemplateStructure layoutPageTemplateStructure = ( LayoutPageTemplateStructure ) session . 
get ( LayoutPageTemplateStructureImpl . class , primaryKey ) ; if ( layoutPageTemplateStructure == null ) { if ( _log . isDebugEnabled ( ) ) { } throw new NoSuchPageTemplateStructureException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( layoutPageTemplateStructure ) ; } catch ( NoSuchPageTemplateStructureException noSuchEntityException ) { throw noSuchEntityException ; } catch ( Exception exception ) { throw processException ( exception ) ; } finally { closeSession ( session ) ; } } +","public LayoutPageTemplateStructure remove ( Serializable primaryKey ) throws NoSuchPageTemplateStructureException { Session session = null ; try { session = openSession ( ) ; LayoutPageTemplateStructure layoutPageTemplateStructure = ( LayoutPageTemplateStructure ) session . get ( LayoutPageTemplateStructureImpl . class , primaryKey ) ; if ( layoutPageTemplateStructure == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchPageTemplateStructureException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( layoutPageTemplateStructure ) ; } catch ( NoSuchPageTemplateStructureException noSuchEntityException ) { throw noSuchEntityException ; } catch ( Exception exception ) { throw processException ( exception ) ; } finally { closeSession ( session ) ; } } +" +711,"public static PubsubSubscription fromPath ( String path ) { if ( path . startsWith ( SUBSCRIPTION_RANDOM_TEST_PREFIX ) || path . startsWith ( SUBSCRIPTION_STARTING_SIGNAL ) ) { return new PubsubSubscription ( Type . FAKE , """" , path ) ; } String projectName , subscriptionName ; Matcher v1beta1Match = V1BETA1_SUBSCRIPTION_REGEXP . matcher ( path ) ; if ( v1beta1Match . matches ( ) ) { projectName = v1beta1Match . group ( 1 ) ; subscriptionName = v1beta1Match . group ( 2 ) ; } else { Matcher match = SUBSCRIPTION_REGEXP . matcher ( path ) ; if ( ! match . matches ( ) ) { throw new IllegalArgumentException ( ""Pubsub subscription is not in "" + ""projects//subscriptions/ format: "" + path ) ; } projectName = match . group ( 1 ) ; subscriptionName = match . group ( 2 ) ; } validateProjectName ( projectName ) ; validatePubsubName ( subscriptionName ) ; return new PubsubSubscription ( Type . NORMAL , projectName , subscriptionName ) ; } +","public static PubsubSubscription fromPath ( String path ) { if ( path . startsWith ( SUBSCRIPTION_RANDOM_TEST_PREFIX ) || path . startsWith ( SUBSCRIPTION_STARTING_SIGNAL ) ) { return new PubsubSubscription ( Type . FAKE , """" , path ) ; } String projectName , subscriptionName ; Matcher v1beta1Match = V1BETA1_SUBSCRIPTION_REGEXP . matcher ( path ) ; if ( v1beta1Match . matches ( ) ) { LOG . warn ( ""Saw subscription in v1beta1 format. Subscriptions should be in the format "" + ""projects//subscriptions/"" ) ; projectName = v1beta1Match . group ( 1 ) ; subscriptionName = v1beta1Match . group ( 2 ) ; } else { Matcher match = SUBSCRIPTION_REGEXP . matcher ( path ) ; if ( ! match . matches ( ) ) { throw new IllegalArgumentException ( ""Pubsub subscription is not in "" + ""projects//subscriptions/ format: "" + path ) ; } projectName = match . group ( 1 ) ; subscriptionName = match . group ( 2 ) ; } validateProjectName ( projectName ) ; validatePubsubName ( subscriptionName ) ; return new PubsubSubscription ( Type . NORMAL , projectName , subscriptionName ) ; } +" +712,"public void shutdown ( ) { for ( int i = 0 ; i < numberOfPartition ; i ++ ) { executors . get ( i ) . 
shutdown ( ) ; } try { for ( int i = 0 ; i < numberOfPartition ; i ++ ) { executors . get ( i ) . awaitTermination ( threadTerminationMaxTime , TimeUnit . NANOSECONDS ) ; } } catch ( InterruptedException e ) { } } +","public void shutdown ( ) { for ( int i = 0 ; i < numberOfPartition ; i ++ ) { executors . get ( i ) . shutdown ( ) ; } try { for ( int i = 0 ; i < numberOfPartition ; i ++ ) { executors . get ( i ) . awaitTermination ( threadTerminationMaxTime , TimeUnit . NANOSECONDS ) ; } } catch ( InterruptedException e ) { LOGGER . error ( ""Error while stopping bootstrap consumer"" , e ) ; } } +" +713,"public void opening ( String table , String shard , BlurIndex index ) { Map < String , String > properties = _configuration . getProperties ( ) ; for ( Entry < String , String > entry : properties . entrySet ( ) ) { if ( isFilterAlias ( entry . getKey ( ) ) ) { String value = entry . getValue ( ) ; if ( value == null || value . isEmpty ( ) ) { continue ; } String filterAlias = getFilterAlias ( entry . getKey ( ) ) ; String filterQuery = value ; Map < String , String > map = getThisTablesMap ( table ) ; map . put ( filterAlias , filterQuery ) ; } } } +","public void opening ( String table , String shard , BlurIndex index ) { Map < String , String > properties = _configuration . getProperties ( ) ; for ( Entry < String , String > entry : properties . entrySet ( ) ) { if ( isFilterAlias ( entry . getKey ( ) ) ) { String value = entry . getValue ( ) ; if ( value == null || value . isEmpty ( ) ) { continue ; } String filterAlias = getFilterAlias ( entry . getKey ( ) ) ; String filterQuery = value ; Map < String , String > map = getThisTablesMap ( table ) ; LOG . info ( ""Loading filter alias [{0}] with query [{1}] for table [{2}]"" , filterAlias , filterQuery , table ) ; map . put ( filterAlias , filterQuery ) ; } } } +" +714,"private MongoClientURI parseURL ( String url , Properties defaults ) { if ( url == null ) { return null ; } if ( ! StringUtils . startsWithIgnoreCase ( url , PREFIX ) ) { return null ; } if ( defaults . getProperty ( ""user"" ) != null && defaults . getProperty ( ""password"" ) != null ) { url = url . replace ( PREFIX , PREFIX + defaults . getProperty ( ""user"" ) + "":"" + defaults . getProperty ( ""password"" ) + ""@"" ) ; } String options = MongoClientPropertyHelper . formatProperties ( defaults ) ; LOGGER . debug ( ""the options:{}"" , options ) ; try { return new MongoClientURI ( options == null ? url : url + ""?"" + options ) ; } catch ( Exception e ) { LOGGER . error ( ""parseURLError"" , e ) ; return null ; } } +","private MongoClientURI parseURL ( String url , Properties defaults ) { if ( url == null ) { return null ; } if ( ! StringUtils . startsWithIgnoreCase ( url , PREFIX ) ) { return null ; } if ( defaults . getProperty ( ""user"" ) != null && defaults . getProperty ( ""password"" ) != null ) { url = url . replace ( PREFIX , PREFIX + defaults . getProperty ( ""user"" ) + "":"" + defaults . getProperty ( ""password"" ) + ""@"" ) ; } LOGGER . info ( ""Mongodb url:"" + url ) ; String options = MongoClientPropertyHelper . formatProperties ( defaults ) ; LOGGER . debug ( ""the options:{}"" , options ) ; try { return new MongoClientURI ( options == null ? url : url + ""?"" + options ) ; } catch ( Exception e ) { LOGGER . error ( ""parseURLError"" , e ) ; return null ; } } +" +715,"private MongoClientURI parseURL ( String url , Properties defaults ) { if ( url == null ) { return null ; } if ( ! StringUtils . 
startsWithIgnoreCase ( url , PREFIX ) ) { return null ; } if ( defaults . getProperty ( ""user"" ) != null && defaults . getProperty ( ""password"" ) != null ) { url = url . replace ( PREFIX , PREFIX + defaults . getProperty ( ""user"" ) + "":"" + defaults . getProperty ( ""password"" ) + ""@"" ) ; } LOGGER . info ( ""Mongodb url:"" + url ) ; String options = MongoClientPropertyHelper . formatProperties ( defaults ) ; try { return new MongoClientURI ( options == null ? url : url + ""?"" + options ) ; } catch ( Exception e ) { LOGGER . error ( ""parseURLError"" , e ) ; return null ; } } +","private MongoClientURI parseURL ( String url , Properties defaults ) { if ( url == null ) { return null ; } if ( ! StringUtils . startsWithIgnoreCase ( url , PREFIX ) ) { return null ; } if ( defaults . getProperty ( ""user"" ) != null && defaults . getProperty ( ""password"" ) != null ) { url = url . replace ( PREFIX , PREFIX + defaults . getProperty ( ""user"" ) + "":"" + defaults . getProperty ( ""password"" ) + ""@"" ) ; } LOGGER . info ( ""Mongodb url:"" + url ) ; String options = MongoClientPropertyHelper . formatProperties ( defaults ) ; LOGGER . debug ( ""the options:{}"" , options ) ; try { return new MongoClientURI ( options == null ? url : url + ""?"" + options ) ; } catch ( Exception e ) { LOGGER . error ( ""parseURLError"" , e ) ; return null ; } } +" +716,"private MongoClientURI parseURL ( String url , Properties defaults ) { if ( url == null ) { return null ; } if ( ! StringUtils . startsWithIgnoreCase ( url , PREFIX ) ) { return null ; } if ( defaults . getProperty ( ""user"" ) != null && defaults . getProperty ( ""password"" ) != null ) { url = url . replace ( PREFIX , PREFIX + defaults . getProperty ( ""user"" ) + "":"" + defaults . getProperty ( ""password"" ) + ""@"" ) ; } LOGGER . info ( ""Mongodb url:"" + url ) ; String options = MongoClientPropertyHelper . formatProperties ( defaults ) ; LOGGER . debug ( ""the options:{}"" , options ) ; try { return new MongoClientURI ( options == null ? url : url + ""?"" + options ) ; } catch ( Exception e ) { return null ; } } +","private MongoClientURI parseURL ( String url , Properties defaults ) { if ( url == null ) { return null ; } if ( ! StringUtils . startsWithIgnoreCase ( url , PREFIX ) ) { return null ; } if ( defaults . getProperty ( ""user"" ) != null && defaults . getProperty ( ""password"" ) != null ) { url = url . replace ( PREFIX , PREFIX + defaults . getProperty ( ""user"" ) + "":"" + defaults . getProperty ( ""password"" ) + ""@"" ) ; } LOGGER . info ( ""Mongodb url:"" + url ) ; String options = MongoClientPropertyHelper . formatProperties ( defaults ) ; LOGGER . debug ( ""the options:{}"" , options ) ; try { return new MongoClientURI ( options == null ? url : url + ""?"" + options ) ; } catch ( Exception e ) { LOGGER . error ( ""parseURLError"" , e ) ; return null ; } } +" +717,"private CacheEntry getCache ( ProceedingJoinPoint pjp ) { String name = getCacheName ( pjp ) ; CacheEntry entry = _entries . get ( name ) ; if ( entry == null ) { Method method = _cacheableMethodKeyFactoryManager . getMatchingMethodForJoinPoint ( pjp ) ; CacheableMethodKeyFactory keyFactory = getKeyFactory ( pjp , method ) ; boolean valueSerializable = isValueSerializable ( pjp , method ) ; Cache cache = _cacheManager . getCache ( name ) ; if ( cache == null ) { cache = createCache ( pjp , name ) ; if ( cache == null ) { if ( ! _cacheManager . cacheExists ( name ) ) try { _cacheManager . addCache ( name ) ; } catch ( ObjectExistsException oee ) { _log . 
error ( ""Cache already exists: "" + name ) ; } cache = _cacheManager . getCache ( name ) ; } else { try { _cacheManager . addCache ( cache ) ; } catch ( ObjectExistsException oee ) { _log . error ( ""Cache already exists: "" + name ) ; } } } synchronized ( _entries ) { entry = new CacheEntry ( keyFactory , valueSerializable , cache ) ; if ( _entries . containsKey ( name ) ) { } else { _entries . put ( name , entry ) ; } } } return _entries . get ( name ) ; } +","private CacheEntry getCache ( ProceedingJoinPoint pjp ) { String name = getCacheName ( pjp ) ; CacheEntry entry = _entries . get ( name ) ; if ( entry == null ) { Method method = _cacheableMethodKeyFactoryManager . getMatchingMethodForJoinPoint ( pjp ) ; CacheableMethodKeyFactory keyFactory = getKeyFactory ( pjp , method ) ; boolean valueSerializable = isValueSerializable ( pjp , method ) ; Cache cache = _cacheManager . getCache ( name ) ; if ( cache == null ) { cache = createCache ( pjp , name ) ; if ( cache == null ) { if ( ! _cacheManager . cacheExists ( name ) ) try { _cacheManager . addCache ( name ) ; } catch ( ObjectExistsException oee ) { _log . error ( ""Cache already exists: "" + name ) ; } cache = _cacheManager . getCache ( name ) ; } else { try { _cacheManager . addCache ( cache ) ; } catch ( ObjectExistsException oee ) { _log . error ( ""Cache already exists: "" + name ) ; } } } synchronized ( _entries ) { entry = new CacheEntry ( keyFactory , valueSerializable , cache ) ; if ( _entries . containsKey ( name ) ) { _log . warn ( ""concurrent attempt to create cache = "" + name ) ; } else { _entries . put ( name , entry ) ; } } } return _entries . get ( name ) ; } +" +718,"private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { return result ; } logger . debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +","private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . 
getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } logger . debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +" +719,"private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +","private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . 
left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } logger . debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +" +720,"private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } logger . debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +","private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } logger . 
debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +" +721,"private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } logger . debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +","private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } logger . debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . 
getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +" +722,"private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } logger . debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { } return result ; } +","private List < Log > getLogsFromOneLogDataFile ( File file , Pair < Long , Long > startAndEndOffset ) { List < Log > result = new ArrayList < > ( ) ; if ( file . getName ( ) . equals ( getCurrentLogDataFile ( ) . getName ( ) ) ) { forceFlushLogBufferWithoutCloseFile ( ) ; } try ( FileInputStream fileInputStream = new FileInputStream ( file ) ; BufferedInputStream bufferedInputStream = new BufferedInputStream ( fileInputStream ) ) { long bytesSkip = bufferedInputStream . skip ( startAndEndOffset . left ) ; if ( bytesSkip != startAndEndOffset . left ) { logger . error ( ""read file={} failed when skip {} bytes, actual skip bytes={}"" , file . getAbsoluteFile ( ) , startAndEndOffset . left , bytesSkip ) ; return result ; } logger . debug ( ""start to read file={} and skip {} bytes, startOffset={}, endOffset={}, fileLength={}"" , file . getAbsoluteFile ( ) , bytesSkip , startAndEndOffset . left , startAndEndOffset . right , file . 
length ( ) ) ; long currentReadOffset = bytesSkip ; while ( currentReadOffset <= startAndEndOffset . right ) { logger . debug ( ""read file={}, currentReadOffset={}, end offset={}"" , file . getAbsoluteFile ( ) , currentReadOffset , startAndEndOffset . right ) ; int logSize = ReadWriteIOUtils . readInt ( bufferedInputStream ) ; Log log = null ; log = parser . parse ( ByteBuffer . wrap ( ReadWriteIOUtils . readBytes ( bufferedInputStream , logSize ) ) ) ; result . add ( log ) ; currentReadOffset = currentReadOffset + Integer . BYTES + logSize ; } } catch ( UnknownLogTypeException e ) { logger . error ( ""Unknown log detected "" , e ) ; } catch ( IOException e ) { logger . error ( ""Cannot read log from file={} "" , file . getAbsoluteFile ( ) , e ) ; } return result ; } +" +723,"private void cleanSCMNodes ( String sourceTool , String almKeyProcessedIndex , String almKeysIndex ) { GraphDBHandler dbHandler = new GraphDBHandler ( ) ; try { int processedRecords = 1 ; while ( processedRecords > 0 ) { long st = System . currentTimeMillis ( ) ; String scmCleanUpCypher = ""MATCH (n:SCM:DATA) where not n:RAW and exists(n."" + almKeyProcessedIndex + "") "" + ""WITH distinct n limit "" + dataBatchSize + "" remove n."" + almKeyProcessedIndex + "" return count(n)"" ; GraphResponse response = dbHandler . executeCypherQuery ( scmCleanUpCypher ) ; processedRecords = response . getJson ( ) . get ( ConfigOptions . RESULTS ) . getAsJsonArray ( ) . get ( 0 ) . getAsJsonObject ( ) . get ( ""data"" ) . getAsJsonArray ( ) . get ( 0 ) . getAsJsonObject ( ) . get ( ""row"" ) . getAsJsonArray ( ) . get ( 0 ) . getAsInt ( ) ; } } catch ( InsightsCustomException e ) { log . error ( ""Unable to extract "" + sourceTool + "" keys from SCM Commit messages"" , e ) ; } } +","private void cleanSCMNodes ( String sourceTool , String almKeyProcessedIndex , String almKeysIndex ) { GraphDBHandler dbHandler = new GraphDBHandler ( ) ; try { int processedRecords = 1 ; while ( processedRecords > 0 ) { long st = System . currentTimeMillis ( ) ; String scmCleanUpCypher = ""MATCH (n:SCM:DATA) where not n:RAW and exists(n."" + almKeyProcessedIndex + "") "" + ""WITH distinct n limit "" + dataBatchSize + "" remove n."" + almKeyProcessedIndex + "" return count(n)"" ; GraphResponse response = dbHandler . executeCypherQuery ( scmCleanUpCypher ) ; processedRecords = response . getJson ( ) . get ( ConfigOptions . RESULTS ) . getAsJsonArray ( ) . get ( 0 ) . getAsJsonObject ( ) . get ( ""data"" ) . getAsJsonArray ( ) . get ( 0 ) . getAsJsonObject ( ) . get ( ""row"" ) . getAsJsonArray ( ) . get ( 0 ) . getAsInt ( ) ; log . debug ( ""Processed "" + processedRecords + "" SCM records, time taken: "" + ( System . currentTimeMillis ( ) - st ) + "" ms"" ) ; } } catch ( InsightsCustomException e ) { log . error ( ""Unable to extract "" + sourceTool + "" keys from SCM Commit messages"" , e ) ; } } +" +724,"private void cleanSCMNodes ( String sourceTool , String almKeyProcessedIndex , String almKeysIndex ) { GraphDBHandler dbHandler = new GraphDBHandler ( ) ; try { int processedRecords = 1 ; while ( processedRecords > 0 ) { long st = System . currentTimeMillis ( ) ; String scmCleanUpCypher = ""MATCH (n:SCM:DATA) where not n:RAW and exists(n."" + almKeyProcessedIndex + "") "" + ""WITH distinct n limit "" + dataBatchSize + "" remove n."" + almKeyProcessedIndex + "" return count(n)"" ; GraphResponse response = dbHandler . executeCypherQuery ( scmCleanUpCypher ) ; processedRecords = response . getJson ( ) . get ( ConfigOptions . RESULTS ) . 
getAsJsonArray ( ) . get ( 0 ) . getAsJsonObject ( ) . get ( ""data"" ) . getAsJsonArray ( ) . get ( 0 ) . getAsJsonObject ( ) . get ( ""row"" ) . getAsJsonArray ( ) . get ( 0 ) . getAsInt ( ) ; log . debug ( ""Processed "" + processedRecords + "" SCM records, time taken: "" + ( System . currentTimeMillis ( ) - st ) + "" ms"" ) ; } } catch ( InsightsCustomException e ) { } } +","private void cleanSCMNodes ( String sourceTool , String almKeyProcessedIndex , String almKeysIndex ) { GraphDBHandler dbHandler = new GraphDBHandler ( ) ; try { int processedRecords = 1 ; while ( processedRecords > 0 ) { long st = System . currentTimeMillis ( ) ; String scmCleanUpCypher = ""MATCH (n:SCM:DATA) where not n:RAW and exists(n."" + almKeyProcessedIndex + "") "" + ""WITH distinct n limit "" + dataBatchSize + "" remove n."" + almKeyProcessedIndex + "" return count(n)"" ; GraphResponse response = dbHandler . executeCypherQuery ( scmCleanUpCypher ) ; processedRecords = response . getJson ( ) . get ( ConfigOptions . RESULTS ) . getAsJsonArray ( ) . get ( 0 ) . getAsJsonObject ( ) . get ( ""data"" ) . getAsJsonArray ( ) . get ( 0 ) . getAsJsonObject ( ) . get ( ""row"" ) . getAsJsonArray ( ) . get ( 0 ) . getAsInt ( ) ; log . debug ( ""Processed "" + processedRecords + "" SCM records, time taken: "" + ( System . currentTimeMillis ( ) - st ) + "" ms"" ) ; } } catch ( InsightsCustomException e ) { log . error ( ""Unable to extract "" + sourceTool + "" keys from SCM Commit messages"" , e ) ; } } +" +725,"public void activate ( BeanManager bm ) { for ( Class < ? > c : class2Anno . keySet ( ) ) { sortedAnnotatedClassNames . add ( c . getCanonicalName ( ) ) ; } Collections . sort ( sortedAnnotatedClassNames ) ; for ( Class < ? > cls : class2Anno . keySet ( ) ) { Set < Bean < ? > > beans = bm . getBeans ( cls ) ; Bean < ? > bean = bm . resolve ( beans ) ; assert bean != null ; context2Anno . put ( bean , class2Anno . get ( cls ) ) ; } if ( isTrace ) { StringBuilder txt = new StringBuilder ( 128 ) ; txt . append ( ""RedirectScopedBeanHolder configuration."" ) ; txt . append ( "" Annotated Beans: "" ) ; txt . append ( getConfigAsString ( ) ) ; } } +","public void activate ( BeanManager bm ) { for ( Class < ? > c : class2Anno . keySet ( ) ) { sortedAnnotatedClassNames . add ( c . getCanonicalName ( ) ) ; } Collections . sort ( sortedAnnotatedClassNames ) ; for ( Class < ? > cls : class2Anno . keySet ( ) ) { Set < Bean < ? > > beans = bm . getBeans ( cls ) ; Bean < ? > bean = bm . resolve ( beans ) ; assert bean != null ; context2Anno . put ( bean , class2Anno . get ( cls ) ) ; } if ( isTrace ) { StringBuilder txt = new StringBuilder ( 128 ) ; txt . append ( ""RedirectScopedBeanHolder configuration."" ) ; txt . append ( "" Annotated Beans: "" ) ; txt . append ( getConfigAsString ( ) ) ; LOG . debug ( txt . toString ( ) ) ; } } +" +726,"private void processUndeployRequest ( UUID nodeId , GridDeploymentRequest req ) { if ( log . isDebugEnabled ( ) ) ctx . deploy ( ) . undeployTask ( nodeId , req . resourceName ( ) ) ; } +","private void processUndeployRequest ( UUID nodeId , GridDeploymentRequest req ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Received undeploy request [nodeId="" + nodeId + "", req="" + req + ']' ) ; ctx . deploy ( ) . undeployTask ( nodeId , req . resourceName ( ) ) ; } +" +727,"public ShellyDeviceProfile getDeviceProfile ( String thingType ) throws ShellyApiException { String json = request ( SHELLY_URL_SETTINGS ) ; if ( json . 
contains ( ""\""type\"":\""SHDM-"" ) ) { json = fixDimmerJson ( json ) ; } profile . initialize ( thingType , json ) ; profile . thingName = profile . hostname ; if ( profile . isLight && ( profile . numMeters == 0 ) ) { logger . debug ( ""{}: Get number of meters from light status"" , thingName ) ; ShellyStatusLight status = getLightStatus ( ) ; profile . numMeters = status . meters != null ? status . meters . size ( ) : 0 ; } if ( profile . isSense ) { profile . irCodes = getIRCodeList ( ) ; logger . debug ( ""{}: Sense stored key list loaded, {} entries."" , thingName , profile . irCodes . size ( ) ) ; } return profile ; } +","public ShellyDeviceProfile getDeviceProfile ( String thingType ) throws ShellyApiException { String json = request ( SHELLY_URL_SETTINGS ) ; if ( json . contains ( ""\""type\"":\""SHDM-"" ) ) { logger . trace ( ""{}: Detected a Shelly Dimmer: fix Json (replace lights[] tag with dimmers[]"" , thingName ) ; json = fixDimmerJson ( json ) ; } profile . initialize ( thingType , json ) ; profile . thingName = profile . hostname ; if ( profile . isLight && ( profile . numMeters == 0 ) ) { logger . debug ( ""{}: Get number of meters from light status"" , thingName ) ; ShellyStatusLight status = getLightStatus ( ) ; profile . numMeters = status . meters != null ? status . meters . size ( ) : 0 ; } if ( profile . isSense ) { profile . irCodes = getIRCodeList ( ) ; logger . debug ( ""{}: Sense stored key list loaded, {} entries."" , thingName , profile . irCodes . size ( ) ) ; } return profile ; } +" +728,"public ShellyDeviceProfile getDeviceProfile ( String thingType ) throws ShellyApiException { String json = request ( SHELLY_URL_SETTINGS ) ; if ( json . contains ( ""\""type\"":\""SHDM-"" ) ) { logger . trace ( ""{}: Detected a Shelly Dimmer: fix Json (replace lights[] tag with dimmers[]"" , thingName ) ; json = fixDimmerJson ( json ) ; } profile . initialize ( thingType , json ) ; profile . thingName = profile . hostname ; if ( profile . isLight && ( profile . numMeters == 0 ) ) { ShellyStatusLight status = getLightStatus ( ) ; profile . numMeters = status . meters != null ? status . meters . size ( ) : 0 ; } if ( profile . isSense ) { profile . irCodes = getIRCodeList ( ) ; logger . debug ( ""{}: Sense stored key list loaded, {} entries."" , thingName , profile . irCodes . size ( ) ) ; } return profile ; } +","public ShellyDeviceProfile getDeviceProfile ( String thingType ) throws ShellyApiException { String json = request ( SHELLY_URL_SETTINGS ) ; if ( json . contains ( ""\""type\"":\""SHDM-"" ) ) { logger . trace ( ""{}: Detected a Shelly Dimmer: fix Json (replace lights[] tag with dimmers[]"" , thingName ) ; json = fixDimmerJson ( json ) ; } profile . initialize ( thingType , json ) ; profile . thingName = profile . hostname ; if ( profile . isLight && ( profile . numMeters == 0 ) ) { logger . debug ( ""{}: Get number of meters from light status"" , thingName ) ; ShellyStatusLight status = getLightStatus ( ) ; profile . numMeters = status . meters != null ? status . meters . size ( ) : 0 ; } if ( profile . isSense ) { profile . irCodes = getIRCodeList ( ) ; logger . debug ( ""{}: Sense stored key list loaded, {} entries."" , thingName , profile . irCodes . size ( ) ) ; } return profile ; } +" +729,"public ShellyDeviceProfile getDeviceProfile ( String thingType ) throws ShellyApiException { String json = request ( SHELLY_URL_SETTINGS ) ; if ( json . contains ( ""\""type\"":\""SHDM-"" ) ) { logger . 
trace ( ""{}: Detected a Shelly Dimmer: fix Json (replace lights[] tag with dimmers[]"" , thingName ) ; json = fixDimmerJson ( json ) ; } profile . initialize ( thingType , json ) ; profile . thingName = profile . hostname ; if ( profile . isLight && ( profile . numMeters == 0 ) ) { logger . debug ( ""{}: Get number of meters from light status"" , thingName ) ; ShellyStatusLight status = getLightStatus ( ) ; profile . numMeters = status . meters != null ? status . meters . size ( ) : 0 ; } if ( profile . isSense ) { profile . irCodes = getIRCodeList ( ) ; } return profile ; } +","public ShellyDeviceProfile getDeviceProfile ( String thingType ) throws ShellyApiException { String json = request ( SHELLY_URL_SETTINGS ) ; if ( json . contains ( ""\""type\"":\""SHDM-"" ) ) { logger . trace ( ""{}: Detected a Shelly Dimmer: fix Json (replace lights[] tag with dimmers[]"" , thingName ) ; json = fixDimmerJson ( json ) ; } profile . initialize ( thingType , json ) ; profile . thingName = profile . hostname ; if ( profile . isLight && ( profile . numMeters == 0 ) ) { logger . debug ( ""{}: Get number of meters from light status"" , thingName ) ; ShellyStatusLight status = getLightStatus ( ) ; profile . numMeters = status . meters != null ? status . meters . size ( ) : 0 ; } if ( profile . isSense ) { profile . irCodes = getIRCodeList ( ) ; logger . debug ( ""{}: Sense stored key list loaded, {} entries."" , thingName , profile . irCodes . size ( ) ) ; } return profile ; } +" +730,"public void add ( Integer columnIndex , Object columnValue , Long columnCount , StructField columnField ) { StandardColumnStatistics newColumnStatistics ; if ( ! columnStatisticsMap . containsKey ( columnIndex ) ) { DataType columnDataType = columnField . dataType ( ) ; switch ( columnDataType . simpleString ( ) ) { case ""tinyint"" : newColumnStatistics = new ByteColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""smallint"" : newColumnStatistics = new ShortColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""int"" : newColumnStatistics = new IntegerColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""bigint"" : newColumnStatistics = new LongColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""float"" : newColumnStatistics = new FloatColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""double"" : newColumnStatistics = new DoubleColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""string"" : newColumnStatistics = new StringColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""boolean"" : newColumnStatistics = new BooleanColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""date"" : newColumnStatistics = new DateColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""timestamp"" : newColumnStatistics = new TimestampColumnStatistics ( columnField , profilerConfiguration ) ; break ; default : String decimalTypeRegex = ""decimal\\S+"" ; if ( columnDataType . simpleString ( ) . matches ( decimalTypeRegex ) ) { newColumnStatistics = new BigDecimalColumnStatistics ( columnField , profilerConfiguration ) ; } else { if ( log . isWarnEnabled ( ) ) { } newColumnStatistics = new UnsupportedColumnStatistics ( columnField , profilerConfiguration ) ; } } columnStatisticsMap . put ( columnIndex , newColumnStatistics ) ; } StandardColumnStatistics currentColumnStatistics = columnStatisticsMap . get ( columnIndex ) ; currentColumnStatistics . 
accomodate ( columnValue , columnCount ) ; } +","public void add ( Integer columnIndex , Object columnValue , Long columnCount , StructField columnField ) { StandardColumnStatistics newColumnStatistics ; if ( ! columnStatisticsMap . containsKey ( columnIndex ) ) { DataType columnDataType = columnField . dataType ( ) ; switch ( columnDataType . simpleString ( ) ) { case ""tinyint"" : newColumnStatistics = new ByteColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""smallint"" : newColumnStatistics = new ShortColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""int"" : newColumnStatistics = new IntegerColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""bigint"" : newColumnStatistics = new LongColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""float"" : newColumnStatistics = new FloatColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""double"" : newColumnStatistics = new DoubleColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""string"" : newColumnStatistics = new StringColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""boolean"" : newColumnStatistics = new BooleanColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""date"" : newColumnStatistics = new DateColumnStatistics ( columnField , profilerConfiguration ) ; break ; case ""timestamp"" : newColumnStatistics = new TimestampColumnStatistics ( columnField , profilerConfiguration ) ; break ; default : String decimalTypeRegex = ""decimal\\S+"" ; if ( columnDataType . simpleString ( ) . matches ( decimalTypeRegex ) ) { newColumnStatistics = new BigDecimalColumnStatistics ( columnField , profilerConfiguration ) ; } else { if ( log . isWarnEnabled ( ) ) { log . warn ( ""[PROFILER-INFO] Unsupported data type: {}"" , columnDataType . simpleString ( ) ) ; } newColumnStatistics = new UnsupportedColumnStatistics ( columnField , profilerConfiguration ) ; } } columnStatisticsMap . put ( columnIndex , newColumnStatistics ) ; } StandardColumnStatistics currentColumnStatistics = columnStatisticsMap . get ( columnIndex ) ; currentColumnStatistics . accomodate ( columnValue , columnCount ) ; } +" +731,"protected void notifyListenersOfValue ( final VType value ) { try { if ( ! value_notification_lock . tryLock ( 20 , TimeUnit . SECONDS ) ) throw new Exception ( ""Timeout"" ) ; } catch ( Exception ex ) { return ; } try { last_value = value ; for ( ValueEventHandler . Subscription sub : value_subs ) { try { sub . update ( value ) ; } catch ( Throwable ex ) { logger . log ( Level . WARNING , name + "" value update error"" , ex ) ; } } } finally { value_notification_lock . unlock ( ) ; } } +","protected void notifyListenersOfValue ( final VType value ) { try { if ( ! value_notification_lock . tryLock ( 20 , TimeUnit . SECONDS ) ) throw new Exception ( ""Timeout"" ) ; } catch ( Exception ex ) { logger . log ( Level . SEVERE , ""Cannot lock "" + name , ex ) ; return ; } try { last_value = value ; for ( ValueEventHandler . Subscription sub : value_subs ) { try { sub . update ( value ) ; } catch ( Throwable ex ) { logger . log ( Level . WARNING , name + "" value update error"" , ex ) ; } } } finally { value_notification_lock . unlock ( ) ; } } +" +732,"protected void notifyListenersOfValue ( final VType value ) { try { if ( ! value_notification_lock . tryLock ( 20 , TimeUnit . SECONDS ) ) throw new Exception ( ""Timeout"" ) ; } catch ( Exception ex ) { logger . log ( Level . 
SEVERE , ""Cannot lock "" + name , ex ) ; return ; } try { last_value = value ; for ( ValueEventHandler . Subscription sub : value_subs ) { try { sub . update ( value ) ; } catch ( Throwable ex ) { } } } finally { value_notification_lock . unlock ( ) ; } } +","protected void notifyListenersOfValue ( final VType value ) { try { if ( ! value_notification_lock . tryLock ( 20 , TimeUnit . SECONDS ) ) throw new Exception ( ""Timeout"" ) ; } catch ( Exception ex ) { logger . log ( Level . SEVERE , ""Cannot lock "" + name , ex ) ; return ; } try { last_value = value ; for ( ValueEventHandler . Subscription sub : value_subs ) { try { sub . update ( value ) ; } catch ( Throwable ex ) { logger . log ( Level . WARNING , name + "" value update error"" , ex ) ; } } } finally { value_notification_lock . unlock ( ) ; } } +" +733,"public void basicBridgeInvalidate ( Object key , Object p_callbackArg , ClientProxyMembershipID memberId , boolean fromClient , EntryEventImpl clientEvent ) throws TimeoutException , EntryNotFoundException , CacheWriterException { Lock lock = getDistributedLockIfGlobal ( key ) ; try { super . basicBridgeInvalidate ( key , p_callbackArg , memberId , fromClient , clientEvent ) ; } finally { if ( lock != null ) { lock . unlock ( ) ; } } } +","public void basicBridgeInvalidate ( Object key , Object p_callbackArg , ClientProxyMembershipID memberId , boolean fromClient , EntryEventImpl clientEvent ) throws TimeoutException , EntryNotFoundException , CacheWriterException { Lock lock = getDistributedLockIfGlobal ( key ) ; try { super . basicBridgeInvalidate ( key , p_callbackArg , memberId , fromClient , clientEvent ) ; } finally { if ( lock != null ) { logger . debug ( ""releasing distributed lock on {}"" , key ) ; lock . unlock ( ) ; } } } +" +734,"private void computeLayout ( final Graphics2D gc , final Rectangle bounds , final double min , final double max ) { final int x_axis_height = x_axis . getDesiredPixelSize ( bounds , gc ) ; final int y_axis_height = bounds . height - x_axis_height ; final int y_axis_width = y_axis . getDesiredPixelSize ( new Rectangle ( 0 , 0 , bounds . width , y_axis_height ) , gc ) ; image_area = new Rectangle ( y_axis_width , 0 , bounds . width - y_axis_width , bounds . height - x_axis_height ) ; if ( show_colormap ) { colorbar_area = new Rectangle ( bounds . width - colorbar_size , colorbar_size , colorbar_size , image_area . height - 2 * colorbar_size ) ; final int cb_axis_width = colorbar_axis . getDesiredPixelSize ( colorbar_area , gc ) ; colorbar_axis . setBounds ( colorbar_area . x , colorbar_area . y , cb_axis_width , colorbar_area . height ) ; colorbar_area . x += cb_axis_width ; colorbar_area . width -= cb_axis_width ; if ( image_area . width > cb_axis_width + colorbar_area . width ) image_area . width -= cb_axis_width + colorbar_area . width ; else colorbar_area = null ; } else colorbar_area = null ; y_axis . setBounds ( 0 , 0 , y_axis_width , image_area . height ) ; x_axis . setBounds ( image_area . x , image_area . height , image_area . width , x_axis_height ) ; } +","private void computeLayout ( final Graphics2D gc , final Rectangle bounds , final double min , final double max ) { logger . log ( Level . FINE , ""computeLayout"" ) ; final int x_axis_height = x_axis . getDesiredPixelSize ( bounds , gc ) ; final int y_axis_height = bounds . height - x_axis_height ; final int y_axis_width = y_axis . getDesiredPixelSize ( new Rectangle ( 0 , 0 , bounds . width , y_axis_height ) , gc ) ; image_area = new Rectangle ( y_axis_width , 0 , bounds . 
width - y_axis_width , bounds . height - x_axis_height ) ; if ( show_colormap ) { colorbar_area = new Rectangle ( bounds . width - colorbar_size , colorbar_size , colorbar_size , image_area . height - 2 * colorbar_size ) ; final int cb_axis_width = colorbar_axis . getDesiredPixelSize ( colorbar_area , gc ) ; colorbar_axis . setBounds ( colorbar_area . x , colorbar_area . y , cb_axis_width , colorbar_area . height ) ; colorbar_area . x += cb_axis_width ; colorbar_area . width -= cb_axis_width ; if ( image_area . width > cb_axis_width + colorbar_area . width ) image_area . width -= cb_axis_width + colorbar_area . width ; else colorbar_area = null ; } else colorbar_area = null ; y_axis . setBounds ( 0 , 0 , y_axis_width , image_area . height ) ; x_axis . setBounds ( image_area . x , image_area . height , image_area . width , x_axis_height ) ; } +" +735,"public void setDescription ( String description ) { if ( LOG . isDebugEnabled ( ) ) } +","public void setDescription ( String description ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( ""{} is {}"" , this , description ) ; } +" +736,"private static void startCleanupThread ( final AccumuloConfiguration conf , final Supplier < Map < String , String > > contextConfigSupplier ) { ThreadPools . createGeneralScheduledExecutorService ( conf ) . scheduleWithFixedDelay ( Threads . createNamedRunnable ( className + ""-cleanup"" , ( ) -> { Set < String > contextsInUse = contextConfigSupplier . get ( ) . keySet ( ) . stream ( ) . map ( p -> p . substring ( Property . VFS_CONTEXT_CLASSPATH_PROPERTY . getKey ( ) . length ( ) ) ) . collect ( Collectors . toSet ( ) ) ; LOG . trace ( ""{}-cleanup thread, contexts in use: {}"" , className , contextsInUse ) ; AccumuloVFSClassLoader . removeUnusedContexts ( contextsInUse ) ; } ) , 60_000 , 60_000 , TimeUnit . MILLISECONDS ) ; LOG . debug ( ""Context cleanup timer started at 60s intervals"" ) ; } +","private static void startCleanupThread ( final AccumuloConfiguration conf , final Supplier < Map < String , String > > contextConfigSupplier ) { ThreadPools . createGeneralScheduledExecutorService ( conf ) . scheduleWithFixedDelay ( Threads . createNamedRunnable ( className + ""-cleanup"" , ( ) -> { LOG . trace ( ""{}-cleanup thread, properties: {}"" , className , conf ) ; Set < String > contextsInUse = contextConfigSupplier . get ( ) . keySet ( ) . stream ( ) . map ( p -> p . substring ( Property . VFS_CONTEXT_CLASSPATH_PROPERTY . getKey ( ) . length ( ) ) ) . collect ( Collectors . toSet ( ) ) ; LOG . trace ( ""{}-cleanup thread, contexts in use: {}"" , className , contextsInUse ) ; AccumuloVFSClassLoader . removeUnusedContexts ( contextsInUse ) ; } ) , 60_000 , 60_000 , TimeUnit . MILLISECONDS ) ; LOG . debug ( ""Context cleanup timer started at 60s intervals"" ) ; } +" +737,"private static void startCleanupThread ( final AccumuloConfiguration conf , final Supplier < Map < String , String > > contextConfigSupplier ) { ThreadPools . createGeneralScheduledExecutorService ( conf ) . scheduleWithFixedDelay ( Threads . createNamedRunnable ( className + ""-cleanup"" , ( ) -> { LOG . trace ( ""{}-cleanup thread, properties: {}"" , className , conf ) ; Set < String > contextsInUse = contextConfigSupplier . get ( ) . keySet ( ) . stream ( ) . map ( p -> p . substring ( Property . VFS_CONTEXT_CLASSPATH_PROPERTY . getKey ( ) . length ( ) ) ) . collect ( Collectors . toSet ( ) ) ; AccumuloVFSClassLoader . removeUnusedContexts ( contextsInUse ) ; } ) , 60_000 , 60_000 , TimeUnit . MILLISECONDS ) ; LOG . 
debug ( ""Context cleanup timer started at 60s intervals"" ) ; } +","private static void startCleanupThread ( final AccumuloConfiguration conf , final Supplier < Map < String , String > > contextConfigSupplier ) { ThreadPools . createGeneralScheduledExecutorService ( conf ) . scheduleWithFixedDelay ( Threads . createNamedRunnable ( className + ""-cleanup"" , ( ) -> { LOG . trace ( ""{}-cleanup thread, properties: {}"" , className , conf ) ; Set < String > contextsInUse = contextConfigSupplier . get ( ) . keySet ( ) . stream ( ) . map ( p -> p . substring ( Property . VFS_CONTEXT_CLASSPATH_PROPERTY . getKey ( ) . length ( ) ) ) . collect ( Collectors . toSet ( ) ) ; LOG . trace ( ""{}-cleanup thread, contexts in use: {}"" , className , contextsInUse ) ; AccumuloVFSClassLoader . removeUnusedContexts ( contextsInUse ) ; } ) , 60_000 , 60_000 , TimeUnit . MILLISECONDS ) ; LOG . debug ( ""Context cleanup timer started at 60s intervals"" ) ; } +" +738,"private static void startCleanupThread ( final AccumuloConfiguration conf , final Supplier < Map < String , String > > contextConfigSupplier ) { ThreadPools . createGeneralScheduledExecutorService ( conf ) . scheduleWithFixedDelay ( Threads . createNamedRunnable ( className + ""-cleanup"" , ( ) -> { LOG . trace ( ""{}-cleanup thread, properties: {}"" , className , conf ) ; Set < String > contextsInUse = contextConfigSupplier . get ( ) . keySet ( ) . stream ( ) . map ( p -> p . substring ( Property . VFS_CONTEXT_CLASSPATH_PROPERTY . getKey ( ) . length ( ) ) ) . collect ( Collectors . toSet ( ) ) ; LOG . trace ( ""{}-cleanup thread, contexts in use: {}"" , className , contextsInUse ) ; AccumuloVFSClassLoader . removeUnusedContexts ( contextsInUse ) ; } ) , 60_000 , 60_000 , TimeUnit . MILLISECONDS ) ; } +","private static void startCleanupThread ( final AccumuloConfiguration conf , final Supplier < Map < String , String > > contextConfigSupplier ) { ThreadPools . createGeneralScheduledExecutorService ( conf ) . scheduleWithFixedDelay ( Threads . createNamedRunnable ( className + ""-cleanup"" , ( ) -> { LOG . trace ( ""{}-cleanup thread, properties: {}"" , className , conf ) ; Set < String > contextsInUse = contextConfigSupplier . get ( ) . keySet ( ) . stream ( ) . map ( p -> p . substring ( Property . VFS_CONTEXT_CLASSPATH_PROPERTY . getKey ( ) . length ( ) ) ) . collect ( Collectors . toSet ( ) ) ; LOG . trace ( ""{}-cleanup thread, contexts in use: {}"" , className , contextsInUse ) ; AccumuloVFSClassLoader . removeUnusedContexts ( contextsInUse ) ; } ) , 60_000 , 60_000 , TimeUnit . MILLISECONDS ) ; LOG . debug ( ""Context cleanup timer started at 60s intervals"" ) ; } +" +739,"@ Transactional ( propagation = Propagation . NOT_SUPPORTED ) @ Override public void markAsDeleted ( final O obj ) throws AccessException { Validate . notNull ( obj ) ; if ( obj . getId ( ) == null ) { final String msg = ""Could not delete object unless id is not given:"" + obj . toString ( ) ; throw new RuntimeException ( msg ) ; } accessChecker . checkRestrictedOrDemoUser ( ) ; final O dbObj = em . find ( clazz , obj . getId ( ) ) ; checkLoggedInUserDeleteAccess ( obj , dbObj ) ; internalMarkAsDeleted ( obj ) ; } +","@ Transactional ( propagation = Propagation . NOT_SUPPORTED ) @ Override public void markAsDeleted ( final O obj ) throws AccessException { Validate . notNull ( obj ) ; if ( obj . getId ( ) == null ) { final String msg = ""Could not delete object unless id is not given:"" + obj . toString ( ) ; log . 
error ( msg ) ; throw new RuntimeException ( msg ) ; } accessChecker . checkRestrictedOrDemoUser ( ) ; final O dbObj = em . find ( clazz , obj . getId ( ) ) ; checkLoggedInUserDeleteAccess ( obj , dbObj ) ; internalMarkAsDeleted ( obj ) ; } +" +740,"protected void processFilter ( HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse , FilterChain filterChain ) throws Exception { httpServletRequest . setAttribute ( SKIP_FILTER , Boolean . TRUE ) ; String key = getCacheKey ( httpServletRequest ) ; String pAuth = httpServletRequest . getParameter ( ""p_auth"" ) ; if ( Validator . isNotNull ( pAuth ) ) { try { AuthTokenUtil . checkCSRFToken ( httpServletRequest , CacheFilter . class . getName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isDebugEnabled ( ) ) { } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } key = StringUtil . replace ( key , StringUtil . toUpperCase ( pAuth ) , ""VALID"" ) ; } long companyId = PortalInstances . getCompanyId ( httpServletRequest ) ; CacheResponseData cacheResponseData = CacheUtil . getCacheResponseData ( companyId , key ) ; if ( ( cacheResponseData == null ) || ! cacheResponseData . isValid ( ) ) { if ( ! _isValidCache ( cacheResponseData ) || ! isCacheableData ( companyId , httpServletRequest ) ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key ) ; } if ( cacheResponseData == null ) { if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request with invalid state "" + key ) ; } CacheUtil . putCacheResponseData ( companyId , key , new CacheResponseData ( ) ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request "" + key ) ; } BufferCacheServletResponse bufferCacheServletResponse = new BufferCacheServletResponse ( httpServletResponse ) ; processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , bufferCacheServletResponse , filterChain ) ; cacheResponseData = new CacheResponseData ( bufferCacheServletResponse ) ; LastPath lastPath = ( LastPath ) httpServletRequest . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { cacheResponseData . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } String cacheControl = GetterUtil . getString ( bufferCacheServletResponse . getHeader ( HttpHeaders . CACHE_CONTROL ) ) ; if ( isCacheableResponse ( bufferCacheServletResponse ) && ! cacheControl . contains ( HttpHeaders . PRAGMA_NO_CACHE_VALUE ) && isCacheableRequest ( httpServletRequest ) ) { CacheUtil . putCacheResponseData ( companyId , key , cacheResponseData ) ; } } else { LastPath lastPath = ( LastPath ) cacheResponseData . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { HttpSession session = httpServletRequest . getSession ( ) ; session . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } } CacheResponseUtil . write ( httpServletResponse , cacheResponseData ) ; } +","protected void processFilter ( HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse , FilterChain filterChain ) throws Exception { httpServletRequest . setAttribute ( SKIP_FILTER , Boolean . TRUE ) ; String key = getCacheKey ( httpServletRequest ) ; String pAuth = httpServletRequest . getParameter ( ""p_auth"" ) ; if ( Validator . isNotNull ( pAuth ) ) { try { AuthTokenUtil . checkCSRFToken ( httpServletRequest , CacheFilter . class . 
getName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key + "", invalid token received"" , portalException ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } key = StringUtil . replace ( key , StringUtil . toUpperCase ( pAuth ) , ""VALID"" ) ; } long companyId = PortalInstances . getCompanyId ( httpServletRequest ) ; CacheResponseData cacheResponseData = CacheUtil . getCacheResponseData ( companyId , key ) ; if ( ( cacheResponseData == null ) || ! cacheResponseData . isValid ( ) ) { if ( ! _isValidCache ( cacheResponseData ) || ! isCacheableData ( companyId , httpServletRequest ) ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key ) ; } if ( cacheResponseData == null ) { if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request with invalid state "" + key ) ; } CacheUtil . putCacheResponseData ( companyId , key , new CacheResponseData ( ) ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request "" + key ) ; } BufferCacheServletResponse bufferCacheServletResponse = new BufferCacheServletResponse ( httpServletResponse ) ; processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , bufferCacheServletResponse , filterChain ) ; cacheResponseData = new CacheResponseData ( bufferCacheServletResponse ) ; LastPath lastPath = ( LastPath ) httpServletRequest . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { cacheResponseData . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } String cacheControl = GetterUtil . getString ( bufferCacheServletResponse . getHeader ( HttpHeaders . CACHE_CONTROL ) ) ; if ( isCacheableResponse ( bufferCacheServletResponse ) && ! cacheControl . contains ( HttpHeaders . PRAGMA_NO_CACHE_VALUE ) && isCacheableRequest ( httpServletRequest ) ) { CacheUtil . putCacheResponseData ( companyId , key , cacheResponseData ) ; } } else { LastPath lastPath = ( LastPath ) cacheResponseData . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { HttpSession session = httpServletRequest . getSession ( ) ; session . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } } CacheResponseUtil . write ( httpServletResponse , cacheResponseData ) ; } +" +741,"protected void processFilter ( HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse , FilterChain filterChain ) throws Exception { httpServletRequest . setAttribute ( SKIP_FILTER , Boolean . TRUE ) ; String key = getCacheKey ( httpServletRequest ) ; String pAuth = httpServletRequest . getParameter ( ""p_auth"" ) ; if ( Validator . isNotNull ( pAuth ) ) { try { AuthTokenUtil . checkCSRFToken ( httpServletRequest , CacheFilter . class . getName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key + "", invalid token received"" , portalException ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } key = StringUtil . replace ( key , StringUtil . toUpperCase ( pAuth ) , ""VALID"" ) ; } long companyId = PortalInstances . getCompanyId ( httpServletRequest ) ; CacheResponseData cacheResponseData = CacheUtil . 
getCacheResponseData ( companyId , key ) ; if ( ( cacheResponseData == null ) || ! cacheResponseData . isValid ( ) ) { if ( ! _isValidCache ( cacheResponseData ) || ! isCacheableData ( companyId , httpServletRequest ) ) { if ( _log . isDebugEnabled ( ) ) { } if ( cacheResponseData == null ) { if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request with invalid state "" + key ) ; } CacheUtil . putCacheResponseData ( companyId , key , new CacheResponseData ( ) ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request "" + key ) ; } BufferCacheServletResponse bufferCacheServletResponse = new BufferCacheServletResponse ( httpServletResponse ) ; processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , bufferCacheServletResponse , filterChain ) ; cacheResponseData = new CacheResponseData ( bufferCacheServletResponse ) ; LastPath lastPath = ( LastPath ) httpServletRequest . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { cacheResponseData . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } String cacheControl = GetterUtil . getString ( bufferCacheServletResponse . getHeader ( HttpHeaders . CACHE_CONTROL ) ) ; if ( isCacheableResponse ( bufferCacheServletResponse ) && ! cacheControl . contains ( HttpHeaders . PRAGMA_NO_CACHE_VALUE ) && isCacheableRequest ( httpServletRequest ) ) { CacheUtil . putCacheResponseData ( companyId , key , cacheResponseData ) ; } } else { LastPath lastPath = ( LastPath ) cacheResponseData . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { HttpSession session = httpServletRequest . getSession ( ) ; session . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } } CacheResponseUtil . write ( httpServletResponse , cacheResponseData ) ; } +","protected void processFilter ( HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse , FilterChain filterChain ) throws Exception { httpServletRequest . setAttribute ( SKIP_FILTER , Boolean . TRUE ) ; String key = getCacheKey ( httpServletRequest ) ; String pAuth = httpServletRequest . getParameter ( ""p_auth"" ) ; if ( Validator . isNotNull ( pAuth ) ) { try { AuthTokenUtil . checkCSRFToken ( httpServletRequest , CacheFilter . class . getName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key + "", invalid token received"" , portalException ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } key = StringUtil . replace ( key , StringUtil . toUpperCase ( pAuth ) , ""VALID"" ) ; } long companyId = PortalInstances . getCompanyId ( httpServletRequest ) ; CacheResponseData cacheResponseData = CacheUtil . getCacheResponseData ( companyId , key ) ; if ( ( cacheResponseData == null ) || ! cacheResponseData . isValid ( ) ) { if ( ! _isValidCache ( cacheResponseData ) || ! isCacheableData ( companyId , httpServletRequest ) ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key ) ; } if ( cacheResponseData == null ) { if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request with invalid state "" + key ) ; } CacheUtil . putCacheResponseData ( companyId , key , new CacheResponseData ( ) ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } if ( _log . 
isInfoEnabled ( ) ) { _log . info ( ""Caching request "" + key ) ; } BufferCacheServletResponse bufferCacheServletResponse = new BufferCacheServletResponse ( httpServletResponse ) ; processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , bufferCacheServletResponse , filterChain ) ; cacheResponseData = new CacheResponseData ( bufferCacheServletResponse ) ; LastPath lastPath = ( LastPath ) httpServletRequest . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { cacheResponseData . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } String cacheControl = GetterUtil . getString ( bufferCacheServletResponse . getHeader ( HttpHeaders . CACHE_CONTROL ) ) ; if ( isCacheableResponse ( bufferCacheServletResponse ) && ! cacheControl . contains ( HttpHeaders . PRAGMA_NO_CACHE_VALUE ) && isCacheableRequest ( httpServletRequest ) ) { CacheUtil . putCacheResponseData ( companyId , key , cacheResponseData ) ; } } else { LastPath lastPath = ( LastPath ) cacheResponseData . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { HttpSession session = httpServletRequest . getSession ( ) ; session . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } } CacheResponseUtil . write ( httpServletResponse , cacheResponseData ) ; } +" +742,"protected void processFilter ( HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse , FilterChain filterChain ) throws Exception { httpServletRequest . setAttribute ( SKIP_FILTER , Boolean . TRUE ) ; String key = getCacheKey ( httpServletRequest ) ; String pAuth = httpServletRequest . getParameter ( ""p_auth"" ) ; if ( Validator . isNotNull ( pAuth ) ) { try { AuthTokenUtil . checkCSRFToken ( httpServletRequest , CacheFilter . class . getName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key + "", invalid token received"" , portalException ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } key = StringUtil . replace ( key , StringUtil . toUpperCase ( pAuth ) , ""VALID"" ) ; } long companyId = PortalInstances . getCompanyId ( httpServletRequest ) ; CacheResponseData cacheResponseData = CacheUtil . getCacheResponseData ( companyId , key ) ; if ( ( cacheResponseData == null ) || ! cacheResponseData . isValid ( ) ) { if ( ! _isValidCache ( cacheResponseData ) || ! isCacheableData ( companyId , httpServletRequest ) ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key ) ; } if ( cacheResponseData == null ) { if ( _log . isInfoEnabled ( ) ) { } CacheUtil . putCacheResponseData ( companyId , key , new CacheResponseData ( ) ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request "" + key ) ; } BufferCacheServletResponse bufferCacheServletResponse = new BufferCacheServletResponse ( httpServletResponse ) ; processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , bufferCacheServletResponse , filterChain ) ; cacheResponseData = new CacheResponseData ( bufferCacheServletResponse ) ; LastPath lastPath = ( LastPath ) httpServletRequest . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { cacheResponseData . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } String cacheControl = GetterUtil . getString ( bufferCacheServletResponse . getHeader ( HttpHeaders . 
CACHE_CONTROL ) ) ; if ( isCacheableResponse ( bufferCacheServletResponse ) && ! cacheControl . contains ( HttpHeaders . PRAGMA_NO_CACHE_VALUE ) && isCacheableRequest ( httpServletRequest ) ) { CacheUtil . putCacheResponseData ( companyId , key , cacheResponseData ) ; } } else { LastPath lastPath = ( LastPath ) cacheResponseData . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { HttpSession session = httpServletRequest . getSession ( ) ; session . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } } CacheResponseUtil . write ( httpServletResponse , cacheResponseData ) ; } +","protected void processFilter ( HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse , FilterChain filterChain ) throws Exception { httpServletRequest . setAttribute ( SKIP_FILTER , Boolean . TRUE ) ; String key = getCacheKey ( httpServletRequest ) ; String pAuth = httpServletRequest . getParameter ( ""p_auth"" ) ; if ( Validator . isNotNull ( pAuth ) ) { try { AuthTokenUtil . checkCSRFToken ( httpServletRequest , CacheFilter . class . getName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key + "", invalid token received"" , portalException ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } key = StringUtil . replace ( key , StringUtil . toUpperCase ( pAuth ) , ""VALID"" ) ; } long companyId = PortalInstances . getCompanyId ( httpServletRequest ) ; CacheResponseData cacheResponseData = CacheUtil . getCacheResponseData ( companyId , key ) ; if ( ( cacheResponseData == null ) || ! cacheResponseData . isValid ( ) ) { if ( ! _isValidCache ( cacheResponseData ) || ! isCacheableData ( companyId , httpServletRequest ) ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key ) ; } if ( cacheResponseData == null ) { if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request with invalid state "" + key ) ; } CacheUtil . putCacheResponseData ( companyId , key , new CacheResponseData ( ) ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request "" + key ) ; } BufferCacheServletResponse bufferCacheServletResponse = new BufferCacheServletResponse ( httpServletResponse ) ; processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , bufferCacheServletResponse , filterChain ) ; cacheResponseData = new CacheResponseData ( bufferCacheServletResponse ) ; LastPath lastPath = ( LastPath ) httpServletRequest . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { cacheResponseData . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } String cacheControl = GetterUtil . getString ( bufferCacheServletResponse . getHeader ( HttpHeaders . CACHE_CONTROL ) ) ; if ( isCacheableResponse ( bufferCacheServletResponse ) && ! cacheControl . contains ( HttpHeaders . PRAGMA_NO_CACHE_VALUE ) && isCacheableRequest ( httpServletRequest ) ) { CacheUtil . putCacheResponseData ( companyId , key , cacheResponseData ) ; } } else { LastPath lastPath = ( LastPath ) cacheResponseData . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { HttpSession session = httpServletRequest . getSession ( ) ; session . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } } CacheResponseUtil . 
write ( httpServletResponse , cacheResponseData ) ; } +" +743,"protected void processFilter ( HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse , FilterChain filterChain ) throws Exception { httpServletRequest . setAttribute ( SKIP_FILTER , Boolean . TRUE ) ; String key = getCacheKey ( httpServletRequest ) ; String pAuth = httpServletRequest . getParameter ( ""p_auth"" ) ; if ( Validator . isNotNull ( pAuth ) ) { try { AuthTokenUtil . checkCSRFToken ( httpServletRequest , CacheFilter . class . getName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key + "", invalid token received"" , portalException ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } key = StringUtil . replace ( key , StringUtil . toUpperCase ( pAuth ) , ""VALID"" ) ; } long companyId = PortalInstances . getCompanyId ( httpServletRequest ) ; CacheResponseData cacheResponseData = CacheUtil . getCacheResponseData ( companyId , key ) ; if ( ( cacheResponseData == null ) || ! cacheResponseData . isValid ( ) ) { if ( ! _isValidCache ( cacheResponseData ) || ! isCacheableData ( companyId , httpServletRequest ) ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key ) ; } if ( cacheResponseData == null ) { if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request with invalid state "" + key ) ; } CacheUtil . putCacheResponseData ( companyId , key , new CacheResponseData ( ) ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } if ( _log . isInfoEnabled ( ) ) { } BufferCacheServletResponse bufferCacheServletResponse = new BufferCacheServletResponse ( httpServletResponse ) ; processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , bufferCacheServletResponse , filterChain ) ; cacheResponseData = new CacheResponseData ( bufferCacheServletResponse ) ; LastPath lastPath = ( LastPath ) httpServletRequest . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { cacheResponseData . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } String cacheControl = GetterUtil . getString ( bufferCacheServletResponse . getHeader ( HttpHeaders . CACHE_CONTROL ) ) ; if ( isCacheableResponse ( bufferCacheServletResponse ) && ! cacheControl . contains ( HttpHeaders . PRAGMA_NO_CACHE_VALUE ) && isCacheableRequest ( httpServletRequest ) ) { CacheUtil . putCacheResponseData ( companyId , key , cacheResponseData ) ; } } else { LastPath lastPath = ( LastPath ) cacheResponseData . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { HttpSession session = httpServletRequest . getSession ( ) ; session . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } } CacheResponseUtil . write ( httpServletResponse , cacheResponseData ) ; } +","protected void processFilter ( HttpServletRequest httpServletRequest , HttpServletResponse httpServletResponse , FilterChain filterChain ) throws Exception { httpServletRequest . setAttribute ( SKIP_FILTER , Boolean . TRUE ) ; String key = getCacheKey ( httpServletRequest ) ; String pAuth = httpServletRequest . getParameter ( ""p_auth"" ) ; if ( Validator . isNotNull ( pAuth ) ) { try { AuthTokenUtil . checkCSRFToken ( httpServletRequest , CacheFilter . class . getName ( ) ) ; } catch ( PortalException portalException ) { if ( _log . isDebugEnabled ( ) ) { _log . 
debug ( ""Request is not cacheable "" + key + "", invalid token received"" , portalException ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } key = StringUtil . replace ( key , StringUtil . toUpperCase ( pAuth ) , ""VALID"" ) ; } long companyId = PortalInstances . getCompanyId ( httpServletRequest ) ; CacheResponseData cacheResponseData = CacheUtil . getCacheResponseData ( companyId , key ) ; if ( ( cacheResponseData == null ) || ! cacheResponseData . isValid ( ) ) { if ( ! _isValidCache ( cacheResponseData ) || ! isCacheableData ( companyId , httpServletRequest ) ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ""Request is not cacheable "" + key ) ; } if ( cacheResponseData == null ) { if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request with invalid state "" + key ) ; } CacheUtil . putCacheResponseData ( companyId , key , new CacheResponseData ( ) ) ; } processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , httpServletResponse , filterChain ) ; return ; } if ( _log . isInfoEnabled ( ) ) { _log . info ( ""Caching request "" + key ) ; } BufferCacheServletResponse bufferCacheServletResponse = new BufferCacheServletResponse ( httpServletResponse ) ; processFilter ( CacheFilter . class . getName ( ) , httpServletRequest , bufferCacheServletResponse , filterChain ) ; cacheResponseData = new CacheResponseData ( bufferCacheServletResponse ) ; LastPath lastPath = ( LastPath ) httpServletRequest . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { cacheResponseData . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } String cacheControl = GetterUtil . getString ( bufferCacheServletResponse . getHeader ( HttpHeaders . CACHE_CONTROL ) ) ; if ( isCacheableResponse ( bufferCacheServletResponse ) && ! cacheControl . contains ( HttpHeaders . PRAGMA_NO_CACHE_VALUE ) && isCacheableRequest ( httpServletRequest ) ) { CacheUtil . putCacheResponseData ( companyId , key , cacheResponseData ) ; } } else { LastPath lastPath = ( LastPath ) cacheResponseData . getAttribute ( WebKeys . LAST_PATH ) ; if ( lastPath != null ) { HttpSession session = httpServletRequest . getSession ( ) ; session . setAttribute ( WebKeys . LAST_PATH , lastPath ) ; } } CacheResponseUtil . write ( httpServletResponse , cacheResponseData ) ; } +" +744,"public static void createWithParents ( ZooKeeper zk , String node , byte [ ] data ) throws KeeperException { try { if ( node == null ) { return ; } zk . create ( node , data , createACL ( zk , node ) , CreateMode . PERSISTENT ) ; } catch ( KeeperException . NodeExistsException nee ) { return ; } catch ( KeeperException . NoNodeException nne ) { createWithParents ( zk , getParent ( node ) ) ; createWithParents ( zk , node , data ) ; } catch ( InterruptedException ie ) { } } +","public static void createWithParents ( ZooKeeper zk , String node , byte [ ] data ) throws KeeperException { try { if ( node == null ) { return ; } zk . create ( node , data , createACL ( zk , node ) , CreateMode . PERSISTENT ) ; } catch ( KeeperException . NodeExistsException nee ) { return ; } catch ( KeeperException . NoNodeException nne ) { createWithParents ( zk , getParent ( node ) ) ; createWithParents ( zk , node , data ) ; } catch ( InterruptedException ie ) { LOG . warn ( ""Fail to create node: "" + node , ie ) ; } } +" +745,"@ POST @ Path ( ""/updateMetadata"" ) @ Produces ( MediaType . 
TEXT_XML ) @ RestQuery ( name = ""update"" , description = ""Update metadata of an published media package. "" + ""This endpoint does not update any media files. If you want to update the whole media package, use the "" + ""publish endpoint."" , returnDescription = ""The job that can be used to update the metadata of an media package"" , restParameters = { @ RestParameter ( name = ""mediapackage"" , isRequired = true , description = ""The updated media package"" , type = Type . TEXT ) , @ RestParameter ( name = ""channel"" , isRequired = true , description = ""The channel name"" , type = Type . STRING ) , @ RestParameter ( name = ""flavors"" , isRequired = true , description = ""The element flavors to be updated, separated by '"" + SEPARATOR + ""'"" , type = Type . STRING ) , @ RestParameter ( name = ""tags"" , isRequired = true , description = ""The element tags to be updated, separated by '"" + SEPARATOR + ""'"" , type = Type . STRING ) , @ RestParameter ( name = ""checkAvailability"" , isRequired = false , description = ""Whether to check for availability"" , type = Type . BOOLEAN , defaultValue = ""true"" ) } , responses = { @ RestResponse ( responseCode = SC_OK , description = ""An XML representation of the publication job"" ) } ) public Response updateMetadata ( @ FormParam ( ""mediapackage"" ) String mediaPackageXml , @ FormParam ( ""channel"" ) String channel , @ FormParam ( ""flavors"" ) String flavors , @ FormParam ( ""tags"" ) String tags , @ FormParam ( ""checkAvailability"" ) @ DefaultValue ( ""true"" ) boolean checkAvailability ) throws Exception { final Job job ; try { final MediaPackage mediaPackage = MediaPackageParser . getFromXml ( mediaPackageXml ) ; job = service . updateMetadata ( mediaPackage , channel , split ( flavors ) , split ( tags ) , checkAvailability ) ; } catch ( IllegalArgumentException e ) { return Response . status ( Status . BAD_REQUEST ) . build ( ) ; } catch ( Exception e ) { logger . warn ( ""Error publishing element"" , e ) ; return Response . serverError ( ) . build ( ) ; } return Response . ok ( new JaxbJob ( job ) ) . build ( ) ; } +","@ POST @ Path ( ""/updateMetadata"" ) @ Produces ( MediaType . TEXT_XML ) @ RestQuery ( name = ""update"" , description = ""Update metadata of an published media package. "" + ""This endpoint does not update any media files. If you want to update the whole media package, use the "" + ""publish endpoint."" , returnDescription = ""The job that can be used to update the metadata of an media package"" , restParameters = { @ RestParameter ( name = ""mediapackage"" , isRequired = true , description = ""The updated media package"" , type = Type . TEXT ) , @ RestParameter ( name = ""channel"" , isRequired = true , description = ""The channel name"" , type = Type . STRING ) , @ RestParameter ( name = ""flavors"" , isRequired = true , description = ""The element flavors to be updated, separated by '"" + SEPARATOR + ""'"" , type = Type . STRING ) , @ RestParameter ( name = ""tags"" , isRequired = true , description = ""The element tags to be updated, separated by '"" + SEPARATOR + ""'"" , type = Type . STRING ) , @ RestParameter ( name = ""checkAvailability"" , isRequired = false , description = ""Whether to check for availability"" , type = Type . 
BOOLEAN , defaultValue = ""true"" ) } , responses = { @ RestResponse ( responseCode = SC_OK , description = ""An XML representation of the publication job"" ) } ) public Response updateMetadata ( @ FormParam ( ""mediapackage"" ) String mediaPackageXml , @ FormParam ( ""channel"" ) String channel , @ FormParam ( ""flavors"" ) String flavors , @ FormParam ( ""tags"" ) String tags , @ FormParam ( ""checkAvailability"" ) @ DefaultValue ( ""true"" ) boolean checkAvailability ) throws Exception { final Job job ; try { final MediaPackage mediaPackage = MediaPackageParser . getFromXml ( mediaPackageXml ) ; job = service . updateMetadata ( mediaPackage , channel , split ( flavors ) , split ( tags ) , checkAvailability ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Unable to create an update metadata job"" , e ) ; return Response . status ( Status . BAD_REQUEST ) . build ( ) ; } catch ( Exception e ) { logger . warn ( ""Error publishing element"" , e ) ; return Response . serverError ( ) . build ( ) ; } return Response . ok ( new JaxbJob ( job ) ) . build ( ) ; } +" +746,"@ POST @ Path ( ""/updateMetadata"" ) @ Produces ( MediaType . TEXT_XML ) @ RestQuery ( name = ""update"" , description = ""Update metadata of an published media package. "" + ""This endpoint does not update any media files. If you want to update the whole media package, use the "" + ""publish endpoint."" , returnDescription = ""The job that can be used to update the metadata of an media package"" , restParameters = { @ RestParameter ( name = ""mediapackage"" , isRequired = true , description = ""The updated media package"" , type = Type . TEXT ) , @ RestParameter ( name = ""channel"" , isRequired = true , description = ""The channel name"" , type = Type . STRING ) , @ RestParameter ( name = ""flavors"" , isRequired = true , description = ""The element flavors to be updated, separated by '"" + SEPARATOR + ""'"" , type = Type . STRING ) , @ RestParameter ( name = ""tags"" , isRequired = true , description = ""The element tags to be updated, separated by '"" + SEPARATOR + ""'"" , type = Type . STRING ) , @ RestParameter ( name = ""checkAvailability"" , isRequired = false , description = ""Whether to check for availability"" , type = Type . BOOLEAN , defaultValue = ""true"" ) } , responses = { @ RestResponse ( responseCode = SC_OK , description = ""An XML representation of the publication job"" ) } ) public Response updateMetadata ( @ FormParam ( ""mediapackage"" ) String mediaPackageXml , @ FormParam ( ""channel"" ) String channel , @ FormParam ( ""flavors"" ) String flavors , @ FormParam ( ""tags"" ) String tags , @ FormParam ( ""checkAvailability"" ) @ DefaultValue ( ""true"" ) boolean checkAvailability ) throws Exception { final Job job ; try { final MediaPackage mediaPackage = MediaPackageParser . getFromXml ( mediaPackageXml ) ; job = service . updateMetadata ( mediaPackage , channel , split ( flavors ) , split ( tags ) , checkAvailability ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Unable to create an update metadata job"" , e ) ; return Response . status ( Status . BAD_REQUEST ) . build ( ) ; } catch ( Exception e ) { return Response . serverError ( ) . build ( ) ; } return Response . ok ( new JaxbJob ( job ) ) . build ( ) ; } +","@ POST @ Path ( ""/updateMetadata"" ) @ Produces ( MediaType . TEXT_XML ) @ RestQuery ( name = ""update"" , description = ""Update metadata of an published media package. "" + ""This endpoint does not update any media files. 
If you want to update the whole media package, use the "" + ""publish endpoint."" , returnDescription = ""The job that can be used to update the metadata of an media package"" , restParameters = { @ RestParameter ( name = ""mediapackage"" , isRequired = true , description = ""The updated media package"" , type = Type . TEXT ) , @ RestParameter ( name = ""channel"" , isRequired = true , description = ""The channel name"" , type = Type . STRING ) , @ RestParameter ( name = ""flavors"" , isRequired = true , description = ""The element flavors to be updated, separated by '"" + SEPARATOR + ""'"" , type = Type . STRING ) , @ RestParameter ( name = ""tags"" , isRequired = true , description = ""The element tags to be updated, separated by '"" + SEPARATOR + ""'"" , type = Type . STRING ) , @ RestParameter ( name = ""checkAvailability"" , isRequired = false , description = ""Whether to check for availability"" , type = Type . BOOLEAN , defaultValue = ""true"" ) } , responses = { @ RestResponse ( responseCode = SC_OK , description = ""An XML representation of the publication job"" ) } ) public Response updateMetadata ( @ FormParam ( ""mediapackage"" ) String mediaPackageXml , @ FormParam ( ""channel"" ) String channel , @ FormParam ( ""flavors"" ) String flavors , @ FormParam ( ""tags"" ) String tags , @ FormParam ( ""checkAvailability"" ) @ DefaultValue ( ""true"" ) boolean checkAvailability ) throws Exception { final Job job ; try { final MediaPackage mediaPackage = MediaPackageParser . getFromXml ( mediaPackageXml ) ; job = service . updateMetadata ( mediaPackage , channel , split ( flavors ) , split ( tags ) , checkAvailability ) ; } catch ( IllegalArgumentException e ) { logger . debug ( ""Unable to create an update metadata job"" , e ) ; return Response . status ( Status . BAD_REQUEST ) . build ( ) ; } catch ( Exception e ) { logger . warn ( ""Error publishing element"" , e ) ; return Response . serverError ( ) . build ( ) ; } return Response . ok ( new JaxbJob ( job ) ) . build ( ) ; } +" +747,"public boolean apply ( UUID nodeId , Object msg ) { assertEquals ( ignite2 , g ) ; try { if ( ! nodeId . equals ( ignite1 . cluster ( ) . localNode ( ) . id ( ) ) ) { log . error ( ""Unexpected sender node: "" + nodeId ) ; error . set ( true ) ; return false ; } if ( ! MSG_3 . equals ( msg ) ) { log . error ( ""Unexpected message "" + msg + "" for topic: default"" ) ; error . set ( true ) ; return false ; } rcvMsgs . add ( msg ) ; return true ; } finally { rcvLatch . countDown ( ) ; } } +","public boolean apply ( UUID nodeId , Object msg ) { assertEquals ( ignite2 , g ) ; try { log . info ( ""Received new message [msg="" + msg + "", senderNodeId="" + nodeId + "", topic=default]"" ) ; if ( ! nodeId . equals ( ignite1 . cluster ( ) . localNode ( ) . id ( ) ) ) { log . error ( ""Unexpected sender node: "" + nodeId ) ; error . set ( true ) ; return false ; } if ( ! MSG_3 . equals ( msg ) ) { log . error ( ""Unexpected message "" + msg + "" for topic: default"" ) ; error . set ( true ) ; return false ; } rcvMsgs . add ( msg ) ; return true ; } finally { rcvLatch . countDown ( ) ; } } +" +748,"public boolean apply ( UUID nodeId , Object msg ) { assertEquals ( ignite2 , g ) ; try { log . info ( ""Received new message [msg="" + msg + "", senderNodeId="" + nodeId + "", topic=default]"" ) ; if ( ! nodeId . equals ( ignite1 . cluster ( ) . localNode ( ) . id ( ) ) ) { error . set ( true ) ; return false ; } if ( ! MSG_3 . equals ( msg ) ) { log . 
error ( ""Unexpected message "" + msg + "" for topic: default"" ) ; error . set ( true ) ; return false ; } rcvMsgs . add ( msg ) ; return true ; } finally { rcvLatch . countDown ( ) ; } } +","public boolean apply ( UUID nodeId , Object msg ) { assertEquals ( ignite2 , g ) ; try { log . info ( ""Received new message [msg="" + msg + "", senderNodeId="" + nodeId + "", topic=default]"" ) ; if ( ! nodeId . equals ( ignite1 . cluster ( ) . localNode ( ) . id ( ) ) ) { log . error ( ""Unexpected sender node: "" + nodeId ) ; error . set ( true ) ; return false ; } if ( ! MSG_3 . equals ( msg ) ) { log . error ( ""Unexpected message "" + msg + "" for topic: default"" ) ; error . set ( true ) ; return false ; } rcvMsgs . add ( msg ) ; return true ; } finally { rcvLatch . countDown ( ) ; } } +" +749,"public boolean apply ( UUID nodeId , Object msg ) { assertEquals ( ignite2 , g ) ; try { log . info ( ""Received new message [msg="" + msg + "", senderNodeId="" + nodeId + "", topic=default]"" ) ; if ( ! nodeId . equals ( ignite1 . cluster ( ) . localNode ( ) . id ( ) ) ) { log . error ( ""Unexpected sender node: "" + nodeId ) ; error . set ( true ) ; return false ; } if ( ! MSG_3 . equals ( msg ) ) { error . set ( true ) ; return false ; } rcvMsgs . add ( msg ) ; return true ; } finally { rcvLatch . countDown ( ) ; } } +","public boolean apply ( UUID nodeId , Object msg ) { assertEquals ( ignite2 , g ) ; try { log . info ( ""Received new message [msg="" + msg + "", senderNodeId="" + nodeId + "", topic=default]"" ) ; if ( ! nodeId . equals ( ignite1 . cluster ( ) . localNode ( ) . id ( ) ) ) { log . error ( ""Unexpected sender node: "" + nodeId ) ; error . set ( true ) ; return false ; } if ( ! MSG_3 . equals ( msg ) ) { log . error ( ""Unexpected message "" + msg + "" for topic: default"" ) ; error . set ( true ) ; return false ; } rcvMsgs . add ( msg ) ; return true ; } finally { rcvLatch . countDown ( ) ; } } +" +750,"public void startup ( ) { transientProducerScheduler . start ( ) ; for ( CloudJobConfigurationPOJO each : configService . loadAll ( ) ) { schedule ( each ) ; } } +","public void startup ( ) { log . info ( ""Start producer manager"" ) ; transientProducerScheduler . start ( ) ; for ( CloudJobConfigurationPOJO each : configService . loadAll ( ) ) { schedule ( each ) ; } } +" +751,"private boolean readCommitsEnabled ( String commitEnabled ) { boolean isCommitEnabled = false ; try { isCommitEnabled = Boolean . parseBoolean ( readParameterValue ( commitEnabled ) ) ; } catch ( ParameterNotFoundException e ) { } return isCommitEnabled ; } +","private boolean readCommitsEnabled ( String commitEnabled ) { boolean isCommitEnabled = false ; try { isCommitEnabled = Boolean . parseBoolean ( readParameterValue ( commitEnabled ) ) ; } catch ( ParameterNotFoundException e ) { log . error ( "" Commits enabled payload parameter is not found"" ) ; } return isCommitEnabled ; } +" +752,"public void setOutputProperties ( final Properties properties ) { super . setOutputProperties ( properties ) ; final String option = outputProperties . getProperty ( EXistOutputKeys . INDENT_SPACES , ""4"" ) ; try { indentAmount = Integer . parseInt ( option ) ; } catch ( final NumberFormatException e ) { } indent = ""yes"" . equals ( outputProperties . getProperty ( OutputKeys . INDENT , ""no"" ) ) ; } +","public void setOutputProperties ( final Properties properties ) { super . setOutputProperties ( properties ) ; final String option = outputProperties . getProperty ( EXistOutputKeys . 
INDENT_SPACES , ""4"" ) ; try { indentAmount = Integer . parseInt ( option ) ; } catch ( final NumberFormatException e ) { LOG . warn ( ""Invalid indentation value: '{}'"" , option ) ; } indent = ""yes"" . equals ( outputProperties . getProperty ( OutputKeys . INDENT , ""no"" ) ) ; } +" +753,"@ SuppressWarnings ( ""unchecked"" ) private void execute ( Entity target , String command , String type , String memberId , boolean highlight ) { if ( Entities . isNoLongerManaged ( target ) ) return ; Lifecycle state = target . getAttribute ( Attributes . SERVICE_STATE_ACTUAL ) ; if ( state == Lifecycle . STOPPING || state == Lifecycle . STOPPED ) return ; Collection < ? extends Location > locations = Locations . getLocationsCheckingAncestors ( target . getLocations ( ) , target ) ; Maybe < SshMachineLocation > machine = Machines . findUniqueMachineLocation ( locations , SshMachineLocation . class ) ; if ( machine . isAbsentOrNull ( ) ) { return ; } LOG . info ( ""Executing command on {}: {}"" , machine . get ( ) , command ) ; String executionDir = config ( ) . get ( EXECUTION_DIR ) ; String sshCommand = SshCommandSensor . makeCommandExecutingInDirectory ( command , executionDir , target ) ; Map < String , Object > env = MutableMap . of ( ) ; env . putAll ( MutableMap . copyOf ( entity . config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . putAll ( MutableMap . copyOf ( config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . put ( EVENT_TYPE , type ) ; env . put ( MEMBER_ID , memberId ) ; try { env = ( Map < String , Object > ) Tasks . resolveDeepValueWithoutCoercion ( env , getExecutionContext ( ) ) ; } catch ( InterruptedException | ExecutionException e ) { throw Exceptions . propagate ( e ) ; } ShellEnvironmentSerializer serializer = new ShellEnvironmentSerializer ( getManagementContext ( ) ) ; SshEffectorTasks . SshEffectorTaskFactory < String > task = SshEffectorTasks . ssh ( sshCommand ) . machine ( machine . get ( ) ) . requiringZeroAndReturningStdout ( ) . summary ( ""group-"" + CaseFormat . UPPER_UNDERSCORE . to ( CaseFormat . LOWER_HYPHEN , type ) ) . environmentVariables ( serializer . serialize ( env ) ) ; Task < String > taskI = DynamicTasks . submit ( task . newTask ( ) , target ) ; if ( highlight ) { highlightAction ( ""Run at "" + machine . get ( ) . getAddress ( ) . getHostAddress ( ) , taskI ) ; } String output = taskI . getUnchecked ( ) ; LOG . trace ( ""Command returned: {}"" , output ) ; } +","@ SuppressWarnings ( ""unchecked"" ) private void execute ( Entity target , String command , String type , String memberId , boolean highlight ) { if ( Entities . isNoLongerManaged ( target ) ) return ; Lifecycle state = target . getAttribute ( Attributes . SERVICE_STATE_ACTUAL ) ; if ( state == Lifecycle . STOPPING || state == Lifecycle . STOPPED ) return ; Collection < ? extends Location > locations = Locations . getLocationsCheckingAncestors ( target . getLocations ( ) , target ) ; Maybe < SshMachineLocation > machine = Machines . findUniqueMachineLocation ( locations , SshMachineLocation . class ) ; if ( machine . isAbsentOrNull ( ) ) { LOG . debug ( ""No machine available to execute command"" ) ; return ; } LOG . info ( ""Executing command on {}: {}"" , machine . get ( ) , command ) ; String executionDir = config ( ) . get ( EXECUTION_DIR ) ; String sshCommand = SshCommandSensor . makeCommandExecutingInDirectory ( command , executionDir , target ) ; Map < String , Object > env = MutableMap . of ( ) ; env . putAll ( MutableMap . copyOf ( entity . 
config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . putAll ( MutableMap . copyOf ( config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . put ( EVENT_TYPE , type ) ; env . put ( MEMBER_ID , memberId ) ; try { env = ( Map < String , Object > ) Tasks . resolveDeepValueWithoutCoercion ( env , getExecutionContext ( ) ) ; } catch ( InterruptedException | ExecutionException e ) { throw Exceptions . propagate ( e ) ; } ShellEnvironmentSerializer serializer = new ShellEnvironmentSerializer ( getManagementContext ( ) ) ; SshEffectorTasks . SshEffectorTaskFactory < String > task = SshEffectorTasks . ssh ( sshCommand ) . machine ( machine . get ( ) ) . requiringZeroAndReturningStdout ( ) . summary ( ""group-"" + CaseFormat . UPPER_UNDERSCORE . to ( CaseFormat . LOWER_HYPHEN , type ) ) . environmentVariables ( serializer . serialize ( env ) ) ; Task < String > taskI = DynamicTasks . submit ( task . newTask ( ) , target ) ; if ( highlight ) { highlightAction ( ""Run at "" + machine . get ( ) . getAddress ( ) . getHostAddress ( ) , taskI ) ; } String output = taskI . getUnchecked ( ) ; LOG . trace ( ""Command returned: {}"" , output ) ; } +" +754,"@ SuppressWarnings ( ""unchecked"" ) private void execute ( Entity target , String command , String type , String memberId , boolean highlight ) { if ( Entities . isNoLongerManaged ( target ) ) return ; Lifecycle state = target . getAttribute ( Attributes . SERVICE_STATE_ACTUAL ) ; if ( state == Lifecycle . STOPPING || state == Lifecycle . STOPPED ) return ; Collection < ? extends Location > locations = Locations . getLocationsCheckingAncestors ( target . getLocations ( ) , target ) ; Maybe < SshMachineLocation > machine = Machines . findUniqueMachineLocation ( locations , SshMachineLocation . class ) ; if ( machine . isAbsentOrNull ( ) ) { LOG . debug ( ""No machine available to execute command"" ) ; return ; } String executionDir = config ( ) . get ( EXECUTION_DIR ) ; String sshCommand = SshCommandSensor . makeCommandExecutingInDirectory ( command , executionDir , target ) ; Map < String , Object > env = MutableMap . of ( ) ; env . putAll ( MutableMap . copyOf ( entity . config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . putAll ( MutableMap . copyOf ( config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . put ( EVENT_TYPE , type ) ; env . put ( MEMBER_ID , memberId ) ; try { env = ( Map < String , Object > ) Tasks . resolveDeepValueWithoutCoercion ( env , getExecutionContext ( ) ) ; } catch ( InterruptedException | ExecutionException e ) { throw Exceptions . propagate ( e ) ; } ShellEnvironmentSerializer serializer = new ShellEnvironmentSerializer ( getManagementContext ( ) ) ; SshEffectorTasks . SshEffectorTaskFactory < String > task = SshEffectorTasks . ssh ( sshCommand ) . machine ( machine . get ( ) ) . requiringZeroAndReturningStdout ( ) . summary ( ""group-"" + CaseFormat . UPPER_UNDERSCORE . to ( CaseFormat . LOWER_HYPHEN , type ) ) . environmentVariables ( serializer . serialize ( env ) ) ; Task < String > taskI = DynamicTasks . submit ( task . newTask ( ) , target ) ; if ( highlight ) { highlightAction ( ""Run at "" + machine . get ( ) . getAddress ( ) . getHostAddress ( ) , taskI ) ; } String output = taskI . getUnchecked ( ) ; LOG . trace ( ""Command returned: {}"" , output ) ; } +","@ SuppressWarnings ( ""unchecked"" ) private void execute ( Entity target , String command , String type , String memberId , boolean highlight ) { if ( Entities . 
isNoLongerManaged ( target ) ) return ; Lifecycle state = target . getAttribute ( Attributes . SERVICE_STATE_ACTUAL ) ; if ( state == Lifecycle . STOPPING || state == Lifecycle . STOPPED ) return ; Collection < ? extends Location > locations = Locations . getLocationsCheckingAncestors ( target . getLocations ( ) , target ) ; Maybe < SshMachineLocation > machine = Machines . findUniqueMachineLocation ( locations , SshMachineLocation . class ) ; if ( machine . isAbsentOrNull ( ) ) { LOG . debug ( ""No machine available to execute command"" ) ; return ; } LOG . info ( ""Executing command on {}: {}"" , machine . get ( ) , command ) ; String executionDir = config ( ) . get ( EXECUTION_DIR ) ; String sshCommand = SshCommandSensor . makeCommandExecutingInDirectory ( command , executionDir , target ) ; Map < String , Object > env = MutableMap . of ( ) ; env . putAll ( MutableMap . copyOf ( entity . config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . putAll ( MutableMap . copyOf ( config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . put ( EVENT_TYPE , type ) ; env . put ( MEMBER_ID , memberId ) ; try { env = ( Map < String , Object > ) Tasks . resolveDeepValueWithoutCoercion ( env , getExecutionContext ( ) ) ; } catch ( InterruptedException | ExecutionException e ) { throw Exceptions . propagate ( e ) ; } ShellEnvironmentSerializer serializer = new ShellEnvironmentSerializer ( getManagementContext ( ) ) ; SshEffectorTasks . SshEffectorTaskFactory < String > task = SshEffectorTasks . ssh ( sshCommand ) . machine ( machine . get ( ) ) . requiringZeroAndReturningStdout ( ) . summary ( ""group-"" + CaseFormat . UPPER_UNDERSCORE . to ( CaseFormat . LOWER_HYPHEN , type ) ) . environmentVariables ( serializer . serialize ( env ) ) ; Task < String > taskI = DynamicTasks . submit ( task . newTask ( ) , target ) ; if ( highlight ) { highlightAction ( ""Run at "" + machine . get ( ) . getAddress ( ) . getHostAddress ( ) , taskI ) ; } String output = taskI . getUnchecked ( ) ; LOG . trace ( ""Command returned: {}"" , output ) ; } +" +755,"@ SuppressWarnings ( ""unchecked"" ) private void execute ( Entity target , String command , String type , String memberId , boolean highlight ) { if ( Entities . isNoLongerManaged ( target ) ) return ; Lifecycle state = target . getAttribute ( Attributes . SERVICE_STATE_ACTUAL ) ; if ( state == Lifecycle . STOPPING || state == Lifecycle . STOPPED ) return ; Collection < ? extends Location > locations = Locations . getLocationsCheckingAncestors ( target . getLocations ( ) , target ) ; Maybe < SshMachineLocation > machine = Machines . findUniqueMachineLocation ( locations , SshMachineLocation . class ) ; if ( machine . isAbsentOrNull ( ) ) { LOG . debug ( ""No machine available to execute command"" ) ; return ; } LOG . info ( ""Executing command on {}: {}"" , machine . get ( ) , command ) ; String executionDir = config ( ) . get ( EXECUTION_DIR ) ; String sshCommand = SshCommandSensor . makeCommandExecutingInDirectory ( command , executionDir , target ) ; Map < String , Object > env = MutableMap . of ( ) ; env . putAll ( MutableMap . copyOf ( entity . config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . putAll ( MutableMap . copyOf ( config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . put ( EVENT_TYPE , type ) ; env . put ( MEMBER_ID , memberId ) ; try { env = ( Map < String , Object > ) Tasks . 
resolveDeepValueWithoutCoercion ( env , getExecutionContext ( ) ) ; } catch ( InterruptedException | ExecutionException e ) { throw Exceptions . propagate ( e ) ; } ShellEnvironmentSerializer serializer = new ShellEnvironmentSerializer ( getManagementContext ( ) ) ; SshEffectorTasks . SshEffectorTaskFactory < String > task = SshEffectorTasks . ssh ( sshCommand ) . machine ( machine . get ( ) ) . requiringZeroAndReturningStdout ( ) . summary ( ""group-"" + CaseFormat . UPPER_UNDERSCORE . to ( CaseFormat . LOWER_HYPHEN , type ) ) . environmentVariables ( serializer . serialize ( env ) ) ; Task < String > taskI = DynamicTasks . submit ( task . newTask ( ) , target ) ; if ( highlight ) { highlightAction ( ""Run at "" + machine . get ( ) . getAddress ( ) . getHostAddress ( ) , taskI ) ; } String output = taskI . getUnchecked ( ) ; } +","@ SuppressWarnings ( ""unchecked"" ) private void execute ( Entity target , String command , String type , String memberId , boolean highlight ) { if ( Entities . isNoLongerManaged ( target ) ) return ; Lifecycle state = target . getAttribute ( Attributes . SERVICE_STATE_ACTUAL ) ; if ( state == Lifecycle . STOPPING || state == Lifecycle . STOPPED ) return ; Collection < ? extends Location > locations = Locations . getLocationsCheckingAncestors ( target . getLocations ( ) , target ) ; Maybe < SshMachineLocation > machine = Machines . findUniqueMachineLocation ( locations , SshMachineLocation . class ) ; if ( machine . isAbsentOrNull ( ) ) { LOG . debug ( ""No machine available to execute command"" ) ; return ; } LOG . info ( ""Executing command on {}: {}"" , machine . get ( ) , command ) ; String executionDir = config ( ) . get ( EXECUTION_DIR ) ; String sshCommand = SshCommandSensor . makeCommandExecutingInDirectory ( command , executionDir , target ) ; Map < String , Object > env = MutableMap . of ( ) ; env . putAll ( MutableMap . copyOf ( entity . config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . putAll ( MutableMap . copyOf ( config ( ) . get ( BrooklynConfigKeys . SHELL_ENVIRONMENT ) ) ) ; env . put ( EVENT_TYPE , type ) ; env . put ( MEMBER_ID , memberId ) ; try { env = ( Map < String , Object > ) Tasks . resolveDeepValueWithoutCoercion ( env , getExecutionContext ( ) ) ; } catch ( InterruptedException | ExecutionException e ) { throw Exceptions . propagate ( e ) ; } ShellEnvironmentSerializer serializer = new ShellEnvironmentSerializer ( getManagementContext ( ) ) ; SshEffectorTasks . SshEffectorTaskFactory < String > task = SshEffectorTasks . ssh ( sshCommand ) . machine ( machine . get ( ) ) . requiringZeroAndReturningStdout ( ) . summary ( ""group-"" + CaseFormat . UPPER_UNDERSCORE . to ( CaseFormat . LOWER_HYPHEN , type ) ) . environmentVariables ( serializer . serialize ( env ) ) ; Task < String > taskI = DynamicTasks . submit ( task . newTask ( ) , target ) ; if ( highlight ) { highlightAction ( ""Run at "" + machine . get ( ) . getAddress ( ) . getHostAddress ( ) , taskI ) ; } String output = taskI . getUnchecked ( ) ; LOG . trace ( ""Command returned: {}"" , output ) ; } +" +756,"protected Document requestAnnotations ( Document document ) throws GerbilException { HttpGet request = null ; try { String url = createRequestUrl ( document . getText ( ) ) ; request = createGetRequest ( url ) ; } catch ( Exception e ) { throw new GerbilException ( ""Couldn't create HTTP request."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } request . addHeader ( HttpHeaders . ACCEPT , ""text/turtle"" ) ; request . addHeader ( HttpHeaders . 
ACCEPT_CHARSET , ""UTF-8"" ) ; HttpEntity entity = null ; CloseableHttpResponse response = null ; try { response = sendRequest ( request ) ; entity = response . getEntity ( ) ; try { document = nifParser . getDocumentFromNIFReader ( replaceDenotesUri ( entity . getContent ( ) ) ) ; } catch ( Exception e ) { LOGGER . error ( ""Couldn't parse the response."" , e ) ; throw new GerbilException ( ""Couldn't parse the response."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } if ( document == null ) { throw new GerbilException ( ""The response didn't contain a document."" , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } } finally { closeRequest ( request ) ; if ( entity != null ) { try { EntityUtils . consume ( entity ) ; } catch ( IOException e1 ) { } } IOUtils . closeQuietly ( response ) ; } LOGGER . info ( ""Finished request for {}"" , document . getDocumentURI ( ) ) ; return document ; } +","protected Document requestAnnotations ( Document document ) throws GerbilException { LOGGER . info ( ""Started request for {}"" , document . getDocumentURI ( ) ) ; HttpGet request = null ; try { String url = createRequestUrl ( document . getText ( ) ) ; request = createGetRequest ( url ) ; } catch ( Exception e ) { throw new GerbilException ( ""Couldn't create HTTP request."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } request . addHeader ( HttpHeaders . ACCEPT , ""text/turtle"" ) ; request . addHeader ( HttpHeaders . ACCEPT_CHARSET , ""UTF-8"" ) ; HttpEntity entity = null ; CloseableHttpResponse response = null ; try { response = sendRequest ( request ) ; entity = response . getEntity ( ) ; try { document = nifParser . getDocumentFromNIFReader ( replaceDenotesUri ( entity . getContent ( ) ) ) ; } catch ( Exception e ) { LOGGER . error ( ""Couldn't parse the response."" , e ) ; throw new GerbilException ( ""Couldn't parse the response."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } if ( document == null ) { throw new GerbilException ( ""The response didn't contain a document."" , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } } finally { closeRequest ( request ) ; if ( entity != null ) { try { EntityUtils . consume ( entity ) ; } catch ( IOException e1 ) { } } IOUtils . closeQuietly ( response ) ; } LOGGER . info ( ""Finished request for {}"" , document . getDocumentURI ( ) ) ; return document ; } +" +757,"protected Document requestAnnotations ( Document document ) throws GerbilException { LOGGER . info ( ""Started request for {}"" , document . getDocumentURI ( ) ) ; HttpGet request = null ; try { String url = createRequestUrl ( document . getText ( ) ) ; request = createGetRequest ( url ) ; } catch ( Exception e ) { throw new GerbilException ( ""Couldn't create HTTP request."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } request . addHeader ( HttpHeaders . ACCEPT , ""text/turtle"" ) ; request . addHeader ( HttpHeaders . ACCEPT_CHARSET , ""UTF-8"" ) ; HttpEntity entity = null ; CloseableHttpResponse response = null ; try { response = sendRequest ( request ) ; entity = response . getEntity ( ) ; try { document = nifParser . getDocumentFromNIFReader ( replaceDenotesUri ( entity . getContent ( ) ) ) ; } catch ( Exception e ) { throw new GerbilException ( ""Couldn't parse the response."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } if ( document == null ) { throw new GerbilException ( ""The response didn't contain a document."" , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } } finally { closeRequest ( request ) ; if ( entity != null ) { try { EntityUtils . consume ( entity ) ; } catch ( IOException e1 ) { } } IOUtils . 
closeQuietly ( response ) ; } LOGGER . info ( ""Finished request for {}"" , document . getDocumentURI ( ) ) ; return document ; } +","protected Document requestAnnotations ( Document document ) throws GerbilException { LOGGER . info ( ""Started request for {}"" , document . getDocumentURI ( ) ) ; HttpGet request = null ; try { String url = createRequestUrl ( document . getText ( ) ) ; request = createGetRequest ( url ) ; } catch ( Exception e ) { throw new GerbilException ( ""Couldn't create HTTP request."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } request . addHeader ( HttpHeaders . ACCEPT , ""text/turtle"" ) ; request . addHeader ( HttpHeaders . ACCEPT_CHARSET , ""UTF-8"" ) ; HttpEntity entity = null ; CloseableHttpResponse response = null ; try { response = sendRequest ( request ) ; entity = response . getEntity ( ) ; try { document = nifParser . getDocumentFromNIFReader ( replaceDenotesUri ( entity . getContent ( ) ) ) ; } catch ( Exception e ) { LOGGER . error ( ""Couldn't parse the response."" , e ) ; throw new GerbilException ( ""Couldn't parse the response."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } if ( document == null ) { throw new GerbilException ( ""The response didn't contain a document."" , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } } finally { closeRequest ( request ) ; if ( entity != null ) { try { EntityUtils . consume ( entity ) ; } catch ( IOException e1 ) { } } IOUtils . closeQuietly ( response ) ; } LOGGER . info ( ""Finished request for {}"" , document . getDocumentURI ( ) ) ; return document ; } +" +758,"protected Document requestAnnotations ( Document document ) throws GerbilException { LOGGER . info ( ""Started request for {}"" , document . getDocumentURI ( ) ) ; HttpGet request = null ; try { String url = createRequestUrl ( document . getText ( ) ) ; request = createGetRequest ( url ) ; } catch ( Exception e ) { throw new GerbilException ( ""Couldn't create HTTP request."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } request . addHeader ( HttpHeaders . ACCEPT , ""text/turtle"" ) ; request . addHeader ( HttpHeaders . ACCEPT_CHARSET , ""UTF-8"" ) ; HttpEntity entity = null ; CloseableHttpResponse response = null ; try { response = sendRequest ( request ) ; entity = response . getEntity ( ) ; try { document = nifParser . getDocumentFromNIFReader ( replaceDenotesUri ( entity . getContent ( ) ) ) ; } catch ( Exception e ) { LOGGER . error ( ""Couldn't parse the response."" , e ) ; throw new GerbilException ( ""Couldn't parse the response."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } if ( document == null ) { throw new GerbilException ( ""The response didn't contain a document."" , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } } finally { closeRequest ( request ) ; if ( entity != null ) { try { EntityUtils . consume ( entity ) ; } catch ( IOException e1 ) { } } IOUtils . closeQuietly ( response ) ; } return document ; } +","protected Document requestAnnotations ( Document document ) throws GerbilException { LOGGER . info ( ""Started request for {}"" , document . getDocumentURI ( ) ) ; HttpGet request = null ; try { String url = createRequestUrl ( document . getText ( ) ) ; request = createGetRequest ( url ) ; } catch ( Exception e ) { throw new GerbilException ( ""Couldn't create HTTP request."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } request . addHeader ( HttpHeaders . ACCEPT , ""text/turtle"" ) ; request . addHeader ( HttpHeaders . 
ACCEPT_CHARSET , ""UTF-8"" ) ; HttpEntity entity = null ; CloseableHttpResponse response = null ; try { response = sendRequest ( request ) ; entity = response . getEntity ( ) ; try { document = nifParser . getDocumentFromNIFReader ( replaceDenotesUri ( entity . getContent ( ) ) ) ; } catch ( Exception e ) { LOGGER . error ( ""Couldn't parse the response."" , e ) ; throw new GerbilException ( ""Couldn't parse the response."" , e , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } if ( document == null ) { throw new GerbilException ( ""The response didn't contain a document."" , ErrorTypes . UNEXPECTED_EXCEPTION ) ; } } finally { closeRequest ( request ) ; if ( entity != null ) { try { EntityUtils . consume ( entity ) ; } catch ( IOException e1 ) { } } IOUtils . closeQuietly ( response ) ; } LOGGER . info ( ""Finished request for {}"" , document . getDocumentURI ( ) ) ; return document ; } +" +759,"protected TimeoutTask createTimeoutTask ( ) { if ( timeout <= 0 ) { return null ; } return TimeoutManager . getInstance ( ) . addTimeoutTarget ( ( ) -> { ComponentUtil . getProcessHelper ( ) . destroyProcess ( sessionId ) ; processTimeout = true ; } , timeout , false ) ; } +","protected TimeoutTask createTimeoutTask ( ) { if ( timeout <= 0 ) { return null ; } return TimeoutManager . getInstance ( ) . addTimeoutTarget ( ( ) -> { logger . warn ( ""Process is terminated due to {} second exceeded."" , timeout ) ; ComponentUtil . getProcessHelper ( ) . destroyProcess ( sessionId ) ; processTimeout = true ; } , timeout , false ) ; } +" +760,"private Set < String > getRequestingUserDepts ( ) { Set < String > requestingUserDepts = Sets . newHashSet ( ) ; ModerationService . Iface client = thriftClients . makeModerationClient ( ) ; try { requestingUserDepts = client . getRequestingUserDepts ( ) ; } catch ( TException e ) { } return requestingUserDepts ; } +","private Set < String > getRequestingUserDepts ( ) { Set < String > requestingUserDepts = Sets . newHashSet ( ) ; ModerationService . Iface client = thriftClients . makeModerationClient ( ) ; try { requestingUserDepts = client . getRequestingUserDepts ( ) ; } catch ( TException e ) { log . error ( ""Error geeting requesting user departments"" , e ) ; } return requestingUserDepts ; } +" +761,"@ SuppressWarnings ( ""unchecked"" ) @ Override public void writeData ( Object [ ] data ) throws HyracksDataException { if ( ! open ) { throw new HyracksDataException ( ""Writing to SerializingDataWriter that has not been opened"" ) ; } tb . reset ( ) ; for ( int i = 0 ; i < data . length ; ++ i ) { Object instance = data [ i ] ; if ( LOGGER . isTraceEnabled ( ) ) { } tb . addField ( recordDescriptor . getFields ( ) [ i ] , instance ) ; } FrameUtils . appendToWriter ( frameWriter , tupleAppender , tb . getFieldEndOffsets ( ) , tb . getByteArray ( ) , 0 , tb . getSize ( ) ) ; } +","@ SuppressWarnings ( ""unchecked"" ) @ Override public void writeData ( Object [ ] data ) throws HyracksDataException { if ( ! open ) { throw new HyracksDataException ( ""Writing to SerializingDataWriter that has not been opened"" ) ; } tb . reset ( ) ; for ( int i = 0 ; i < data . length ; ++ i ) { Object instance = data [ i ] ; if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( i + "" "" + LogRedactionUtil . userData ( instance . toString ( ) ) ) ; } tb . addField ( recordDescriptor . getFields ( ) [ i ] , instance ) ; } FrameUtils . appendToWriter ( frameWriter , tupleAppender , tb . getFieldEndOffsets ( ) , tb . getByteArray ( ) , 0 , tb . 
getSize ( ) ) ; } +" +762,"private void writeEventBatchToSerializer ( ) throws InterruptedException , WriteException { try { batch . clear ( ) ; } catch ( StreamingException e ) { throw new WriteException ( endPoint , txnBatch . getCurrentTxnId ( ) , e ) ; } catch ( TimeoutException e ) { throw new WriteException ( endPoint , txnBatch . getCurrentTxnId ( ) , e ) ; } } +","private void writeEventBatchToSerializer ( ) throws InterruptedException , WriteException { try { timedCall ( new CallRunner1 < Void > ( ) { @ Override public Void call ( ) throws InterruptedException , StreamingException { try { for ( Event event : batch ) { try { serializer . write ( txnBatch , event ) ; } catch ( SerializationError err ) { LOG . info ( ""Parse failed : {} : {}"" , err . getMessage ( ) , new String ( event . getBody ( ) ) ) ; } } return null ; } catch ( IOException e ) { throw new StreamingIOFailure ( e . getMessage ( ) , e ) ; } } } ) ; batch . clear ( ) ; } catch ( StreamingException e ) { throw new WriteException ( endPoint , txnBatch . getCurrentTxnId ( ) , e ) ; } catch ( TimeoutException e ) { throw new WriteException ( endPoint , txnBatch . getCurrentTxnId ( ) , e ) ; } } +" +763,"private void writeEventBatchToSerializer ( ) throws InterruptedException , WriteException { try { timedCall ( new CallRunner1 < Void > ( ) { @ Override public Void call ( ) throws InterruptedException , StreamingException { try { for ( Event event : batch ) { try { serializer . write ( txnBatch , event ) ; } catch ( SerializationError err ) { } } return null ; } catch ( IOException e ) { throw new StreamingIOFailure ( e . getMessage ( ) , e ) ; } } } ) ; batch . clear ( ) ; } catch ( StreamingException e ) { throw new WriteException ( endPoint , txnBatch . getCurrentTxnId ( ) , e ) ; } catch ( TimeoutException e ) { throw new WriteException ( endPoint , txnBatch . getCurrentTxnId ( ) , e ) ; } } +","private void writeEventBatchToSerializer ( ) throws InterruptedException , WriteException { try { timedCall ( new CallRunner1 < Void > ( ) { @ Override public Void call ( ) throws InterruptedException , StreamingException { try { for ( Event event : batch ) { try { serializer . write ( txnBatch , event ) ; } catch ( SerializationError err ) { LOG . info ( ""Parse failed : {} : {}"" , err . getMessage ( ) , new String ( event . getBody ( ) ) ) ; } } return null ; } catch ( IOException e ) { throw new StreamingIOFailure ( e . getMessage ( ) , e ) ; } } } ) ; batch . clear ( ) ; } catch ( StreamingException e ) { throw new WriteException ( endPoint , txnBatch . getCurrentTxnId ( ) , e ) ; } catch ( TimeoutException e ) { throw new WriteException ( endPoint , txnBatch . getCurrentTxnId ( ) , e ) ; } } +" +764,"@ RestAccessControl ( permission = Permission . SUPERUSER ) @ RequestMapping ( value = ""/{modelId}"" , produces = MediaType . APPLICATION_JSON_VALUE , method = RequestMethod . DELETE ) public ResponseEntity < SimpleRestResponse < Map < String , String > > > deleteContentModel ( @ PathVariable Long modelId ) { contentModelService . delete ( modelId ) ; Map < String , String > result = Collections . singletonMap ( ""modelId"" , String . valueOf ( modelId ) ) ; return ResponseEntity . ok ( new SimpleRestResponse < > ( result ) ) ; } +","@ RestAccessControl ( permission = Permission . SUPERUSER ) @ RequestMapping ( value = ""/{modelId}"" , produces = MediaType . APPLICATION_JSON_VALUE , method = RequestMethod . 
DELETE ) public ResponseEntity < SimpleRestResponse < Map < String , String > > > deleteContentModel ( @ PathVariable Long modelId ) { logger . info ( ""deleting content model {}"" , modelId ) ; contentModelService . delete ( modelId ) ; Map < String , String > result = Collections . singletonMap ( ""modelId"" , String . valueOf ( modelId ) ) ; return ResponseEntity . ok ( new SimpleRestResponse < > ( result ) ) ; } +" +765,"private void assertEvictionTime ( int expectedSeconds , int error ) { long startTime = System . currentTimeMillis ( ) ; while ( true ) { if ( dbPoolManager . getConnectionPool ( ) . getNumIdle ( ) == minIdleConnections ) { long waitSeconds = getSecondsSince ( startTime ) ; LOG . info ( ""Idle connections are evicted after {} seconds"" , waitSeconds ) ; assertInRange ( waitSeconds , expectedSeconds , expectedSeconds + error ) ; break ; } else { sleepMillis ( 500 ) ; } } } +","private void assertEvictionTime ( int expectedSeconds , int error ) { long startTime = System . currentTimeMillis ( ) ; while ( true ) { LOG . info ( ""Active connections: {}"" , dbPoolManager . getConnectionPool ( ) . getNumIdle ( ) ) ; if ( dbPoolManager . getConnectionPool ( ) . getNumIdle ( ) == minIdleConnections ) { long waitSeconds = getSecondsSince ( startTime ) ; LOG . info ( ""Idle connections are evicted after {} seconds"" , waitSeconds ) ; assertInRange ( waitSeconds , expectedSeconds , expectedSeconds + error ) ; break ; } else { sleepMillis ( 500 ) ; } } } +" +766,"private void assertEvictionTime ( int expectedSeconds , int error ) { long startTime = System . currentTimeMillis ( ) ; while ( true ) { LOG . info ( ""Active connections: {}"" , dbPoolManager . getConnectionPool ( ) . getNumIdle ( ) ) ; if ( dbPoolManager . getConnectionPool ( ) . getNumIdle ( ) == minIdleConnections ) { long waitSeconds = getSecondsSince ( startTime ) ; assertInRange ( waitSeconds , expectedSeconds , expectedSeconds + error ) ; break ; } else { sleepMillis ( 500 ) ; } } } +","private void assertEvictionTime ( int expectedSeconds , int error ) { long startTime = System . currentTimeMillis ( ) ; while ( true ) { LOG . info ( ""Active connections: {}"" , dbPoolManager . getConnectionPool ( ) . getNumIdle ( ) ) ; if ( dbPoolManager . getConnectionPool ( ) . getNumIdle ( ) == minIdleConnections ) { long waitSeconds = getSecondsSince ( startTime ) ; LOG . info ( ""Idle connections are evicted after {} seconds"" , waitSeconds ) ; assertInRange ( waitSeconds , expectedSeconds , expectedSeconds + error ) ; break ; } else { sleepMillis ( 500 ) ; } } } +" +767,"private void downloadAndCopyJars ( Set < String > jarsToDownload , Path destinationPath ) throws IOException { Set < String > copiedJars = new HashSet < > ( ) ; for ( String jar : jarsToDownload ) { if ( ! copiedJars . contains ( jar ) ) { Path jarPath = Paths . get ( jar ) ; if ( destinationPath == null || jarPath == null ) { throw new IllegalArgumentException ( ""null destinationPath or jarPath"" ) ; } Path jarFileName = jarPath . getFileName ( ) ; if ( jarFileName == null ) { throw new IllegalArgumentException ( ""null farFileName"" ) ; } File destPath = Paths . get ( destinationPath . toString ( ) , jarFileName . toString ( ) ) . toFile ( ) ; try ( InputStream src = fileStorage . download ( jar ) ; FileOutputStream dest = new FileOutputStream ( destPath ) ) { IOUtils . copy ( src , dest ) ; copiedJars . 
add ( jar ) ; } } } } +","private void downloadAndCopyJars ( Set < String > jarsToDownload , Path destinationPath ) throws IOException { Set < String > copiedJars = new HashSet < > ( ) ; for ( String jar : jarsToDownload ) { if ( ! copiedJars . contains ( jar ) ) { Path jarPath = Paths . get ( jar ) ; if ( destinationPath == null || jarPath == null ) { throw new IllegalArgumentException ( ""null destinationPath or jarPath"" ) ; } Path jarFileName = jarPath . getFileName ( ) ; if ( jarFileName == null ) { throw new IllegalArgumentException ( ""null farFileName"" ) ; } File destPath = Paths . get ( destinationPath . toString ( ) , jarFileName . toString ( ) ) . toFile ( ) ; try ( InputStream src = fileStorage . download ( jar ) ; FileOutputStream dest = new FileOutputStream ( destPath ) ) { IOUtils . copy ( src , dest ) ; copiedJars . add ( jar ) ; LOG . debug ( ""Jar {} copied to {}"" , jar , destPath ) ; } } } } +" +768,"public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +","public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER .
info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +" +769,"public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs .
add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +","public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json .
getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +","public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( "">"" , "">"" ) . replaceAll ( ""<"" , ""<"" ) . replace ( ""&"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +" +771,"public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . 
info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( "">"" , "">"" ) . replaceAll ( ""<"" , ""<"" ) . replace ( ""&"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +","public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( "">"" , "">"" ) . replaceAll ( ""<"" , ""<"" ) . replace ( ""&"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +" +772,"public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . 
contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( "">"" , "">"" ) . replaceAll ( ""<"" , ""<"" ) . replace ( ""&"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +","public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( "">"" , "">"" ) . replaceAll ( ""<"" , ""<"" ) . replace ( ""&"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . 
toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +","public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +" +772,"public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) .
replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +","public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +" +773,"public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +","public List < String > getURLsFromPage ( Document page ) { List < String > imageURLs = new ArrayList < > ( ) ; int x = 1 ; Elements pageImages = page . getElementsByClass ( ""c-tile"" ) ; for ( Element thumb : pageImages ) { if ( thumb . attr ( ""href"" ) . contains ( ""/comics/album/"" ) ) { String subUrl = ""https://www.8muses.com"" + thumb . attr ( ""href"" ) ; try { LOGGER . info ( ""Retrieving "" + subUrl ) ; sendUpdate ( STATUS . LOADING_RESOURCE , subUrl ) ; Document subPage = Http . url ( subUrl ) . get ( ) ; List < String > subalbumImages = getURLsFromPage ( subPage ) ; LOGGER . info ( ""Found "" + subalbumImages . size ( ) + "" images in subalbum"" ) ; } catch ( IOException e ) { LOGGER . warn ( ""Error while loading subalbum "" + subUrl , e ) ; } } else if ( thumb . attr ( ""href"" ) . contains ( ""/comics/picture/"" ) ) { LOGGER . info ( ""This page is a album"" ) ; LOGGER . info ( ""Ripping image"" ) ; if ( super . isStopped ( ) ) break ; String image = null ; if ( thumb . hasAttr ( ""data-cfsrc"" ) ) { image = thumb . attr ( ""data-cfsrc"" ) ; } else { String rawJson = deobfuscateJSON ( page . select ( ""script#ractive-public"" ) . html ( ) . replaceAll ( ""&gt;"" , "">"" ) . replaceAll ( ""&lt;"" , ""<"" ) . replace ( ""&amp;"" , ""&"" ) ) ; JSONObject json = new JSONObject ( rawJson ) ; try { for ( int i = 0 ; i != json . getJSONArray ( ""pictures"" ) . length ( ) ; i ++ ) { image = ""https://www.8muses.com/image/fl/"" + json . getJSONArray ( ""pictures"" ) . getJSONObject ( i ) . getString ( ""publicUri"" ) ; URL imageUrl = new URL ( image ) ; addURLToDownload ( imageUrl , getPrefixShort ( x ) , getSubdir ( page . select ( ""title"" ) . text ( ) ) , this . url . toExternalForm ( ) , cookies , """" , null , true ) ; x ++ ; if ( isThisATest ( ) ) { break ; } } return imageURLs ; } catch ( MalformedURLException e ) { LOGGER . error ( ""\"""" + image + ""\"" is malformed"" ) ; } } if ( ! image . contains ( ""8muses.com"" ) ) { continue ; } imageURLs . add ( image ) ; if ( isThisATest ( ) ) break ; } } return imageURLs ; } +" +774,"private void awaitInitializeProxy ( IgniteCacheProxyImpl < ? , ? > jcache ) { if ( jcache != null ) { CountDownLatch initLatch = jcache . getInitLatch ( ) ; try { while ( initLatch . getCount ( ) > 0 ) { initLatch . await ( 2000 , TimeUnit . MILLISECONDS ) ; if ( log . isInfoEnabled ( ) ) } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } } } +","private void awaitInitializeProxy ( IgniteCacheProxyImpl < ? , ? > jcache ) { if ( jcache != null ) { CountDownLatch initLatch = jcache . getInitLatch ( ) ; try { while ( initLatch . getCount ( ) > 0 ) { initLatch . await ( 2000 , TimeUnit . MILLISECONDS ) ; if ( log . isInfoEnabled ( ) ) log . info ( ""Failed to wait proxy initialization, cache="" + jcache . getName ( ) + "", localNodeId="" + ctx . localNodeId ( ) ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } } } +" +775,"public int getSymLinkDepth ( ) { int value = 0 ; try { value = Integer . parseInt ( line . getOptionValue ( ARGUMENT . SYM_LINK_DEPTH , ""0"" ) ) ; if ( value < 0 ) { value = 0 ; } } catch ( NumberFormatException ex ) { } return value ; } +","public int getSymLinkDepth ( ) { int value = 0 ; try { value = Integer . parseInt ( line . getOptionValue ( ARGUMENT . SYM_LINK_DEPTH , ""0"" ) ) ; if ( value < 0 ) { value = 0 ; } } catch ( NumberFormatException ex ) { LOGGER . debug ( ""Symbolic link was not a number"" ) ; } return value ; } +" +776,"protected Object getRawNodeValue ( final JsonNode fieldNode , final DataType dataType , final String fieldName ) throws IOException { if ( fieldNode == null || fieldNode . isNull ( ) ) { return null ; } if ( fieldNode . isNumber ( ) ) { return fieldNode . getNumberValue ( ) ; } if ( fieldNode . isBinary ( ) ) { return fieldNode . getBinaryValue ( ) ; } if ( fieldNode . isBoolean ( ) ) { return fieldNode . getBooleanValue ( ) ; } if ( fieldNode . isTextual ( ) ) { final String textValue = fieldNode . getTextValue ( ) ; if ( dataType == null ) { return textValue ; } switch ( dataType . getFieldType ( ) ) { case DATE : case TIME : case TIMESTAMP : try { return DataTypeUtils . convertType ( textValue , dataType , LAZY_DATE_FORMAT , LAZY_TIME_FORMAT , LAZY_TIMESTAMP_FORMAT , fieldName ) ; } catch ( final Exception e ) { return textValue ; } default : return textValue ; } } if ( fieldNode . isArray ( ) ) { final ArrayNode arrayNode = ( ArrayNode ) fieldNode ; final int numElements = arrayNode . size ( ) ; final Object [ ] arrayElements = new Object [ numElements ] ; int count = 0 ; final DataType elementDataType ; if ( dataType != null && dataType . getFieldType ( ) == RecordFieldType .
ARRAY ) { final ArrayDataType arrayDataType = ( ArrayDataType ) dataType ; elementDataType = arrayDataType . getElementType ( ) ; } else if ( dataType != null && dataType . getFieldType ( ) == RecordFieldType . CHOICE ) { List < DataType > possibleSubTypes = ( ( ChoiceDataType ) dataType ) . getPossibleSubTypes ( ) ; for ( DataType possibleSubType : possibleSubTypes ) { if ( possibleSubType . getFieldType ( ) == RecordFieldType . ARRAY ) { ArrayDataType possibleArrayDataType = ( ArrayDataType ) possibleSubType ; DataType possibleElementType = possibleArrayDataType . getElementType ( ) ; final Object [ ] possibleArrayElements = new Object [ numElements ] ; int elementCounter = 0 ; for ( final JsonNode node : arrayNode ) { final Object value = getRawNodeValue ( node , possibleElementType , fieldName ) ; possibleArrayElements [ elementCounter ++ ] = value ; } if ( DataTypeUtils . isArrayTypeCompatible ( possibleArrayElements , possibleElementType , true ) ) { return possibleArrayElements ; } } } elementDataType = dataType ; } else { elementDataType = dataType ; } for ( final JsonNode node : arrayNode ) { final Object value = getRawNodeValue ( node , elementDataType , fieldName ) ; arrayElements [ count ++ ] = value ; } return arrayElements ; } if ( fieldNode . isObject ( ) ) { RecordSchema childSchema = null ; if ( dataType != null && RecordFieldType . MAP == dataType . getFieldType ( ) ) { return getMapFromRawValue ( fieldNode , dataType , fieldName ) ; } return getRecordFromRawValue ( fieldNode , dataType ) ; } return null ; } +","protected Object getRawNodeValue ( final JsonNode fieldNode , final DataType dataType , final String fieldName ) throws IOException { if ( fieldNode == null || fieldNode . isNull ( ) ) { return null ; } if ( fieldNode . isNumber ( ) ) { return fieldNode . getNumberValue ( ) ; } if ( fieldNode . isBinary ( ) ) { return fieldNode . getBinaryValue ( ) ; } if ( fieldNode . isBoolean ( ) ) { return fieldNode . getBooleanValue ( ) ; } if ( fieldNode . isTextual ( ) ) { final String textValue = fieldNode . getTextValue ( ) ; if ( dataType == null ) { return textValue ; } switch ( dataType . getFieldType ( ) ) { case DATE : case TIME : case TIMESTAMP : try { return DataTypeUtils . convertType ( textValue , dataType , LAZY_DATE_FORMAT , LAZY_TIME_FORMAT , LAZY_TIMESTAMP_FORMAT , fieldName ) ; } catch ( final Exception e ) { return textValue ; } default : return textValue ; } } if ( fieldNode . isArray ( ) ) { final ArrayNode arrayNode = ( ArrayNode ) fieldNode ; final int numElements = arrayNode . size ( ) ; final Object [ ] arrayElements = new Object [ numElements ] ; int count = 0 ; final DataType elementDataType ; if ( dataType != null && dataType . getFieldType ( ) == RecordFieldType . ARRAY ) { final ArrayDataType arrayDataType = ( ArrayDataType ) dataType ; elementDataType = arrayDataType . getElementType ( ) ; } else if ( dataType != null && dataType . getFieldType ( ) == RecordFieldType . CHOICE ) { List < DataType > possibleSubTypes = ( ( ChoiceDataType ) dataType ) . getPossibleSubTypes ( ) ; for ( DataType possibleSubType : possibleSubTypes ) { if ( possibleSubType . getFieldType ( ) == RecordFieldType . ARRAY ) { ArrayDataType possibleArrayDataType = ( ArrayDataType ) possibleSubType ; DataType possibleElementType = possibleArrayDataType . 
getElementType ( ) ; final Object [ ] possibleArrayElements = new Object [ numElements ] ; int elementCounter = 0 ; for ( final JsonNode node : arrayNode ) { final Object value = getRawNodeValue ( node , possibleElementType , fieldName ) ; possibleArrayElements [ elementCounter ++ ] = value ; } if ( DataTypeUtils . isArrayTypeCompatible ( possibleArrayElements , possibleElementType , true ) ) { return possibleArrayElements ; } } } logger . debug ( ""Couldn't find proper schema for '{}'. This could lead to some fields filtered out."" , fieldName ) ; elementDataType = dataType ; } else { elementDataType = dataType ; } for ( final JsonNode node : arrayNode ) { final Object value = getRawNodeValue ( node , elementDataType , fieldName ) ; arrayElements [ count ++ ] = value ; } return arrayElements ; } if ( fieldNode . isObject ( ) ) { RecordSchema childSchema = null ; if ( dataType != null && RecordFieldType . MAP == dataType . getFieldType ( ) ) { return getMapFromRawValue ( fieldNode , dataType , fieldName ) ; } return getRecordFromRawValue ( fieldNode , dataType ) ; } return null ; } +" +777,"public < T extends HttpUpgradeHandler > T upgrade ( Class < T > handlerClass ) throws IOException , ServletException { Response response = _channel . getResponse ( ) ; if ( response . getStatus ( ) != HttpStatus . SWITCHING_PROTOCOLS_101 ) throw new IllegalStateException ( ""Response status should be 101"" ) ; if ( response . getHeader ( ""Upgrade"" ) == null ) throw new IllegalStateException ( ""Missing Upgrade header"" ) ; if ( ! ""Upgrade"" . equalsIgnoreCase ( response . getHeader ( ""Connection"" ) ) ) throw new IllegalStateException ( ""Invalid Connection header"" ) ; if ( response . isCommitted ( ) ) throw new IllegalStateException ( ""Cannot upgrade committed response"" ) ; if ( _metaData == null || _metaData . getHttpVersion ( ) != HttpVersion . HTTP_1_1 ) throw new IllegalStateException ( ""Only requests over HTTP/1.1 can be upgraded"" ) ; ServletOutputStream outputStream = response . getOutputStream ( ) ; ServletInputStream inputStream = getInputStream ( ) ; HttpChannelOverHttp httpChannel11 = ( HttpChannelOverHttp ) _channel ; HttpConnection httpConnection = ( HttpConnection ) _channel . getConnection ( ) ; T upgradeHandler ; try { upgradeHandler = handlerClass . getDeclaredConstructor ( ) . newInstance ( ) ; } catch ( Exception e ) { throw new ServletException ( ""Unable to instantiate handler class"" , e ) ; } httpChannel11 . servletUpgrade ( ) ; AsyncContext asyncContext = forceStartAsync ( ) ; outputStream . flush ( ) ; httpConnection . getGenerator ( ) . servletUpgrade ( ) ; httpConnection . addEventListener ( new Connection . Listener ( ) { @ Override public void onClosed ( Connection connection ) { try { asyncContext . complete ( ) ; } catch ( Exception e ) { } try { upgradeHandler . destroy ( ) ; } catch ( Exception e ) { LOG . warn ( ""error during upgrade HttpUpgradeHandler destroy"" , e ) ; } } @ Override public void onOpened ( Connection connection ) { } } ) ; upgradeHandler . init ( new WebConnection ( ) { @ Override public void close ( ) throws Exception { try { inputStream . close ( ) ; } finally { outputStream . close ( ) ; } } @ Override public ServletInputStream getInputStream ( ) { return inputStream ; } @ Override public ServletOutputStream getOutputStream ( ) { return outputStream ; } } ) ; return upgradeHandler ; } +","public < T extends HttpUpgradeHandler > T upgrade ( Class < T > handlerClass ) throws IOException , ServletException { Response response = _channel . 
getResponse ( ) ; if ( response . getStatus ( ) != HttpStatus . SWITCHING_PROTOCOLS_101 ) throw new IllegalStateException ( ""Response status should be 101"" ) ; if ( response . getHeader ( ""Upgrade"" ) == null ) throw new IllegalStateException ( ""Missing Upgrade header"" ) ; if ( ! ""Upgrade"" . equalsIgnoreCase ( response . getHeader ( ""Connection"" ) ) ) throw new IllegalStateException ( ""Invalid Connection header"" ) ; if ( response . isCommitted ( ) ) throw new IllegalStateException ( ""Cannot upgrade committed response"" ) ; if ( _metaData == null || _metaData . getHttpVersion ( ) != HttpVersion . HTTP_1_1 ) throw new IllegalStateException ( ""Only requests over HTTP/1.1 can be upgraded"" ) ; ServletOutputStream outputStream = response . getOutputStream ( ) ; ServletInputStream inputStream = getInputStream ( ) ; HttpChannelOverHttp httpChannel11 = ( HttpChannelOverHttp ) _channel ; HttpConnection httpConnection = ( HttpConnection ) _channel . getConnection ( ) ; T upgradeHandler ; try { upgradeHandler = handlerClass . getDeclaredConstructor ( ) . newInstance ( ) ; } catch ( Exception e ) { throw new ServletException ( ""Unable to instantiate handler class"" , e ) ; } httpChannel11 . servletUpgrade ( ) ; AsyncContext asyncContext = forceStartAsync ( ) ; outputStream . flush ( ) ; httpConnection . getGenerator ( ) . servletUpgrade ( ) ; httpConnection . addEventListener ( new Connection . Listener ( ) { @ Override public void onClosed ( Connection connection ) { try { asyncContext . complete ( ) ; } catch ( Exception e ) { LOG . warn ( ""error during upgrade AsyncContext complete"" , e ) ; } try { upgradeHandler . destroy ( ) ; } catch ( Exception e ) { LOG . warn ( ""error during upgrade HttpUpgradeHandler destroy"" , e ) ; } } @ Override public void onOpened ( Connection connection ) { } } ) ; upgradeHandler . init ( new WebConnection ( ) { @ Override public void close ( ) throws Exception { try { inputStream . close ( ) ; } finally { outputStream . close ( ) ; } } @ Override public ServletInputStream getInputStream ( ) { return inputStream ; } @ Override public ServletOutputStream getOutputStream ( ) { return outputStream ; } } ) ; return upgradeHandler ; } +" +778,"public < T extends HttpUpgradeHandler > T upgrade ( Class < T > handlerClass ) throws IOException , ServletException { Response response = _channel . getResponse ( ) ; if ( response . getStatus ( ) != HttpStatus . SWITCHING_PROTOCOLS_101 ) throw new IllegalStateException ( ""Response status should be 101"" ) ; if ( response . getHeader ( ""Upgrade"" ) == null ) throw new IllegalStateException ( ""Missing Upgrade header"" ) ; if ( ! ""Upgrade"" . equalsIgnoreCase ( response . getHeader ( ""Connection"" ) ) ) throw new IllegalStateException ( ""Invalid Connection header"" ) ; if ( response . isCommitted ( ) ) throw new IllegalStateException ( ""Cannot upgrade committed response"" ) ; if ( _metaData == null || _metaData . getHttpVersion ( ) != HttpVersion . HTTP_1_1 ) throw new IllegalStateException ( ""Only requests over HTTP/1.1 can be upgraded"" ) ; ServletOutputStream outputStream = response . getOutputStream ( ) ; ServletInputStream inputStream = getInputStream ( ) ; HttpChannelOverHttp httpChannel11 = ( HttpChannelOverHttp ) _channel ; HttpConnection httpConnection = ( HttpConnection ) _channel . getConnection ( ) ; T upgradeHandler ; try { upgradeHandler = handlerClass . getDeclaredConstructor ( ) . 
newInstance ( ) ; } catch ( Exception e ) { throw new ServletException ( ""Unable to instantiate handler class"" , e ) ; } httpChannel11 . servletUpgrade ( ) ; AsyncContext asyncContext = forceStartAsync ( ) ; outputStream . flush ( ) ; httpConnection . getGenerator ( ) . servletUpgrade ( ) ; httpConnection . addEventListener ( new Connection . Listener ( ) { @ Override public void onClosed ( Connection connection ) { try { asyncContext . complete ( ) ; } catch ( Exception e ) { LOG . warn ( ""error during upgrade AsyncContext complete"" , e ) ; } try { upgradeHandler . destroy ( ) ; } catch ( Exception e ) { } } @ Override public void onOpened ( Connection connection ) { } } ) ; upgradeHandler . init ( new WebConnection ( ) { @ Override public void close ( ) throws Exception { try { inputStream . close ( ) ; } finally { outputStream . close ( ) ; } } @ Override public ServletInputStream getInputStream ( ) { return inputStream ; } @ Override public ServletOutputStream getOutputStream ( ) { return outputStream ; } } ) ; return upgradeHandler ; } +","public < T extends HttpUpgradeHandler > T upgrade ( Class < T > handlerClass ) throws IOException , ServletException { Response response = _channel . getResponse ( ) ; if ( response . getStatus ( ) != HttpStatus . SWITCHING_PROTOCOLS_101 ) throw new IllegalStateException ( ""Response status should be 101"" ) ; if ( response . getHeader ( ""Upgrade"" ) == null ) throw new IllegalStateException ( ""Missing Upgrade header"" ) ; if ( ! ""Upgrade"" . equalsIgnoreCase ( response . getHeader ( ""Connection"" ) ) ) throw new IllegalStateException ( ""Invalid Connection header"" ) ; if ( response . isCommitted ( ) ) throw new IllegalStateException ( ""Cannot upgrade committed response"" ) ; if ( _metaData == null || _metaData . getHttpVersion ( ) != HttpVersion . HTTP_1_1 ) throw new IllegalStateException ( ""Only requests over HTTP/1.1 can be upgraded"" ) ; ServletOutputStream outputStream = response . getOutputStream ( ) ; ServletInputStream inputStream = getInputStream ( ) ; HttpChannelOverHttp httpChannel11 = ( HttpChannelOverHttp ) _channel ; HttpConnection httpConnection = ( HttpConnection ) _channel . getConnection ( ) ; T upgradeHandler ; try { upgradeHandler = handlerClass . getDeclaredConstructor ( ) . newInstance ( ) ; } catch ( Exception e ) { throw new ServletException ( ""Unable to instantiate handler class"" , e ) ; } httpChannel11 . servletUpgrade ( ) ; AsyncContext asyncContext = forceStartAsync ( ) ; outputStream . flush ( ) ; httpConnection . getGenerator ( ) . servletUpgrade ( ) ; httpConnection . addEventListener ( new Connection . Listener ( ) { @ Override public void onClosed ( Connection connection ) { try { asyncContext . complete ( ) ; } catch ( Exception e ) { LOG . warn ( ""error during upgrade AsyncContext complete"" , e ) ; } try { upgradeHandler . destroy ( ) ; } catch ( Exception e ) { LOG . warn ( ""error during upgrade HttpUpgradeHandler destroy"" , e ) ; } } @ Override public void onOpened ( Connection connection ) { } } ) ; upgradeHandler . init ( new WebConnection ( ) { @ Override public void close ( ) throws Exception { try { inputStream . close ( ) ; } finally { outputStream . 
close ( ) ; } } @ Override public ServletInputStream getInputStream ( ) { return inputStream ; } @ Override public ServletOutputStream getOutputStream ( ) { return outputStream ; } } ) ; return upgradeHandler ; } +" +779,"protected final Object getEndpointInternal ( MessageContext messageContext ) throws Exception { String key = getLookupKeyForMessage ( messageContext ) ; if ( ! StringUtils . hasLength ( key ) ) { return null ; } if ( logger . isDebugEnabled ( ) ) { } return lookupEndpoint ( key ) ; } +","protected final Object getEndpointInternal ( MessageContext messageContext ) throws Exception { String key = getLookupKeyForMessage ( messageContext ) ; if ( ! StringUtils . hasLength ( key ) ) { return null ; } if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""Looking up endpoint for ["" + key + ""]"" ) ; } return lookupEndpoint ( key ) ; } +" +780,"private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +","private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . 
info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +" +781,"private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +","private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . 
schedule ( timerTask , gracePeriod ) ; } +" +782,"private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +","private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +" +783,"private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . 
openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +","private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +" +784,"private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . 
info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +","private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +" +785,"private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . 
close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +","private void deferLedgerLockRelease ( final long ledgerId ) { long gracePeriod = this . openLedgerRereplicationGracePeriod ; TimerTask timerTask = new TimerTask ( ) { @ Override public void run ( ) { LedgerHandle lh = null ; try { lh = admin . openLedgerNoRecovery ( ledgerId ) ; if ( isLastSegmentOpenAndMissingBookies ( lh ) ) { lh = admin . openLedger ( ledgerId ) ; } Set < LedgerFragment > fragments = getUnderreplicatedFragments ( lh ) ; for ( LedgerFragment fragment : fragments ) { if ( ! fragment . isClosed ( ) ) { lh = admin . openLedger ( ledgerId ) ; break ; } } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException "" + ""while replicating fragments"" , e ) ; } catch ( BKNoSuchLedgerExistsException bknsle ) { LOG . debug ( ""Ledger was deleted, safe to continue"" , bknsle ) ; } catch ( BKException e ) { LOG . error ( ""BKException while fencing the ledger"" + "" for rereplication of postponed ledgers"" , e ) ; } finally { try { if ( lh != null ) { lh . close ( ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; LOG . info ( ""InterruptedException while closing "" + ""ledger"" , e ) ; } catch ( BKException e ) { LOG . warn ( ""BKException while closing ledger "" , e ) ; } finally { try { underreplicationManager . releaseUnderreplicatedLedger ( ledgerId ) ; } catch ( UnavailableException e ) { shutdown ( ) ; LOG . error ( ""UnavailableException "" + ""while replicating fragments"" , e ) ; } } } } } ; pendingReplicationTimer . schedule ( timerTask , gracePeriod ) ; } +" +786,"boolean isDataStoreFactoryAvaiable ( String className ) { Class < DataStoreFactorySpi > clazz = getDataStoreFactorySpi ( className ) ; if ( clazz == null ) { return false ; } DataStoreFactorySpi factory = null ; try { factory = clazz . getDeclaredConstructor ( ) . newInstance ( ) ; } catch ( Exception e ) { if ( LOGGER . isLoggable ( Level . FINE ) ) { } } if ( factory == null ) { return false ; } return factory . isAvailable ( ) ; } +","boolean isDataStoreFactoryAvaiable ( String className ) { Class < DataStoreFactorySpi > clazz = getDataStoreFactorySpi ( className ) ; if ( clazz == null ) { return false ; } DataStoreFactorySpi factory = null ; try { factory = clazz . getDeclaredConstructor ( ) . newInstance ( ) ; } catch ( Exception e ) { if ( LOGGER . isLoggable ( Level . FINE ) ) { LOGGER . log ( Level . FINE , ""Error creating DataStore factory: "" + className , e ) ; } } if ( factory == null ) { return false ; } return factory . isAvailable ( ) ; } +" +787,"public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . 
info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +","public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +" +788,"public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +","public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . 
error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +" +789,"public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +","public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +" +790,"public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . 
error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +","public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +" +791,"public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +","public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . 
error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +" +792,"public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +","public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +" +793,"public static void main ( String [ ] args ) { try { restConfig = ConfigManager . getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +","public static void main ( String [ ] args ) { try { restConfig = ConfigManager . 
getRestConfiguration ( ) ; cellConfig = ConfigManager . getCellConfiguration ( ) ; List apiIds = createGlobalAPIs ( ) ; publishGlobalAPIs ( apiIds ) ; generateApiConfigJson ( ) ; log . info ( ""Global API creation is completed successfully.."" ) ; microgatewaySetup ( ) ; log . info ( ""Microgateway setup success"" ) ; microgatewayBuild ( ) ; log . info ( ""Microgateway build success"" ) ; unzipTargetFile ( ) ; moveUnzippedFolderToMountLocation ( ) ; log . info ( ""Init container configuration is completed successfully.."" ) ; } catch ( APIException e ) { log . error ( ""Error occurred while creating APIs in Global API manager. "" + e . getMessage ( ) , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( IOException e ) { log . error ( ""Error occurred while configuring the microgateway."" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } catch ( InterruptedException e ) { log . error ( ""Error occurred while waiting for the process completion"" , e ) ; System . exit ( Constants . Utils . ERROR_EXIT_CODE ) ; } } +" +794,"protected void rotateOutputFile ( ) throws IOException { long start = System . currentTimeMillis ( ) ; synchronized ( this . writeLock ) { closeOutputFile ( ) ; cleanupCallback ( ) ; LOG . debug ( ""Performing {} file rotation actions."" , this . rotationActions . size ( ) ) ; for ( RotationAction action : this . rotationActions ) { action . execute ( this . fs , this . currentFile ) ; } } long time = System . currentTimeMillis ( ) - start ; LOG . info ( ""File rotation took {} ms"" , time ) ; } +","protected void rotateOutputFile ( ) throws IOException { LOG . debug ( ""Rotating output file..."" ) ; long start = System . currentTimeMillis ( ) ; synchronized ( this . writeLock ) { closeOutputFile ( ) ; cleanupCallback ( ) ; LOG . debug ( ""Performing {} file rotation actions."" , this . rotationActions . size ( ) ) ; for ( RotationAction action : this . rotationActions ) { action . execute ( this . fs , this . currentFile ) ; } } long time = System . currentTimeMillis ( ) - start ; LOG . info ( ""File rotation took {} ms"" , time ) ; } +" +795,"protected void rotateOutputFile ( ) throws IOException { LOG . debug ( ""Rotating output file..."" ) ; long start = System . currentTimeMillis ( ) ; synchronized ( this . writeLock ) { closeOutputFile ( ) ; cleanupCallback ( ) ; for ( RotationAction action : this . rotationActions ) { action . execute ( this . fs , this . currentFile ) ; } } long time = System . currentTimeMillis ( ) - start ; LOG . info ( ""File rotation took {} ms"" , time ) ; } +","protected void rotateOutputFile ( ) throws IOException { LOG . debug ( ""Rotating output file..."" ) ; long start = System . currentTimeMillis ( ) ; synchronized ( this . writeLock ) { closeOutputFile ( ) ; cleanupCallback ( ) ; LOG . debug ( ""Performing {} file rotation actions."" , this . rotationActions . size ( ) ) ; for ( RotationAction action : this . rotationActions ) { action . execute ( this . fs , this . currentFile ) ; } } long time = System . currentTimeMillis ( ) - start ; LOG . info ( ""File rotation took {} ms"" , time ) ; } +" +796,"protected void rotateOutputFile ( ) throws IOException { LOG . debug ( ""Rotating output file..."" ) ; long start = System . currentTimeMillis ( ) ; synchronized ( this . writeLock ) { closeOutputFile ( ) ; cleanupCallback ( ) ; LOG . debug ( ""Performing {} file rotation actions."" , this . rotationActions . size ( ) ) ; for ( RotationAction action : this . rotationActions ) { action . execute ( this . 
fs , this . currentFile ) ; } } long time = System . currentTimeMillis ( ) - start ; } +","protected void rotateOutputFile ( ) throws IOException { LOG . debug ( ""Rotating output file..."" ) ; long start = System . currentTimeMillis ( ) ; synchronized ( this . writeLock ) { closeOutputFile ( ) ; cleanupCallback ( ) ; LOG . debug ( ""Performing {} file rotation actions."" , this . rotationActions . size ( ) ) ; for ( RotationAction action : this . rotationActions ) { action . execute ( this . fs , this . currentFile ) ; } } long time = System . currentTimeMillis ( ) - start ; LOG . info ( ""File rotation took {} ms"" , time ) ; } +" +797,"public void destroy ( ) throws Exception { executorService . shutdownNow ( ) ; executorService . awaitTermination ( 10 , TimeUnit . SECONDS ) ; } +","public void destroy ( ) throws Exception { logger . info ( ""shutting down thread pool executor"" ) ; executorService . shutdownNow ( ) ; executorService . awaitTermination ( 10 , TimeUnit . SECONDS ) ; } +" +798,"public void start ( ) { wdh = new DataWormHoleWorker ( ""DataWormHoleWorker"" , this . feature , ""dwhhandlers"" ) ; wdhThread = Executors . newSingleThreadExecutor ( ) ; wdhThread . execute ( wdh ) ; if ( log . isTraceEnable ( ) ) { } } +","public void start ( ) { wdh = new DataWormHoleWorker ( ""DataWormHoleWorker"" , this . feature , ""dwhhandlers"" ) ; wdhThread = Executors . newSingleThreadExecutor ( ) ; wdhThread . execute ( wdh ) ; if ( log . isTraceEnable ( ) ) { log . info ( this , ""DataWormHoleWorker started"" ) ; } } +" +799,"public static boolean hasLink ( WiserMessage emailMessage , LinkType linkType ) { Pattern linkPattern = getLinkRegex ( linkType ) ; Matcher matcher = linkPattern . matcher ( HasEmailRule . getEmailContent ( emailMessage ) ) ; return matcher . find ( ) ; } +","public static boolean hasLink ( WiserMessage emailMessage , LinkType linkType ) { log . info ( ""Query {} has a {} link"" , emailMessage , linkType . name ( ) ) ; Pattern linkPattern = getLinkRegex ( linkType ) ; Matcher matcher = linkPattern . matcher ( HasEmailRule . getEmailContent ( emailMessage ) ) ; return matcher . find ( ) ; } +" +800,"public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +","public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . 
trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +" +801,"public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +","public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . 
info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +" +802,"public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +","public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +" +803,"public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . 
doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +","public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +" +804,"public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { throw new ProcessException ( e ) ; } } +","public Connection getConnection ( ) throws ProcessException { try { if ( ugi != null ) { getLogger ( ) . trace ( ""getting UGI instance"" ) ; if ( kerberosUserReference . get ( ) != null ) { KerberosUser kerberosUser = kerberosUserReference . get ( ) ; getLogger ( ) . debug ( ""kerberosUser is "" + kerberosUser ) ; try { getLogger ( ) . debug ( ""checking TGT on kerberosUser "" + kerberosUser ) ; kerberosUser . 
checkTGTAndRelogin ( ) ; } catch ( LoginException e ) { throw new ProcessException ( ""Unable to relogin with kerberos credentials for "" + kerberosUser . getPrincipal ( ) , e ) ; } } else { getLogger ( ) . debug ( ""kerberosUser was null, will not refresh TGT with KerberosUser"" ) ; ugi . checkTGTAndReloginFromKeytab ( ) ; } try { return ugi . doAs ( ( PrivilegedExceptionAction < Connection > ) ( ) -> dataSource . getConnection ( ) ) ; } catch ( UndeclaredThrowableException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof SQLException ) { throw ( SQLException ) cause ; } else { throw e ; } } } else { getLogger ( ) . info ( ""Simple Authentication"" ) ; return dataSource . getConnection ( ) ; } } catch ( SQLException | IOException | InterruptedException e ) { getLogger ( ) . error ( ""Error getting Hive connection"" , e ) ; throw new ProcessException ( e ) ; } } +" +805,"public void remove ( final Collection < String > keys ) { final List < String > cacheKeys = new ArrayList < > ( keys . size ( ) ) ; final String keyPrefix = getKeyPrefix ( ) ; for ( final String key : keys ) { cacheKeys . add ( keyPrefix + key ) ; } try ( final Jedis jedis = Connections . getJedis ( ) ) { jedis . del ( cacheKeys . toArray ( new String [ ] { } ) ) ; } catch ( final Exception e ) { } } +","public void remove ( final Collection < String > keys ) { final List < String > cacheKeys = new ArrayList < > ( keys . size ( ) ) ; final String keyPrefix = getKeyPrefix ( ) ; for ( final String key : keys ) { cacheKeys . add ( keyPrefix + key ) ; } try ( final Jedis jedis = Connections . getJedis ( ) ) { jedis . del ( cacheKeys . toArray ( new String [ ] { } ) ) ; } catch ( final Exception e ) { LOGGER . log ( Level . ERROR , ""Remove data to cache with keys ["" + keys + ""] failed"" , e ) ; } } +" +806,"private void processRequest ( Request req ) { if ( ! req . hasYamcsInstance ( ) ) { sendErrorReturn ( 0 , ""instance not present in the request"" ) ; return ; } ReplicationMaster master = masters . get ( req . getYamcsInstance ( ) ) ; if ( master == null ) { sendErrorReturn ( req . getRequestSeq ( ) , ""No replication master registered for instance '"" + req . getYamcsInstance ( ) + ""''"" ) ; return ; } log . debug ( ""Received a replication request: {}, starting a new handler on the master"" , TextFormat . shortDebugString ( req ) ) ; ChannelPipeline pipeline = channelHandlerContext . channel ( ) . pipeline ( ) ; pipeline . remove ( this ) ; pipeline . addLast ( master . newChannelHandler ( req ) ) ; } +","private void processRequest ( Request req ) { if ( ! req . hasYamcsInstance ( ) ) { sendErrorReturn ( 0 , ""instance not present in the request"" ) ; return ; } ReplicationMaster master = masters . get ( req . getYamcsInstance ( ) ) ; if ( master == null ) { log . warn ( ""Received a replication request for non registered master: {}"" , TextFormat . shortDebugString ( req ) ) ; sendErrorReturn ( req . getRequestSeq ( ) , ""No replication master registered for instance '"" + req . getYamcsInstance ( ) + ""''"" ) ; return ; } log . debug ( ""Received a replication request: {}, starting a new handler on the master"" , TextFormat . shortDebugString ( req ) ) ; ChannelPipeline pipeline = channelHandlerContext . channel ( ) . pipeline ( ) ; pipeline . remove ( this ) ; pipeline . addLast ( master . newChannelHandler ( req ) ) ; } +" +807,"private void processRequest ( Request req ) { if ( ! req . 
hasYamcsInstance ( ) ) { sendErrorReturn ( 0 , ""instance not present in the request"" ) ; return ; } ReplicationMaster master = masters . get ( req . getYamcsInstance ( ) ) ; if ( master == null ) { log . warn ( ""Received a replication request for non registered master: {}"" , TextFormat . shortDebugString ( req ) ) ; sendErrorReturn ( req . getRequestSeq ( ) , ""No replication master registered for instance '"" + req . getYamcsInstance ( ) + ""''"" ) ; return ; } ChannelPipeline pipeline = channelHandlerContext . channel ( ) . pipeline ( ) ; pipeline . remove ( this ) ; pipeline . addLast ( master . newChannelHandler ( req ) ) ; } +","private void processRequest ( Request req ) { if ( ! req . hasYamcsInstance ( ) ) { sendErrorReturn ( 0 , ""instance not present in the request"" ) ; return ; } ReplicationMaster master = masters . get ( req . getYamcsInstance ( ) ) ; if ( master == null ) { log . warn ( ""Received a replication request for non registered master: {}"" , TextFormat . shortDebugString ( req ) ) ; sendErrorReturn ( req . getRequestSeq ( ) , ""No replication master registered for instance '"" + req . getYamcsInstance ( ) + ""''"" ) ; return ; } log . debug ( ""Received a replication request: {}, starting a new handler on the master"" , TextFormat . shortDebugString ( req ) ) ; ChannelPipeline pipeline = channelHandlerContext . channel ( ) . pipeline ( ) ; pipeline . remove ( this ) ; pipeline . addLast ( master . newChannelHandler ( req ) ) ; } +" +808,"public void stop ( ) { destroyConnection ( ) ; super . stop ( ) ; logger . debug ( ""IRC sink {} stopped. Metrics:{}"" , this . getName ( ) , counterGroup ) ; } +","public void stop ( ) { logger . info ( ""IRC sink {} stopping"" , this . getName ( ) ) ; destroyConnection ( ) ; super . stop ( ) ; logger . debug ( ""IRC sink {} stopped. Metrics:{}"" , this . getName ( ) , counterGroup ) ; } +" +809,"public void stop ( ) { logger . info ( ""IRC sink {} stopping"" , this . getName ( ) ) ; destroyConnection ( ) ; super . stop ( ) ; } +","public void stop ( ) { logger . info ( ""IRC sink {} stopping"" , this . getName ( ) ) ; destroyConnection ( ) ; super . stop ( ) ; logger . debug ( ""IRC sink {} stopped. Metrics:{}"" , this . getName ( ) , counterGroup ) ; } +" +810,"private boolean lockMultiple ( IgniteInternalTx tx , Iterable < IgniteTxEntry > entries ) throws IgniteCheckedException { assert tx . optimistic ( ) || ! tx . local ( ) ; long remainingTime = tx . remainingTime ( ) ; long timeout = remainingTime < 0 ? 0 : remainingTime ; GridCacheVersion serOrder = ( tx . serializable ( ) && tx . optimistic ( ) ) ? tx . nearXidVersion ( ) : null ; for ( IgniteTxEntry txEntry1 : entries ) { if ( ! txEntry1 . markPrepared ( ) || txEntry1 . explicitVersion ( ) != null ) continue ; GridCacheContext cacheCtx = txEntry1 . context ( ) ; while ( true ) { cctx . database ( ) . checkpointReadLock ( ) ; try { GridCacheEntryEx entry1 = txEntry1 . cached ( ) ; assert entry1 != null : txEntry1 ; assert ! entry1 . detached ( ) : ""Expected non-detached entry for near transaction "" + ""[locNodeId="" + cctx . localNodeId ( ) + "", entry="" + entry1 + ']' ; GridCacheVersion serReadVer = txEntry1 . entryReadVersion ( ) ; assert serReadVer == null || ( tx . optimistic ( ) && tx . serializable ( ) ) : txEntry1 ; boolean read = serOrder != null && txEntry1 . op ( ) == READ ; entry1 . unswap ( ) ; if ( ! entry1 . 
tmLock ( tx , timeout , serOrder , serReadVer , read ) ) { for ( IgniteTxEntry txEntry2 : entries ) { if ( txEntry2 == txEntry1 ) break ; txUnlock ( tx , txEntry2 ) ; } return false ; } break ; } catch ( GridCacheEntryRemovedException ignored ) { try { txEntry1 . cached ( cacheCtx . cache ( ) . entryEx ( txEntry1 . key ( ) , tx . topologyVersion ( ) ) ) ; } catch ( GridDhtInvalidPartitionException e ) { assert tx . dht ( ) : ""Received invalid partition for non DHT transaction [tx="" + tx + "", invalidPart="" + e . partition ( ) + ']' ; tx . addInvalidPartition ( cacheCtx . cacheId ( ) , e . partition ( ) ) ; break ; } } catch ( GridDistributedLockCancelledException ignore ) { tx . setRollbackOnly ( ) ; throw new IgniteCheckedException ( ""Entry lock has been cancelled for transaction: "" + tx ) ; } finally { cctx . database ( ) . checkpointReadUnlock ( ) ; } } } return true ; } +","private boolean lockMultiple ( IgniteInternalTx tx , Iterable < IgniteTxEntry > entries ) throws IgniteCheckedException { assert tx . optimistic ( ) || ! tx . local ( ) ; long remainingTime = tx . remainingTime ( ) ; long timeout = remainingTime < 0 ? 0 : remainingTime ; GridCacheVersion serOrder = ( tx . serializable ( ) && tx . optimistic ( ) ) ? tx . nearXidVersion ( ) : null ; for ( IgniteTxEntry txEntry1 : entries ) { if ( ! txEntry1 . markPrepared ( ) || txEntry1 . explicitVersion ( ) != null ) continue ; GridCacheContext cacheCtx = txEntry1 . context ( ) ; while ( true ) { cctx . database ( ) . checkpointReadLock ( ) ; try { GridCacheEntryEx entry1 = txEntry1 . cached ( ) ; assert entry1 != null : txEntry1 ; assert ! entry1 . detached ( ) : ""Expected non-detached entry for near transaction "" + ""[locNodeId="" + cctx . localNodeId ( ) + "", entry="" + entry1 + ']' ; GridCacheVersion serReadVer = txEntry1 . entryReadVersion ( ) ; assert serReadVer == null || ( tx . optimistic ( ) && tx . serializable ( ) ) : txEntry1 ; boolean read = serOrder != null && txEntry1 . op ( ) == READ ; entry1 . unswap ( ) ; if ( ! entry1 . tmLock ( tx , timeout , serOrder , serReadVer , read ) ) { for ( IgniteTxEntry txEntry2 : entries ) { if ( txEntry2 == txEntry1 ) break ; txUnlock ( tx , txEntry2 ) ; } return false ; } break ; } catch ( GridCacheEntryRemovedException ignored ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Got removed entry in TM lockMultiple(..) method (will retry): "" + txEntry1 ) ; try { txEntry1 . cached ( cacheCtx . cache ( ) . entryEx ( txEntry1 . key ( ) , tx . topologyVersion ( ) ) ) ; } catch ( GridDhtInvalidPartitionException e ) { assert tx . dht ( ) : ""Received invalid partition for non DHT transaction [tx="" + tx + "", invalidPart="" + e . partition ( ) + ']' ; tx . addInvalidPartition ( cacheCtx . cacheId ( ) , e . partition ( ) ) ; break ; } } catch ( GridDistributedLockCancelledException ignore ) { tx . setRollbackOnly ( ) ; throw new IgniteCheckedException ( ""Entry lock has been cancelled for transaction: "" + tx ) ; } finally { cctx . database ( ) . checkpointReadUnlock ( ) ; } } } return true ; } +" +811,"private void runUpdate ( ) { try { updateFromControllerInfo ( ) ; startTouchJob ( ) ; updateStatus ( ThingStatus . ONLINE ) ; } catch ( NanoleafUnauthorizedException nae ) { logger . warn ( ""Status update unauthorized: {}"" , nae . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . 
COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.invalidToken"" ) ; String localAuthToken = getAuthToken ( ) ; if ( localAuthToken == null || localAuthToken . isEmpty ( ) ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_PENDING , ""@text/error.nanoleaf.controller.noToken"" ) ; } } catch ( NanoleafException ne ) { logger . warn ( ""Status update failed: {}"" , ne . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.communication"" ) ; } catch ( RuntimeException e ) { logger . warn ( ""Update job failed"" , e ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . NONE , ""@text/error.nanoleaf.controller.runtime"" ) ; } } +","private void runUpdate ( ) { logger . debug ( ""Run update job"" ) ; try { updateFromControllerInfo ( ) ; startTouchJob ( ) ; updateStatus ( ThingStatus . ONLINE ) ; } catch ( NanoleafUnauthorizedException nae ) { logger . warn ( ""Status update unauthorized: {}"" , nae . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.invalidToken"" ) ; String localAuthToken = getAuthToken ( ) ; if ( localAuthToken == null || localAuthToken . isEmpty ( ) ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_PENDING , ""@text/error.nanoleaf.controller.noToken"" ) ; } } catch ( NanoleafException ne ) { logger . warn ( ""Status update failed: {}"" , ne . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.communication"" ) ; } catch ( RuntimeException e ) { logger . warn ( ""Update job failed"" , e ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . NONE , ""@text/error.nanoleaf.controller.runtime"" ) ; } } +" +812,"private void runUpdate ( ) { logger . debug ( ""Run update job"" ) ; try { updateFromControllerInfo ( ) ; startTouchJob ( ) ; updateStatus ( ThingStatus . ONLINE ) ; } catch ( NanoleafUnauthorizedException nae ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.invalidToken"" ) ; String localAuthToken = getAuthToken ( ) ; if ( localAuthToken == null || localAuthToken . isEmpty ( ) ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_PENDING , ""@text/error.nanoleaf.controller.noToken"" ) ; } } catch ( NanoleafException ne ) { logger . warn ( ""Status update failed: {}"" , ne . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.communication"" ) ; } catch ( RuntimeException e ) { logger . warn ( ""Update job failed"" , e ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . NONE , ""@text/error.nanoleaf.controller.runtime"" ) ; } } +","private void runUpdate ( ) { logger . debug ( ""Run update job"" ) ; try { updateFromControllerInfo ( ) ; startTouchJob ( ) ; updateStatus ( ThingStatus . ONLINE ) ; } catch ( NanoleafUnauthorizedException nae ) { logger . warn ( ""Status update unauthorized: {}"" , nae . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.invalidToken"" ) ; String localAuthToken = getAuthToken ( ) ; if ( localAuthToken == null || localAuthToken . isEmpty ( ) ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . 
CONFIGURATION_PENDING , ""@text/error.nanoleaf.controller.noToken"" ) ; } } catch ( NanoleafException ne ) { logger . warn ( ""Status update failed: {}"" , ne . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.communication"" ) ; } catch ( RuntimeException e ) { logger . warn ( ""Update job failed"" , e ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . NONE , ""@text/error.nanoleaf.controller.runtime"" ) ; } } +" +813,"private void runUpdate ( ) { logger . debug ( ""Run update job"" ) ; try { updateFromControllerInfo ( ) ; startTouchJob ( ) ; updateStatus ( ThingStatus . ONLINE ) ; } catch ( NanoleafUnauthorizedException nae ) { logger . warn ( ""Status update unauthorized: {}"" , nae . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.invalidToken"" ) ; String localAuthToken = getAuthToken ( ) ; if ( localAuthToken == null || localAuthToken . isEmpty ( ) ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_PENDING , ""@text/error.nanoleaf.controller.noToken"" ) ; } } catch ( NanoleafException ne ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.communication"" ) ; } catch ( RuntimeException e ) { logger . warn ( ""Update job failed"" , e ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . NONE , ""@text/error.nanoleaf.controller.runtime"" ) ; } } +","private void runUpdate ( ) { logger . debug ( ""Run update job"" ) ; try { updateFromControllerInfo ( ) ; startTouchJob ( ) ; updateStatus ( ThingStatus . ONLINE ) ; } catch ( NanoleafUnauthorizedException nae ) { logger . warn ( ""Status update unauthorized: {}"" , nae . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.invalidToken"" ) ; String localAuthToken = getAuthToken ( ) ; if ( localAuthToken == null || localAuthToken . isEmpty ( ) ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_PENDING , ""@text/error.nanoleaf.controller.noToken"" ) ; } } catch ( NanoleafException ne ) { logger . warn ( ""Status update failed: {}"" , ne . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.communication"" ) ; } catch ( RuntimeException e ) { logger . warn ( ""Update job failed"" , e ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . NONE , ""@text/error.nanoleaf.controller.runtime"" ) ; } } +" +814,"private void runUpdate ( ) { logger . debug ( ""Run update job"" ) ; try { updateFromControllerInfo ( ) ; startTouchJob ( ) ; updateStatus ( ThingStatus . ONLINE ) ; } catch ( NanoleafUnauthorizedException nae ) { logger . warn ( ""Status update unauthorized: {}"" , nae . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.invalidToken"" ) ; String localAuthToken = getAuthToken ( ) ; if ( localAuthToken == null || localAuthToken . isEmpty ( ) ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_PENDING , ""@text/error.nanoleaf.controller.noToken"" ) ; } } catch ( NanoleafException ne ) { logger . warn ( ""Status update failed: {}"" , ne . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . 
COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.communication"" ) ; } catch ( RuntimeException e ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . NONE , ""@text/error.nanoleaf.controller.runtime"" ) ; } } +","private void runUpdate ( ) { logger . debug ( ""Run update job"" ) ; try { updateFromControllerInfo ( ) ; startTouchJob ( ) ; updateStatus ( ThingStatus . ONLINE ) ; } catch ( NanoleafUnauthorizedException nae ) { logger . warn ( ""Status update unauthorized: {}"" , nae . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.invalidToken"" ) ; String localAuthToken = getAuthToken ( ) ; if ( localAuthToken == null || localAuthToken . isEmpty ( ) ) { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . CONFIGURATION_PENDING , ""@text/error.nanoleaf.controller.noToken"" ) ; } } catch ( NanoleafException ne ) { logger . warn ( ""Status update failed: {}"" , ne . getMessage ( ) ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , ""@text/error.nanoleaf.controller.communication"" ) ; } catch ( RuntimeException e ) { logger . warn ( ""Update job failed"" , e ) ; updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . NONE , ""@text/error.nanoleaf.controller.runtime"" ) ; } } +" +815,"private void resolveNextLink ( ) { final String field = links_to_read . peek ( ) ; if ( field == null ) return ; String link_name = record_name + ""."" + field ; if ( Settings . read_long_fields ) link_name += ""$"" ; try { final PV pv = PVPool . getPV ( link_name ) ; link_flow = pv . onValueEvent ( ) . firstOrError ( ) . subscribe ( link_consumer ) ; link_pv . set ( pv ) ; } catch ( Exception ex ) { logger . log ( Level . WARNING , ""Cannot create link PV"" + link_name , ex ) ; } } +","private void resolveNextLink ( ) { final String field = links_to_read . peek ( ) ; if ( field == null ) return ; String link_name = record_name + ""."" + field ; if ( Settings . read_long_fields ) link_name += ""$"" ; try { logger . log ( TRACE , ""Resolve "" + link_name ) ; final PV pv = PVPool . getPV ( link_name ) ; link_flow = pv . onValueEvent ( ) . firstOrError ( ) . subscribe ( link_consumer ) ; link_pv . set ( pv ) ; } catch ( Exception ex ) { logger . log ( Level . WARNING , ""Cannot create link PV"" + link_name , ex ) ; } } +" +816,"private void resolveNextLink ( ) { final String field = links_to_read . peek ( ) ; if ( field == null ) return ; String link_name = record_name + ""."" + field ; if ( Settings . read_long_fields ) link_name += ""$"" ; try { logger . log ( TRACE , ""Resolve "" + link_name ) ; final PV pv = PVPool . getPV ( link_name ) ; link_flow = pv . onValueEvent ( ) . firstOrError ( ) . subscribe ( link_consumer ) ; link_pv . set ( pv ) ; } catch ( Exception ex ) { } } +","private void resolveNextLink ( ) { final String field = links_to_read . peek ( ) ; if ( field == null ) return ; String link_name = record_name + ""."" + field ; if ( Settings . read_long_fields ) link_name += ""$"" ; try { logger . log ( TRACE , ""Resolve "" + link_name ) ; final PV pv = PVPool . getPV ( link_name ) ; link_flow = pv . onValueEvent ( ) . firstOrError ( ) . subscribe ( link_consumer ) ; link_pv . set ( pv ) ; } catch ( Exception ex ) { logger . log ( Level . WARNING , ""Cannot create link PV"" + link_name , ex ) ; } } +" +817,"public void start ( ) { super . start ( ) ; RestrictionTracker < OffsetHolder , ? > tracker = restrictionTrackers . get ( restrictionTrackers . keySet ( ) . 
iterator ( ) . next ( ) ) ; this . history = ( List < byte [ ] > ) tracker . currentRestriction ( ) . history ; } +","public void start ( ) { super . start ( ) ; LOG . debug ( ""------------ STARTING THE DATABASE HISTORY! - trackers: {} - config: {}"" , restrictionTrackers , config . asMap ( ) ) ; RestrictionTracker < OffsetHolder , ? > tracker = restrictionTrackers . get ( restrictionTrackers . keySet ( ) . iterator ( ) . next ( ) ) ; this . history = ( List < byte [ ] > ) tracker . currentRestriction ( ) . history ; } +" +818,"public MemberRequest findByKey ( String key ) throws NoSuchMemberRequestException { MemberRequest memberRequest = fetchByKey ( key ) ; if ( memberRequest == null ) { StringBundler sb = new StringBundler ( 4 ) ; sb . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; sb . append ( ""key="" ) ; sb . append ( key ) ; sb . append ( ""}"" ) ; if ( _log . isDebugEnabled ( ) ) { } throw new NoSuchMemberRequestException ( sb . toString ( ) ) ; } return memberRequest ; } +","public MemberRequest findByKey ( String key ) throws NoSuchMemberRequestException { MemberRequest memberRequest = fetchByKey ( key ) ; if ( memberRequest == null ) { StringBundler sb = new StringBundler ( 4 ) ; sb . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; sb . append ( ""key="" ) ; sb . append ( key ) ; sb . append ( ""}"" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( sb . toString ( ) ) ; } throw new NoSuchMemberRequestException ( sb . toString ( ) ) ; } return memberRequest ; } +" +819,"@ GemfireFunction ( id = CREATE_INDEX_FUNCTION_ID ) public boolean createIndex ( IndexDefinition indexDefinition ) { Cache gemfireCache = resolveCache ( ) ; if ( isNonExistingIndex ( gemfireCache , indexDefinition ) ) { if ( logger . isInfoEnabled ( ) ) { } QueryService queryService = gemfireCache . getQueryService ( ) ; try { switch ( indexDefinition . getIndexType ( ) ) { case KEY : case PRIMARY_KEY : queryService . createKeyIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; case HASH : queryService . createHashIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; case FUNCTIONAL : queryService . createIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; default : return false ; } } catch ( QueryException cause ) { throw GemfireCacheUtils . convertGemfireAccessException ( cause ) ; } } else { if ( logger . isInfoEnabled ( ) ) { logger . info ( ""Index with name [{}] already exists"" , indexDefinition . getName ( ) ) ; } return false ; } } +","@ GemfireFunction ( id = CREATE_INDEX_FUNCTION_ID ) public boolean createIndex ( IndexDefinition indexDefinition ) { Cache gemfireCache = resolveCache ( ) ; if ( isNonExistingIndex ( gemfireCache , indexDefinition ) ) { if ( logger . isInfoEnabled ( ) ) { logger . info ( ""Creating Index with name [{}] having expression [{}] on Region [{}] with type [{}]"" , indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) , indexDefinition . getIndexType ( ) ) ; } QueryService queryService = gemfireCache . getQueryService ( ) ; try { switch ( indexDefinition . getIndexType ( ) ) { case KEY : case PRIMARY_KEY : queryService . createKeyIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; case HASH : queryService . 
createHashIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; case FUNCTIONAL : queryService . createIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; default : return false ; } } catch ( QueryException cause ) { throw GemfireCacheUtils . convertGemfireAccessException ( cause ) ; } } else { if ( logger . isInfoEnabled ( ) ) { logger . info ( ""Index with name [{}] already exists"" , indexDefinition . getName ( ) ) ; } return false ; } } +" +820,"@ GemfireFunction ( id = CREATE_INDEX_FUNCTION_ID ) public boolean createIndex ( IndexDefinition indexDefinition ) { Cache gemfireCache = resolveCache ( ) ; if ( isNonExistingIndex ( gemfireCache , indexDefinition ) ) { if ( logger . isInfoEnabled ( ) ) { logger . info ( ""Creating Index with name [{}] having expression [{}] on Region [{}] with type [{}]"" , indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) , indexDefinition . getIndexType ( ) ) ; } QueryService queryService = gemfireCache . getQueryService ( ) ; try { switch ( indexDefinition . getIndexType ( ) ) { case KEY : case PRIMARY_KEY : queryService . createKeyIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; case HASH : queryService . createHashIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; case FUNCTIONAL : queryService . createIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; default : return false ; } } catch ( QueryException cause ) { throw GemfireCacheUtils . convertGemfireAccessException ( cause ) ; } } else { if ( logger . isInfoEnabled ( ) ) { } return false ; } } +","@ GemfireFunction ( id = CREATE_INDEX_FUNCTION_ID ) public boolean createIndex ( IndexDefinition indexDefinition ) { Cache gemfireCache = resolveCache ( ) ; if ( isNonExistingIndex ( gemfireCache , indexDefinition ) ) { if ( logger . isInfoEnabled ( ) ) { logger . info ( ""Creating Index with name [{}] having expression [{}] on Region [{}] with type [{}]"" , indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) , indexDefinition . getIndexType ( ) ) ; } QueryService queryService = gemfireCache . getQueryService ( ) ; try { switch ( indexDefinition . getIndexType ( ) ) { case KEY : case PRIMARY_KEY : queryService . createKeyIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; case HASH : queryService . createHashIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; case FUNCTIONAL : queryService . createIndex ( indexDefinition . getName ( ) , indexDefinition . getExpression ( ) , indexDefinition . getFromClause ( ) ) ; return true ; default : return false ; } } catch ( QueryException cause ) { throw GemfireCacheUtils . convertGemfireAccessException ( cause ) ; } } else { if ( logger . isInfoEnabled ( ) ) { logger . info ( ""Index with name [{}] already exists"" , indexDefinition . 
getName ( ) ) ; } return false ; } } +" +821,"private ActionCallbackDto manageBarrierResults ( StratioStreamingMessage message , ActionCallbackDto reply , String path , Boolean success ) throws Exception { ActionCallbackDto clusterReply = reply ; if ( ! success ) { clusterReply = new ActionCallbackDto ( ReplyCode . KO_NODE_NOT_REPLY . getCode ( ) , ReplyCode . KO_NODE_NOT_REPLY . getMessage ( ) ) ; } else { logger . debug ( ""Leaving ACK barrier for path: {} WITH SUCCESS"" , path ) ; if ( reply . getErrorCode ( ) == ReplyCode . OK . getCode ( ) ) { Gson gson = new Gson ( ) ; Boolean koResponse = false ; for ( int i = 0 ; i < nodesToCheck . size ( ) && ! koResponse ; i ++ ) { String nodePath = path . concat ( ""/"" ) . concat ( nodesToCheck . get ( i ) ) ; String data = new String ( client . getData ( ) . forPath ( nodePath ) ) ; ActionCallbackDto parsedResponse = gson . fromJson ( data , ActionCallbackDto . class ) ; if ( parsedResponse . getErrorCode ( ) != ReplyCode . OK . getCode ( ) ) { clusterReply = parsedResponse ; koResponse = true ; } } } } zkUtils . createZNodeJsonReply ( message , clusterReply ) ; client . delete ( ) . deletingChildrenIfNeeded ( ) . forPath ( path ) ; return clusterReply ; } +","private ActionCallbackDto manageBarrierResults ( StratioStreamingMessage message , ActionCallbackDto reply , String path , Boolean success ) throws Exception { ActionCallbackDto clusterReply = reply ; if ( ! success ) { logger . debug ( ""Leaving ACK barrier for path: {} WITH NO SUCCESS"" , path ) ; clusterReply = new ActionCallbackDto ( ReplyCode . KO_NODE_NOT_REPLY . getCode ( ) , ReplyCode . KO_NODE_NOT_REPLY . getMessage ( ) ) ; } else { logger . debug ( ""Leaving ACK barrier for path: {} WITH SUCCESS"" , path ) ; if ( reply . getErrorCode ( ) == ReplyCode . OK . getCode ( ) ) { Gson gson = new Gson ( ) ; Boolean koResponse = false ; for ( int i = 0 ; i < nodesToCheck . size ( ) && ! koResponse ; i ++ ) { String nodePath = path . concat ( ""/"" ) . concat ( nodesToCheck . get ( i ) ) ; String data = new String ( client . getData ( ) . forPath ( nodePath ) ) ; ActionCallbackDto parsedResponse = gson . fromJson ( data , ActionCallbackDto . class ) ; if ( parsedResponse . getErrorCode ( ) != ReplyCode . OK . getCode ( ) ) { clusterReply = parsedResponse ; koResponse = true ; } } } } zkUtils . createZNodeJsonReply ( message , clusterReply ) ; client . delete ( ) . deletingChildrenIfNeeded ( ) . forPath ( path ) ; return clusterReply ; } +" +822,"private ActionCallbackDto manageBarrierResults ( StratioStreamingMessage message , ActionCallbackDto reply , String path , Boolean success ) throws Exception { ActionCallbackDto clusterReply = reply ; if ( ! success ) { logger . debug ( ""Leaving ACK barrier for path: {} WITH NO SUCCESS"" , path ) ; clusterReply = new ActionCallbackDto ( ReplyCode . KO_NODE_NOT_REPLY . getCode ( ) , ReplyCode . KO_NODE_NOT_REPLY . getMessage ( ) ) ; } else { if ( reply . getErrorCode ( ) == ReplyCode . OK . getCode ( ) ) { Gson gson = new Gson ( ) ; Boolean koResponse = false ; for ( int i = 0 ; i < nodesToCheck . size ( ) && ! koResponse ; i ++ ) { String nodePath = path . concat ( ""/"" ) . concat ( nodesToCheck . get ( i ) ) ; String data = new String ( client . getData ( ) . forPath ( nodePath ) ) ; ActionCallbackDto parsedResponse = gson . fromJson ( data , ActionCallbackDto . class ) ; if ( parsedResponse . getErrorCode ( ) != ReplyCode . OK . getCode ( ) ) { clusterReply = parsedResponse ; koResponse = true ; } } } } zkUtils . 
createZNodeJsonReply ( message , clusterReply ) ; client . delete ( ) . deletingChildrenIfNeeded ( ) . forPath ( path ) ; return clusterReply ; } +","private ActionCallbackDto manageBarrierResults ( StratioStreamingMessage message , ActionCallbackDto reply , String path , Boolean success ) throws Exception { ActionCallbackDto clusterReply = reply ; if ( ! success ) { logger . debug ( ""Leaving ACK barrier for path: {} WITH NO SUCCESS"" , path ) ; clusterReply = new ActionCallbackDto ( ReplyCode . KO_NODE_NOT_REPLY . getCode ( ) , ReplyCode . KO_NODE_NOT_REPLY . getMessage ( ) ) ; } else { logger . debug ( ""Leaving ACK barrier for path: {} WITH SUCCESS"" , path ) ; if ( reply . getErrorCode ( ) == ReplyCode . OK . getCode ( ) ) { Gson gson = new Gson ( ) ; Boolean koResponse = false ; for ( int i = 0 ; i < nodesToCheck . size ( ) && ! koResponse ; i ++ ) { String nodePath = path . concat ( ""/"" ) . concat ( nodesToCheck . get ( i ) ) ; String data = new String ( client . getData ( ) . forPath ( nodePath ) ) ; ActionCallbackDto parsedResponse = gson . fromJson ( data , ActionCallbackDto . class ) ; if ( parsedResponse . getErrorCode ( ) != ReplyCode . OK . getCode ( ) ) { clusterReply = parsedResponse ; koResponse = true ; } } } } zkUtils . createZNodeJsonReply ( message , clusterReply ) ; client . delete ( ) . deletingChildrenIfNeeded ( ) . forPath ( path ) ; return clusterReply ; } +" +823,"protected String formatQuery ( String query ) { try { Objects . requireNonNull ( query ) ; return this . formatter . format ( query ) ; } catch ( Exception e ) { } return ""FORMATTER ERROR!"" ; } +","protected String formatQuery ( String query ) { try { Objects . requireNonNull ( query ) ; return this . formatter . format ( query ) ; } catch ( Exception e ) { log . error ( ""Query formatter failed!"" , e ) ; } return ""FORMATTER ERROR!"" ; } +" +824,"public boolean isSatisified ( ) throws Exception { return resumedCount . get ( ) >= WORKER_COUNT ; } +","public boolean isSatisified ( ) throws Exception { LOG . debug ( ""Test run waiting for connections to get resumed.. at: "" + resumedCount . get ( ) ) ; return resumedCount . get ( ) >= WORKER_COUNT ; } +" +825,"private Collection < DataObject > getAllItemsInDatabox ( URI databoxUri ) throws NotFoundException { PrivacyPreference databox = this . getResourceStore ( ) . get ( databoxUri , PrivacyPreference . class ) ; Collection < Resource > itemUris = databox . getAllAppliesToResource_as ( ) . asList ( ) ; Collection < DataObject > items = new ArrayList < DataObject > ( ) ; for ( Resource item : itemUris ) { try { items . add ( this . getResourceStore ( ) . get ( item . asURI ( ) , DataObject . class ) ) ; } catch ( NotFoundException e ) { } } return items ; } +","private Collection < DataObject > getAllItemsInDatabox ( URI databoxUri ) throws NotFoundException { PrivacyPreference databox = this . getResourceStore ( ) . get ( databoxUri , PrivacyPreference . class ) ; Collection < Resource > itemUris = databox . getAllAppliesToResource_as ( ) . asList ( ) ; Collection < DataObject > items = new ArrayList < DataObject > ( ) ; for ( Resource item : itemUris ) { try { items . add ( this . getResourceStore ( ) . get ( item . asURI ( ) , DataObject . class ) ) ; } catch ( NotFoundException e ) { logger . warn ( ""Item "" + item . asURI ( ) + "" is in databox "" + databox . 
asURI ( ) + "" but it is does not exist."" ) ; } } return items ; } +" +826,"public ServerConfiguration getServerConfiguration ( ) { if ( containsRadiusServerConfiguration ( ) == false ) return null ; try { return persistenceEntryManager . find ( ServerConfiguration . class , getServerConfigurationDn ( ) ) ; } catch ( MappingException e ) { } return null ; } +","public ServerConfiguration getServerConfiguration ( ) { if ( containsRadiusServerConfiguration ( ) == false ) return null ; try { return persistenceEntryManager . find ( ServerConfiguration . class , getServerConfigurationDn ( ) ) ; } catch ( MappingException e ) { log . error ( ""Failed to load radius server configuration"" , e ) ; } return null ; } +" +827,"private void checkExecutionRequests ( ) { synchronized ( lock ) { for ( Map . Entry < ExecutionRequestKey , ThreadTimeCombination > executionRequestThreadEntry : executionRequestThreadMap . entrySet ( ) ) { if ( isTerminated ( executionRequestThreadEntry . getKey ( ) ) ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} completed, remove from monitoring"" , executionRequestThreadEntry . toString ( ) ) ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } else if ( executionRequestThreadEntry . getValue ( ) . startTimestamp . plus ( timeout , ChronoUnit . MINUTES ) . isBefore ( LocalDateTime . now ( ) ) ) { log . info ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} exceeded timeout"" , executionRequestThreadEntry . toString ( ) ) ) ; try { executionRequestThreadEntry . getValue ( ) . thread . interrupt ( ) ; executionRequestThreadEntry . getValue ( ) . thread . join ( 5000 ) ; } catch ( InterruptedException e ) { log . warn ( MessageFormat . format ( ""executionrequestmonitor=unable to close blocking Execution Request {}"" , executionRequestThreadEntry . getKey ( ) . toString ( ) ) ) ; } ExecutionRequestConfiguration . getInstance ( ) . get ( executionRequestThreadEntry . getKey ( ) ) . ifPresent ( executionRequest -> { executionRequest . getScriptExecutionRequests ( ) . forEach ( this :: markAborted ) ; ExecutionRequestConfiguration . getInstance ( ) . update ( executionRequest ) ; } ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } } } } +","private void checkExecutionRequests ( ) { synchronized ( lock ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=({0}) still executing"" , executionRequestThreadMap . keySet ( ) . stream ( ) . map ( MetadataKey :: toString ) . collect ( Collectors . joining ( "", "" ) ) ) ) ; for ( Map . Entry < ExecutionRequestKey , ThreadTimeCombination > executionRequestThreadEntry : executionRequestThreadMap . entrySet ( ) ) { if ( isTerminated ( executionRequestThreadEntry . getKey ( ) ) ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} completed, remove from monitoring"" , executionRequestThreadEntry . toString ( ) ) ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } else if ( executionRequestThreadEntry . getValue ( ) . startTimestamp . plus ( timeout , ChronoUnit . MINUTES ) . isBefore ( LocalDateTime . now ( ) ) ) { log . info ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} exceeded timeout"" , executionRequestThreadEntry . toString ( ) ) ) ; try { executionRequestThreadEntry . getValue ( ) . thread . interrupt ( ) ; executionRequestThreadEntry . getValue ( ) . thread . 
join ( 5000 ) ; } catch ( InterruptedException e ) { log . warn ( MessageFormat . format ( ""executionrequestmonitor=unable to close blocking Execution Request {}"" , executionRequestThreadEntry . getKey ( ) . toString ( ) ) ) ; } ExecutionRequestConfiguration . getInstance ( ) . get ( executionRequestThreadEntry . getKey ( ) ) . ifPresent ( executionRequest -> { executionRequest . getScriptExecutionRequests ( ) . forEach ( this :: markAborted ) ; ExecutionRequestConfiguration . getInstance ( ) . update ( executionRequest ) ; } ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } } } } +" +828,"private void checkExecutionRequests ( ) { synchronized ( lock ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=({0}) still executing"" , executionRequestThreadMap . keySet ( ) . stream ( ) . map ( MetadataKey :: toString ) . collect ( Collectors . joining ( "", "" ) ) ) ) ; for ( Map . Entry < ExecutionRequestKey , ThreadTimeCombination > executionRequestThreadEntry : executionRequestThreadMap . entrySet ( ) ) { if ( isTerminated ( executionRequestThreadEntry . getKey ( ) ) ) { executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } else if ( executionRequestThreadEntry . getValue ( ) . startTimestamp . plus ( timeout , ChronoUnit . MINUTES ) . isBefore ( LocalDateTime . now ( ) ) ) { log . info ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} exceeded timeout"" , executionRequestThreadEntry . toString ( ) ) ) ; try { executionRequestThreadEntry . getValue ( ) . thread . interrupt ( ) ; executionRequestThreadEntry . getValue ( ) . thread . join ( 5000 ) ; } catch ( InterruptedException e ) { log . warn ( MessageFormat . format ( ""executionrequestmonitor=unable to close blocking Execution Request {}"" , executionRequestThreadEntry . getKey ( ) . toString ( ) ) ) ; } ExecutionRequestConfiguration . getInstance ( ) . get ( executionRequestThreadEntry . getKey ( ) ) . ifPresent ( executionRequest -> { executionRequest . getScriptExecutionRequests ( ) . forEach ( this :: markAborted ) ; ExecutionRequestConfiguration . getInstance ( ) . update ( executionRequest ) ; } ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } } } } +","private void checkExecutionRequests ( ) { synchronized ( lock ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=({0}) still executing"" , executionRequestThreadMap . keySet ( ) . stream ( ) . map ( MetadataKey :: toString ) . collect ( Collectors . joining ( "", "" ) ) ) ) ; for ( Map . Entry < ExecutionRequestKey , ThreadTimeCombination > executionRequestThreadEntry : executionRequestThreadMap . entrySet ( ) ) { if ( isTerminated ( executionRequestThreadEntry . getKey ( ) ) ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} completed, remove from monitoring"" , executionRequestThreadEntry . toString ( ) ) ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } else if ( executionRequestThreadEntry . getValue ( ) . startTimestamp . plus ( timeout , ChronoUnit . MINUTES ) . isBefore ( LocalDateTime . now ( ) ) ) { log . info ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} exceeded timeout"" , executionRequestThreadEntry . toString ( ) ) ) ; try { executionRequestThreadEntry . getValue ( ) . thread . interrupt ( ) ; executionRequestThreadEntry . getValue ( ) . thread . 
join ( 5000 ) ; } catch ( InterruptedException e ) { log . warn ( MessageFormat . format ( ""executionrequestmonitor=unable to close blocking Execution Request {}"" , executionRequestThreadEntry . getKey ( ) . toString ( ) ) ) ; } ExecutionRequestConfiguration . getInstance ( ) . get ( executionRequestThreadEntry . getKey ( ) ) . ifPresent ( executionRequest -> { executionRequest . getScriptExecutionRequests ( ) . forEach ( this :: markAborted ) ; ExecutionRequestConfiguration . getInstance ( ) . update ( executionRequest ) ; } ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } } } } +" +829,"private void checkExecutionRequests ( ) { synchronized ( lock ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=({0}) still executing"" , executionRequestThreadMap . keySet ( ) . stream ( ) . map ( MetadataKey :: toString ) . collect ( Collectors . joining ( "", "" ) ) ) ) ; for ( Map . Entry < ExecutionRequestKey , ThreadTimeCombination > executionRequestThreadEntry : executionRequestThreadMap . entrySet ( ) ) { if ( isTerminated ( executionRequestThreadEntry . getKey ( ) ) ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} completed, remove from monitoring"" , executionRequestThreadEntry . toString ( ) ) ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } else if ( executionRequestThreadEntry . getValue ( ) . startTimestamp . plus ( timeout , ChronoUnit . MINUTES ) . isBefore ( LocalDateTime . now ( ) ) ) { try { executionRequestThreadEntry . getValue ( ) . thread . interrupt ( ) ; executionRequestThreadEntry . getValue ( ) . thread . join ( 5000 ) ; } catch ( InterruptedException e ) { log . warn ( MessageFormat . format ( ""executionrequestmonitor=unable to close blocking Execution Request {}"" , executionRequestThreadEntry . getKey ( ) . toString ( ) ) ) ; } ExecutionRequestConfiguration . getInstance ( ) . get ( executionRequestThreadEntry . getKey ( ) ) . ifPresent ( executionRequest -> { executionRequest . getScriptExecutionRequests ( ) . forEach ( this :: markAborted ) ; ExecutionRequestConfiguration . getInstance ( ) . update ( executionRequest ) ; } ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } } } } +","private void checkExecutionRequests ( ) { synchronized ( lock ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=({0}) still executing"" , executionRequestThreadMap . keySet ( ) . stream ( ) . map ( MetadataKey :: toString ) . collect ( Collectors . joining ( "", "" ) ) ) ) ; for ( Map . Entry < ExecutionRequestKey , ThreadTimeCombination > executionRequestThreadEntry : executionRequestThreadMap . entrySet ( ) ) { if ( isTerminated ( executionRequestThreadEntry . getKey ( ) ) ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} completed, remove from monitoring"" , executionRequestThreadEntry . toString ( ) ) ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } else if ( executionRequestThreadEntry . getValue ( ) . startTimestamp . plus ( timeout , ChronoUnit . MINUTES ) . isBefore ( LocalDateTime . now ( ) ) ) { log . info ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} exceeded timeout"" , executionRequestThreadEntry . toString ( ) ) ) ; try { executionRequestThreadEntry . getValue ( ) . thread . interrupt ( ) ; executionRequestThreadEntry . getValue ( ) . thread . 
join ( 5000 ) ; } catch ( InterruptedException e ) { log . warn ( MessageFormat . format ( ""executionrequestmonitor=unable to close blocking Execution Request {}"" , executionRequestThreadEntry . getKey ( ) . toString ( ) ) ) ; } ExecutionRequestConfiguration . getInstance ( ) . get ( executionRequestThreadEntry . getKey ( ) ) . ifPresent ( executionRequest -> { executionRequest . getScriptExecutionRequests ( ) . forEach ( this :: markAborted ) ; ExecutionRequestConfiguration . getInstance ( ) . update ( executionRequest ) ; } ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } } } } +" +830,"private void checkExecutionRequests ( ) { synchronized ( lock ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=({0}) still executing"" , executionRequestThreadMap . keySet ( ) . stream ( ) . map ( MetadataKey :: toString ) . collect ( Collectors . joining ( "", "" ) ) ) ) ; for ( Map . Entry < ExecutionRequestKey , ThreadTimeCombination > executionRequestThreadEntry : executionRequestThreadMap . entrySet ( ) ) { if ( isTerminated ( executionRequestThreadEntry . getKey ( ) ) ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} completed, remove from monitoring"" , executionRequestThreadEntry . toString ( ) ) ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } else if ( executionRequestThreadEntry . getValue ( ) . startTimestamp . plus ( timeout , ChronoUnit . MINUTES ) . isBefore ( LocalDateTime . now ( ) ) ) { log . info ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} exceeded timeout"" , executionRequestThreadEntry . toString ( ) ) ) ; try { executionRequestThreadEntry . getValue ( ) . thread . interrupt ( ) ; executionRequestThreadEntry . getValue ( ) . thread . join ( 5000 ) ; } catch ( InterruptedException e ) { } ExecutionRequestConfiguration . getInstance ( ) . get ( executionRequestThreadEntry . getKey ( ) ) . ifPresent ( executionRequest -> { executionRequest . getScriptExecutionRequests ( ) . forEach ( this :: markAborted ) ; ExecutionRequestConfiguration . getInstance ( ) . update ( executionRequest ) ; } ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } } } } +","private void checkExecutionRequests ( ) { synchronized ( lock ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=({0}) still executing"" , executionRequestThreadMap . keySet ( ) . stream ( ) . map ( MetadataKey :: toString ) . collect ( Collectors . joining ( "", "" ) ) ) ) ; for ( Map . Entry < ExecutionRequestKey , ThreadTimeCombination > executionRequestThreadEntry : executionRequestThreadMap . entrySet ( ) ) { if ( isTerminated ( executionRequestThreadEntry . getKey ( ) ) ) { log . debug ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} completed, remove from monitoring"" , executionRequestThreadEntry . toString ( ) ) ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } else if ( executionRequestThreadEntry . getValue ( ) . startTimestamp . plus ( timeout , ChronoUnit . MINUTES ) . isBefore ( LocalDateTime . now ( ) ) ) { log . info ( MessageFormat . format ( ""executionrequestmonitor=execution request {0} exceeded timeout"" , executionRequestThreadEntry . toString ( ) ) ) ; try { executionRequestThreadEntry . getValue ( ) . thread . interrupt ( ) ; executionRequestThreadEntry . getValue ( ) . thread . 
join ( 5000 ) ; } catch ( InterruptedException e ) { log . warn ( MessageFormat . format ( ""executionrequestmonitor=unable to close blocking Execution Request {}"" , executionRequestThreadEntry . getKey ( ) . toString ( ) ) ) ; } ExecutionRequestConfiguration . getInstance ( ) . get ( executionRequestThreadEntry . getKey ( ) ) . ifPresent ( executionRequest -> { executionRequest . getScriptExecutionRequests ( ) . forEach ( this :: markAborted ) ; ExecutionRequestConfiguration . getInstance ( ) . update ( executionRequest ) ; } ) ; executionRequestThreadMap . remove ( executionRequestThreadEntry . getKey ( ) ) ; } } } } +" +831,"public static ElideNamespaceConfig stringToElideNamespaceConfigPojo ( String fileName , String content , Map < String , Object > variables , DynamicConfigSchemaValidator schemaValidator ) throws IOException { ElideNamespaceConfig namespaceconfig = new ElideNamespaceConfig ( ) ; String jsonConfig = hjsonToJson ( resolveVariables ( content , variables ) ) ; try { if ( schemaValidator . verifySchema ( Config . NAMESPACEConfig , jsonConfig , fileName ) ) { namespaceconfig = getModelPojo ( jsonConfig , ElideNamespaceConfig . class ) ; } } catch ( ProcessingException e ) { throw new IOException ( e ) ; } return namespaceconfig ; } +","public static ElideNamespaceConfig stringToElideNamespaceConfigPojo ( String fileName , String content , Map < String , Object > variables , DynamicConfigSchemaValidator schemaValidator ) throws IOException { ElideNamespaceConfig namespaceconfig = new ElideNamespaceConfig ( ) ; String jsonConfig = hjsonToJson ( resolveVariables ( content , variables ) ) ; try { if ( schemaValidator . verifySchema ( Config . NAMESPACEConfig , jsonConfig , fileName ) ) { namespaceconfig = getModelPojo ( jsonConfig , ElideNamespaceConfig . class ) ; } } catch ( ProcessingException e ) { log . error ( ""Error Validating DB config : "" + e . getMessage ( ) ) ; throw new IOException ( e ) ; } return namespaceconfig ; } +" +832,"public void cleanUp ( ) { try { loadLog . close ( ) ; new File ( syncFolderPath , SyncConstant . SYNC_LOG_NAME ) . delete ( ) ; new File ( syncFolderPath , SyncConstant . LOAD_LOG_NAME ) . delete ( ) ; FileUtils . deleteDirectory ( new File ( syncFolderPath , SyncConstant . RECEIVER_DATA_FOLDER_NAME ) ) ; FileLoaderManager . getInstance ( ) . removeFileLoader ( senderName ) ; } catch ( IOException e ) { LOGGER . error ( ""Can not clean up sync resource."" , e ) ; } } +","public void cleanUp ( ) { try { loadLog . close ( ) ; new File ( syncFolderPath , SyncConstant . SYNC_LOG_NAME ) . delete ( ) ; new File ( syncFolderPath , SyncConstant . LOAD_LOG_NAME ) . delete ( ) ; FileUtils . deleteDirectory ( new File ( syncFolderPath , SyncConstant . RECEIVER_DATA_FOLDER_NAME ) ) ; FileLoaderManager . getInstance ( ) . removeFileLoader ( senderName ) ; LOGGER . info ( ""Sync loading process for {} has finished."" , senderName ) ; } catch ( IOException e ) { LOGGER . error ( ""Can not clean up sync resource."" , e ) ; } } +" +833,"public void cleanUp ( ) { try { loadLog . close ( ) ; new File ( syncFolderPath , SyncConstant . SYNC_LOG_NAME ) . delete ( ) ; new File ( syncFolderPath , SyncConstant . LOAD_LOG_NAME ) . delete ( ) ; FileUtils . deleteDirectory ( new File ( syncFolderPath , SyncConstant . RECEIVER_DATA_FOLDER_NAME ) ) ; FileLoaderManager . getInstance ( ) . removeFileLoader ( senderName ) ; LOGGER . 
info ( ""Sync loading process for {} has finished."" , senderName ) ; } catch ( IOException e ) { } } +","public void cleanUp ( ) { try { loadLog . close ( ) ; new File ( syncFolderPath , SyncConstant . SYNC_LOG_NAME ) . delete ( ) ; new File ( syncFolderPath , SyncConstant . LOAD_LOG_NAME ) . delete ( ) ; FileUtils . deleteDirectory ( new File ( syncFolderPath , SyncConstant . RECEIVER_DATA_FOLDER_NAME ) ) ; FileLoaderManager . getInstance ( ) . removeFileLoader ( senderName ) ; LOGGER . info ( ""Sync loading process for {} has finished."" , senderName ) ; } catch ( IOException e ) { LOGGER . error ( ""Can not clean up sync resource."" , e ) ; } } +" +834,"public Entity add ( PersistencePackage persistencePackage , DynamicEntityDao dynamicEntityDao , RecordHelper helper ) throws ServiceException { Entity entity = persistencePackage . getEntity ( ) ; try { PersistencePerspective persistencePerspective = persistencePackage . getPersistencePerspective ( ) ; OfferCode offerCodeInstance = ( OfferCode ) Class . forName ( entity . getType ( ) [ 0 ] ) . newInstance ( ) ; Map < String , FieldMetadata > offerCodeProperties = helper . getSimpleMergedProperties ( OfferCode . class . getName ( ) , persistencePerspective ) ; offerCodeInstance = ( OfferCode ) helper . createPopulatedInstance ( offerCodeInstance , entity , offerCodeProperties , false ) ; Entity errorEntity = validateOfferCode ( entity , offerCodeInstance ) ; if ( errorEntity != null ) { return errorEntity ; } offerCodeInstance = dynamicEntityDao . merge ( offerCodeInstance ) ; return helper . getRecord ( offerCodeProperties , offerCodeInstance , null , null ) ; } catch ( Exception e ) { throw new ServiceException ( ""Unable to add entity for "" + entity . getType ( ) [ 0 ] , e ) ; } } +","public Entity add ( PersistencePackage persistencePackage , DynamicEntityDao dynamicEntityDao , RecordHelper helper ) throws ServiceException { Entity entity = persistencePackage . getEntity ( ) ; try { PersistencePerspective persistencePerspective = persistencePackage . getPersistencePerspective ( ) ; OfferCode offerCodeInstance = ( OfferCode ) Class . forName ( entity . getType ( ) [ 0 ] ) . newInstance ( ) ; Map < String , FieldMetadata > offerCodeProperties = helper . getSimpleMergedProperties ( OfferCode . class . getName ( ) , persistencePerspective ) ; offerCodeInstance = ( OfferCode ) helper . createPopulatedInstance ( offerCodeInstance , entity , offerCodeProperties , false ) ; Entity errorEntity = validateOfferCode ( entity , offerCodeInstance ) ; if ( errorEntity != null ) { return errorEntity ; } offerCodeInstance = dynamicEntityDao . merge ( offerCodeInstance ) ; return helper . getRecord ( offerCodeProperties , offerCodeInstance , null , null ) ; } catch ( Exception e ) { LOG . error ( ""Unable to execute persistence activity"" , e ) ; throw new ServiceException ( ""Unable to add entity for "" + entity . getType ( ) [ 0 ] , e ) ; } } +" +835,"public void write ( byte [ ] buf , int off , int len ) throws IOException { if ( LOG . isDebugEnabled ( ) ) { } byte [ ] wrapped = cryptoAES . wrap ( buf , off , len ) ; DataOutputStream dob = new DataOutputStream ( out ) ; dob . writeInt ( wrapped . length ) ; dob . write ( wrapped , 0 , wrapped . length ) ; dob . flush ( ) ; } +","public void write ( byte [ ] buf , int off , int len ) throws IOException { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( ""wrapping token of length:"" + len ) ; } byte [ ] wrapped = cryptoAES . 
wrap ( buf , off , len ) ; DataOutputStream dob = new DataOutputStream ( out ) ; dob . writeInt ( wrapped . length ) ; dob . write ( wrapped , 0 , wrapped . length ) ; dob . flush ( ) ; } +" +836,"@ GET @ Path ( ""/list"" ) @ SubmarineApi public Response list ( @ QueryParam ( ""dictCode"" ) String dictCode , @ QueryParam ( ""itemText"" ) String itemText , @ QueryParam ( ""itemValue"" ) String itemValue , @ QueryParam ( ""column"" ) String column , @ QueryParam ( ""field"" ) String field , @ QueryParam ( ""order"" ) String order , @ QueryParam ( ""pageNo"" ) int pageNo , @ QueryParam ( ""pageSize"" ) int pageSize ) { List < SysDictItem > list = null ; SqlSession sqlSession = MyBatisUtil . getSqlSession ( ) ; SysDictItemMapper sysDictItemMapper = sqlSession . getMapper ( SysDictItemMapper . class ) ; try { Map < String , Object > where = new HashMap < > ( ) ; where . put ( ""dictCode"" , dictCode ) ; where . put ( ""itemText"" , itemText ) ; where . put ( ""itemValue"" , itemValue ) ; list = sysDictItemMapper . selectAll ( where , new RowBounds ( pageNo , pageSize ) ) ; } catch ( Exception e ) { LOG . error ( e . getMessage ( ) , e ) ; return new JsonResponse . Builder < > ( Response . Status . OK ) . success ( false ) . build ( ) ; } finally { sqlSession . close ( ) ; } PageInfo < SysDictItem > page = new PageInfo < > ( list ) ; ListResult < SysDictItem > listResult = new ListResult ( list , page . getTotal ( ) ) ; return new JsonResponse . Builder < ListResult > ( Response . Status . OK ) . success ( true ) . result ( listResult ) . build ( ) ; } +","@ GET @ Path ( ""/list"" ) @ SubmarineApi public Response list ( @ QueryParam ( ""dictCode"" ) String dictCode , @ QueryParam ( ""itemText"" ) String itemText , @ QueryParam ( ""itemValue"" ) String itemValue , @ QueryParam ( ""column"" ) String column , @ QueryParam ( ""field"" ) String field , @ QueryParam ( ""order"" ) String order , @ QueryParam ( ""pageNo"" ) int pageNo , @ QueryParam ( ""pageSize"" ) int pageSize ) { LOG . info ( ""queryList dictId:{}, itemText:{}, itemValue:{}, pageNo:{}, pageSize:{}"" , dictCode , itemText , itemValue , pageNo , pageSize ) ; List < SysDictItem > list = null ; SqlSession sqlSession = MyBatisUtil . getSqlSession ( ) ; SysDictItemMapper sysDictItemMapper = sqlSession . getMapper ( SysDictItemMapper . class ) ; try { Map < String , Object > where = new HashMap < > ( ) ; where . put ( ""dictCode"" , dictCode ) ; where . put ( ""itemText"" , itemText ) ; where . put ( ""itemValue"" , itemValue ) ; list = sysDictItemMapper . selectAll ( where , new RowBounds ( pageNo , pageSize ) ) ; } catch ( Exception e ) { LOG . error ( e . getMessage ( ) , e ) ; return new JsonResponse . Builder < > ( Response . Status . OK ) . success ( false ) . build ( ) ; } finally { sqlSession . close ( ) ; } PageInfo < SysDictItem > page = new PageInfo < > ( list ) ; ListResult < SysDictItem > listResult = new ListResult ( list , page . getTotal ( ) ) ; return new JsonResponse . Builder < ListResult > ( Response . Status . OK ) . success ( true ) . result ( listResult ) . build ( ) ; } +" +837,"@ GET @ Path ( ""/list"" ) @ SubmarineApi public Response list ( @ QueryParam ( ""dictCode"" ) String dictCode , @ QueryParam ( ""itemText"" ) String itemText , @ QueryParam ( ""itemValue"" ) String itemValue , @ QueryParam ( ""column"" ) String column , @ QueryParam ( ""field"" ) String field , @ QueryParam ( ""order"" ) String order , @ QueryParam ( ""pageNo"" ) int pageNo , @ QueryParam ( ""pageSize"" ) int pageSize ) { LOG . 
info ( ""queryList dictId:{}, itemText:{}, itemValue:{}, pageNo:{}, pageSize:{}"" , dictCode , itemText , itemValue , pageNo , pageSize ) ; List < SysDictItem > list = null ; SqlSession sqlSession = MyBatisUtil . getSqlSession ( ) ; SysDictItemMapper sysDictItemMapper = sqlSession . getMapper ( SysDictItemMapper . class ) ; try { Map < String , Object > where = new HashMap < > ( ) ; where . put ( ""dictCode"" , dictCode ) ; where . put ( ""itemText"" , itemText ) ; where . put ( ""itemValue"" , itemValue ) ; list = sysDictItemMapper . selectAll ( where , new RowBounds ( pageNo , pageSize ) ) ; } catch ( Exception e ) { return new JsonResponse . Builder < > ( Response . Status . OK ) . success ( false ) . build ( ) ; } finally { sqlSession . close ( ) ; } PageInfo < SysDictItem > page = new PageInfo < > ( list ) ; ListResult < SysDictItem > listResult = new ListResult ( list , page . getTotal ( ) ) ; return new JsonResponse . Builder < ListResult > ( Response . Status . OK ) . success ( true ) . result ( listResult ) . build ( ) ; } +","@ GET @ Path ( ""/list"" ) @ SubmarineApi public Response list ( @ QueryParam ( ""dictCode"" ) String dictCode , @ QueryParam ( ""itemText"" ) String itemText , @ QueryParam ( ""itemValue"" ) String itemValue , @ QueryParam ( ""column"" ) String column , @ QueryParam ( ""field"" ) String field , @ QueryParam ( ""order"" ) String order , @ QueryParam ( ""pageNo"" ) int pageNo , @ QueryParam ( ""pageSize"" ) int pageSize ) { LOG . info ( ""queryList dictId:{}, itemText:{}, itemValue:{}, pageNo:{}, pageSize:{}"" , dictCode , itemText , itemValue , pageNo , pageSize ) ; List < SysDictItem > list = null ; SqlSession sqlSession = MyBatisUtil . getSqlSession ( ) ; SysDictItemMapper sysDictItemMapper = sqlSession . getMapper ( SysDictItemMapper . class ) ; try { Map < String , Object > where = new HashMap < > ( ) ; where . put ( ""dictCode"" , dictCode ) ; where . put ( ""itemText"" , itemText ) ; where . put ( ""itemValue"" , itemValue ) ; list = sysDictItemMapper . selectAll ( where , new RowBounds ( pageNo , pageSize ) ) ; } catch ( Exception e ) { LOG . error ( e . getMessage ( ) , e ) ; return new JsonResponse . Builder < > ( Response . Status . OK ) . success ( false ) . build ( ) ; } finally { sqlSession . close ( ) ; } PageInfo < SysDictItem > page = new PageInfo < > ( list ) ; ListResult < SysDictItem > listResult = new ListResult ( list , page . getTotal ( ) ) ; return new JsonResponse . Builder < ListResult > ( Response . Status . OK ) . success ( true ) . result ( listResult ) . build ( ) ; } +" +838,"public void release ( Throwable cause ) { Preconditions . checkState ( slotContextFuture . isDone ( ) , ""Releasing of the shared slot is expected only from its successfully allocated physical slot ({})"" , physicalSlotRequestId ) ; Map < ExecutionVertexID , CompletableFuture < SingleLogicalSlot > > logicalSlotFutures = requestedLogicalSlots . keySetA ( ) . stream ( ) . collect ( Collectors . toMap ( executionVertexId -> executionVertexId , requestedLogicalSlots :: getValueByKeyA ) ) ; for ( Map . Entry < ExecutionVertexID , CompletableFuture < SingleLogicalSlot > > entry : logicalSlotFutures . entrySet ( ) ) { LOG . debug ( ""Release {}"" , getLogicalSlotString ( entry . getKey ( ) ) ) ; CompletableFuture < SingleLogicalSlot > logicalSlotFuture = entry . getValue ( ) ; Preconditions . checkNotNull ( logicalSlotFuture ) ; Preconditions . checkState ( logicalSlotFuture . 
isDone ( ) , ""Logical slot future must already done when release call comes from the successfully allocated physical slot ({})"" , physicalSlotRequestId ) ; logicalSlotFuture . thenAccept ( logicalSlot -> logicalSlot . release ( cause ) ) ; } requestedLogicalSlots . clear ( ) ; releaseExternally ( ) ; } +","public void release ( Throwable cause ) { Preconditions . checkState ( slotContextFuture . isDone ( ) , ""Releasing of the shared slot is expected only from its successfully allocated physical slot ({})"" , physicalSlotRequestId ) ; LOG . debug ( ""Release shared slot ({})"" , physicalSlotRequestId ) ; Map < ExecutionVertexID , CompletableFuture < SingleLogicalSlot > > logicalSlotFutures = requestedLogicalSlots . keySetA ( ) . stream ( ) . collect ( Collectors . toMap ( executionVertexId -> executionVertexId , requestedLogicalSlots :: getValueByKeyA ) ) ; for ( Map . Entry < ExecutionVertexID , CompletableFuture < SingleLogicalSlot > > entry : logicalSlotFutures . entrySet ( ) ) { LOG . debug ( ""Release {}"" , getLogicalSlotString ( entry . getKey ( ) ) ) ; CompletableFuture < SingleLogicalSlot > logicalSlotFuture = entry . getValue ( ) ; Preconditions . checkNotNull ( logicalSlotFuture ) ; Preconditions . checkState ( logicalSlotFuture . isDone ( ) , ""Logical slot future must already done when release call comes from the successfully allocated physical slot ({})"" , physicalSlotRequestId ) ; logicalSlotFuture . thenAccept ( logicalSlot -> logicalSlot . release ( cause ) ) ; } requestedLogicalSlots . clear ( ) ; releaseExternally ( ) ; } +" +839,"public void release ( Throwable cause ) { Preconditions . checkState ( slotContextFuture . isDone ( ) , ""Releasing of the shared slot is expected only from its successfully allocated physical slot ({})"" , physicalSlotRequestId ) ; LOG . debug ( ""Release shared slot ({})"" , physicalSlotRequestId ) ; Map < ExecutionVertexID , CompletableFuture < SingleLogicalSlot > > logicalSlotFutures = requestedLogicalSlots . keySetA ( ) . stream ( ) . collect ( Collectors . toMap ( executionVertexId -> executionVertexId , requestedLogicalSlots :: getValueByKeyA ) ) ; for ( Map . Entry < ExecutionVertexID , CompletableFuture < SingleLogicalSlot > > entry : logicalSlotFutures . entrySet ( ) ) { CompletableFuture < SingleLogicalSlot > logicalSlotFuture = entry . getValue ( ) ; Preconditions . checkNotNull ( logicalSlotFuture ) ; Preconditions . checkState ( logicalSlotFuture . isDone ( ) , ""Logical slot future must already done when release call comes from the successfully allocated physical slot ({})"" , physicalSlotRequestId ) ; logicalSlotFuture . thenAccept ( logicalSlot -> logicalSlot . release ( cause ) ) ; } requestedLogicalSlots . clear ( ) ; releaseExternally ( ) ; } +","public void release ( Throwable cause ) { Preconditions . checkState ( slotContextFuture . isDone ( ) , ""Releasing of the shared slot is expected only from its successfully allocated physical slot ({})"" , physicalSlotRequestId ) ; LOG . debug ( ""Release shared slot ({})"" , physicalSlotRequestId ) ; Map < ExecutionVertexID , CompletableFuture < SingleLogicalSlot > > logicalSlotFutures = requestedLogicalSlots . keySetA ( ) . stream ( ) . collect ( Collectors . toMap ( executionVertexId -> executionVertexId , requestedLogicalSlots :: getValueByKeyA ) ) ; for ( Map . Entry < ExecutionVertexID , CompletableFuture < SingleLogicalSlot > > entry : logicalSlotFutures . entrySet ( ) ) { LOG . debug ( ""Release {}"" , getLogicalSlotString ( entry . 
getKey ( ) ) ) ; CompletableFuture < SingleLogicalSlot > logicalSlotFuture = entry . getValue ( ) ; Preconditions . checkNotNull ( logicalSlotFuture ) ; Preconditions . checkState ( logicalSlotFuture . isDone ( ) , ""Logical slot future must already done when release call comes from the successfully allocated physical slot ({})"" , physicalSlotRequestId ) ; logicalSlotFuture . thenAccept ( logicalSlot -> logicalSlot . release ( cause ) ) ; } requestedLogicalSlots . clear ( ) ; releaseExternally ( ) ; } +" +840,"public byte [ ] getBytes ( final int width , final int height , final int sx , final int sy ) { if ( LOG . isDebugEnabled ( ) ) { } final byte [ ] array = new byte [ width * height * 4 ] ; int index = 0 ; for ( int x = sx ; x < sx + width ; x ++ ) { if ( x < 0 || x >= image_ . getWidth ( ) ) { array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; } else { for ( int y = sy ; y < sy + height ; y ++ ) { if ( y < 0 || y >= image_ . getHeight ( ) ) { array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; } else { final int color = image_ . getRGB ( x , y ) ; array [ index ++ ] = ( byte ) ( ( color & 0xff0000 ) > > 16 ) ; array [ index ++ ] = ( byte ) ( ( color & 0xff00 ) > > 8 ) ; array [ index ++ ] = ( byte ) ( color & 0xff ) ; array [ index ++ ] = ( byte ) ( ( color & 0xff000000 ) > > > 24 ) ; } } } } return array ; } +","public byte [ ] getBytes ( final int width , final int height , final int sx , final int sy ) { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( ""["" + id_ + ""] getBytes("" + width + "", "" + height + "", "" + sx + "", "" + sy + "")"" ) ; } final byte [ ] array = new byte [ width * height * 4 ] ; int index = 0 ; for ( int x = sx ; x < sx + width ; x ++ ) { if ( x < 0 || x >= image_ . getWidth ( ) ) { array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; } else { for ( int y = sy ; y < sy + height ; y ++ ) { if ( y < 0 || y >= image_ . getHeight ( ) ) { array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; array [ index ++ ] = ( byte ) 0 ; } else { final int color = image_ . getRGB ( x , y ) ; array [ index ++ ] = ( byte ) ( ( color & 0xff0000 ) > > 16 ) ; array [ index ++ ] = ( byte ) ( ( color & 0xff00 ) > > 8 ) ; array [ index ++ ] = ( byte ) ( color & 0xff ) ; array [ index ++ ] = ( byte ) ( ( color & 0xff000000 ) > > > 24 ) ; } } } } return array ; } +" +841,"private void reinitialize ( ) { stopRetryRegistering ( ) ; stopOfflineMonitor1 ( ) ; stopOfflineMonitor2 ( ) ; unregisterEventListener ( ) ; initialize ( ) ; } +","private void reinitialize ( ) { logger . debug ( ""Reinitialize thing handler ({}). haId={}"" , getThingLabel ( ) , getThingHaId ( ) ) ; stopRetryRegistering ( ) ; stopOfflineMonitor1 ( ) ; stopOfflineMonitor2 ( ) ; unregisterEventListener ( ) ; initialize ( ) ; } +" +842,"public void setSessionTimeout ( Integer minutes , HttpContext httpContext ) { serverModel . runSilently ( ( ) -> { final Batch batch = new Batch ( ""Session timeout configuration"" ) ; WebContainerContext ctx = unify ( httpContext ) ; OsgiContextModel contextModel = serverModel . getOrCreateOsgiContextModel ( ctx , serviceBundle , PaxWebConstants . DEFAULT_CONTEXT_PATH , batch ) ; contextModel . setSessionTimeout ( minutes ) ; serverController . 
sendBatch ( batch ) ; return null ; } ) ; } +","public void setSessionTimeout ( Integer minutes , HttpContext httpContext ) { serverModel . runSilently ( ( ) -> { final Batch batch = new Batch ( ""Session timeout configuration"" ) ; WebContainerContext ctx = unify ( httpContext ) ; OsgiContextModel contextModel = serverModel . getOrCreateOsgiContextModel ( ctx , serviceBundle , PaxWebConstants . DEFAULT_CONTEXT_PATH , batch ) ; LOG . info ( ""Setting session timeout for {}"" , contextModel ) ; contextModel . setSessionTimeout ( minutes ) ; serverController . sendBatch ( batch ) ; return null ; } ) ; } +" +843,"public void updated ( Map < String , Object > properties ) { this . properties = properties ; if ( this . properties != null ) { Object enabledVal = this . properties . get ( ""enabled"" ) ; if ( enabledVal != null ) { this . configEnabled = ( Boolean ) enabledVal ; } if ( ! this . configEnabled ) { return ; } if ( this . properties . get ( ""pingInterval"" ) != null ) { this . pingInterval = ( Integer ) this . properties . get ( ""pingInterval"" ) ; if ( this . future != null ) { this . future . cancel ( false ) ; while ( ! this . future . isDone ( ) ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; logger . debug ( e . getMessage ( ) , e ) ; } } } this . future = this . executor . scheduleAtFixedRate ( ( ) -> { Thread . currentThread ( ) . setName ( getClass ( ) . getSimpleName ( ) ) ; if ( WatchdogServiceImpl . this . configEnabled ) { doWatchdogLoop ( ) ; } } , 0 , this . pingInterval , TimeUnit . MILLISECONDS ) ; } } } +","public void updated ( Map < String , Object > properties ) { logger . debug ( ""updated..."" ) ; this . properties = properties ; if ( this . properties != null ) { Object enabledVal = this . properties . get ( ""enabled"" ) ; if ( enabledVal != null ) { this . configEnabled = ( Boolean ) enabledVal ; } if ( ! this . configEnabled ) { return ; } if ( this . properties . get ( ""pingInterval"" ) != null ) { this . pingInterval = ( Integer ) this . properties . get ( ""pingInterval"" ) ; if ( this . future != null ) { this . future . cancel ( false ) ; while ( ! this . future . isDone ( ) ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; logger . debug ( e . getMessage ( ) , e ) ; } } } this . future = this . executor . scheduleAtFixedRate ( ( ) -> { Thread . currentThread ( ) . setName ( getClass ( ) . getSimpleName ( ) ) ; if ( WatchdogServiceImpl . this . configEnabled ) { doWatchdogLoop ( ) ; } } , 0 , this . pingInterval , TimeUnit . MILLISECONDS ) ; } } } +" +844,"public void updated ( Map < String , Object > properties ) { logger . debug ( ""updated..."" ) ; this . properties = properties ; if ( this . properties != null ) { Object enabledVal = this . properties . get ( ""enabled"" ) ; if ( enabledVal != null ) { this . configEnabled = ( Boolean ) enabledVal ; } if ( ! this . configEnabled ) { return ; } if ( this . properties . get ( ""pingInterval"" ) != null ) { this . pingInterval = ( Integer ) this . properties . get ( ""pingInterval"" ) ; if ( this . future != null ) { this . future . cancel ( false ) ; while ( ! this . future . isDone ( ) ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; } } } this . future = this . executor . scheduleAtFixedRate ( ( ) -> { Thread . currentThread ( ) . setName ( getClass ( ) . 
getSimpleName ( ) ) ; if ( WatchdogServiceImpl . this . configEnabled ) { doWatchdogLoop ( ) ; } } , 0 , this . pingInterval , TimeUnit . MILLISECONDS ) ; } } } +","public void updated ( Map < String , Object > properties ) { logger . debug ( ""updated..."" ) ; this . properties = properties ; if ( this . properties != null ) { Object enabledVal = this . properties . get ( ""enabled"" ) ; if ( enabledVal != null ) { this . configEnabled = ( Boolean ) enabledVal ; } if ( ! this . configEnabled ) { return ; } if ( this . properties . get ( ""pingInterval"" ) != null ) { this . pingInterval = ( Integer ) this . properties . get ( ""pingInterval"" ) ; if ( this . future != null ) { this . future . cancel ( false ) ; while ( ! this . future . isDone ( ) ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; logger . debug ( e . getMessage ( ) , e ) ; } } } this . future = this . executor . scheduleAtFixedRate ( ( ) -> { Thread . currentThread ( ) . setName ( getClass ( ) . getSimpleName ( ) ) ; if ( WatchdogServiceImpl . this . configEnabled ) { doWatchdogLoop ( ) ; } } , 0 , this . pingInterval , TimeUnit . MILLISECONDS ) ; } } } +" +845,"public void initContext ( AgentSession agentSession ) { } +","public void initContext ( AgentSession agentSession ) { log . info ( ""Initializing Thrift agent context"" ) ; } +" +846,"@ Test public void testTensorflow ( ) throws InterpreterException { String callTensorflowFunc = ""import tensorflow as tf\n"" + ""print('Installed TensorFlow version:' + tf.__version__)"" ; InterpreterContext intpContext = getIntpContext ( ) ; InterpreterResult intpResult = pySubmarineIntp . interpret ( callTensorflowFunc , intpContext ) ; assertEquals ( InterpreterResult . Code . SUCCESS , intpResult . code ( ) ) ; String tfVersionInfo = intpContext . out ( ) . getCurrentOutput ( ) . toString ( ) ; boolean getVersion = tfVersionInfo . contains ( ""Installed TensorFlow version:"" ) ; assertTrue ( tfVersionInfo , getVersion ) ; } +","@ Test public void testTensorflow ( ) throws InterpreterException { String callTensorflowFunc = ""import tensorflow as tf\n"" + ""print('Installed TensorFlow version:' + tf.__version__)"" ; InterpreterContext intpContext = getIntpContext ( ) ; InterpreterResult intpResult = pySubmarineIntp . interpret ( callTensorflowFunc , intpContext ) ; assertEquals ( InterpreterResult . Code . SUCCESS , intpResult . code ( ) ) ; String tfVersionInfo = intpContext . out ( ) . getCurrentOutput ( ) . toString ( ) ; LOGGER . info ( tfVersionInfo ) ; boolean getVersion = tfVersionInfo . contains ( ""Installed TensorFlow version:"" ) ; assertTrue ( tfVersionInfo , getVersion ) ; } +" +847,"public void onNext ( final CompletedJob job ) { stopAndNotify ( ) ; } +","public void onNext ( final CompletedJob job ) { LOG . log ( Level . INFO , ""Completed job: {0}"" , job . getId ( ) ) ; stopAndNotify ( ) ; } +" +848,"public DfAlterCheckFinalInfo savePrevious ( ) { _log . info ( ""+-------------------+"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""| Save Previous |"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""+-------------------+"" ) ; deleteAllNGMark ( ) ; final DfAlterCheckFinalInfo finalInfo = new DfAlterCheckFinalInfo ( ) ; finalInfo . setResultMessage ( ""Save Previous"" ) ; if ( ! checkSavePreviousInvalidStatus ( finalInfo ) ) { return finalInfo ; } final long before = System . currentTimeMillis ( ) ; _unreleasedAlterAgent . 
finishReleasedAlterSql ( ) ; deleteExtractedPreviousResource ( ) ; final List < File > copyToFileList = copyToPreviousResource ( ) ; compressPreviousResource ( ) ; finalInfo . setResultMessage ( finalInfo . getResultMessage ( ) + "": saved="" + copyToFileList . size ( ) + "" file(s)"" ) ; final long after = System . currentTimeMillis ( ) ; finalInfo . setProcessPerformanceMillis ( after - before ) ; if ( ! checkSavedPreviousResource ( finalInfo ) ) { return finalInfo ; } markPreviousOK ( copyToFileList ) ; deleteSavePreviousMark ( ) ; finalInfo . addDetailMessage ( ""o (all resources saved)"" ) ; return finalInfo ; } +","public DfAlterCheckFinalInfo savePrevious ( ) { _log . info ( """" ) ; _log . info ( ""+-------------------+"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""| Save Previous |"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""+-------------------+"" ) ; deleteAllNGMark ( ) ; final DfAlterCheckFinalInfo finalInfo = new DfAlterCheckFinalInfo ( ) ; finalInfo . setResultMessage ( ""Save Previous"" ) ; if ( ! checkSavePreviousInvalidStatus ( finalInfo ) ) { return finalInfo ; } final long before = System . currentTimeMillis ( ) ; _unreleasedAlterAgent . finishReleasedAlterSql ( ) ; deleteExtractedPreviousResource ( ) ; final List < File > copyToFileList = copyToPreviousResource ( ) ; compressPreviousResource ( ) ; finalInfo . setResultMessage ( finalInfo . getResultMessage ( ) + "": saved="" + copyToFileList . size ( ) + "" file(s)"" ) ; final long after = System . currentTimeMillis ( ) ; finalInfo . setProcessPerformanceMillis ( after - before ) ; if ( ! checkSavedPreviousResource ( finalInfo ) ) { return finalInfo ; } markPreviousOK ( copyToFileList ) ; deleteSavePreviousMark ( ) ; finalInfo . addDetailMessage ( ""o (all resources saved)"" ) ; return finalInfo ; } +" +849,"public DfAlterCheckFinalInfo savePrevious ( ) { _log . info ( """" ) ; _log . info ( ""+-------------------+"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""+-------------------+"" ) ; deleteAllNGMark ( ) ; final DfAlterCheckFinalInfo finalInfo = new DfAlterCheckFinalInfo ( ) ; finalInfo . setResultMessage ( ""Save Previous"" ) ; if ( ! checkSavePreviousInvalidStatus ( finalInfo ) ) { return finalInfo ; } final long before = System . currentTimeMillis ( ) ; _unreleasedAlterAgent . finishReleasedAlterSql ( ) ; deleteExtractedPreviousResource ( ) ; final List < File > copyToFileList = copyToPreviousResource ( ) ; compressPreviousResource ( ) ; finalInfo . setResultMessage ( finalInfo . getResultMessage ( ) + "": saved="" + copyToFileList . size ( ) + "" file(s)"" ) ; final long after = System . currentTimeMillis ( ) ; finalInfo . setProcessPerformanceMillis ( after - before ) ; if ( ! checkSavedPreviousResource ( finalInfo ) ) { return finalInfo ; } markPreviousOK ( copyToFileList ) ; deleteSavePreviousMark ( ) ; finalInfo . addDetailMessage ( ""o (all resources saved)"" ) ; return finalInfo ; } +","public DfAlterCheckFinalInfo savePrevious ( ) { _log . info ( """" ) ; _log . info ( ""+-------------------+"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""| Save Previous |"" ) ; _log . info ( ""| |"" ) ; _log . info ( ""+-------------------+"" ) ; deleteAllNGMark ( ) ; final DfAlterCheckFinalInfo finalInfo = new DfAlterCheckFinalInfo ( ) ; finalInfo . setResultMessage ( ""Save Previous"" ) ; if ( ! checkSavePreviousInvalidStatus ( finalInfo ) ) { return finalInfo ; } final long before = System . currentTimeMillis ( ) ; _unreleasedAlterAgent . 
finishReleasedAlterSql ( ) ; deleteExtractedPreviousResource ( ) ; final List < File > copyToFileList = copyToPreviousResource ( ) ; compressPreviousResource ( ) ; finalInfo . setResultMessage ( finalInfo . getResultMessage ( ) + "": saved="" + copyToFileList . size ( ) + "" file(s)"" ) ; final long after = System . currentTimeMillis ( ) ; finalInfo . setProcessPerformanceMillis ( after - before ) ; if ( ! checkSavedPreviousResource ( finalInfo ) ) { return finalInfo ; } markPreviousOK ( copyToFileList ) ; deleteSavePreviousMark ( ) ; finalInfo . addDetailMessage ( ""o (all resources saved)"" ) ; return finalInfo ; } +" +850,"public void run ( ) { try { Result result ; while ( true ) { if ( ( result = scanner . next ( ) ) != null ) { while ( running && ! resultQueue . offer ( result ) ) { Thread . sleep ( sleepMillis ) ; } } } } catch ( Exception e ) { threadFailureReason = e . getMessage ( ) ; } finally { scanner . close ( ) ; } } +","public void run ( ) { try { Result result ; while ( true ) { if ( ( result = scanner . next ( ) ) != null ) { while ( running && ! resultQueue . offer ( result ) ) { Thread . sleep ( sleepMillis ) ; } } } } catch ( Exception e ) { logger . debug ( ""Exception in fetching results {}"" , e . getMessage ( ) ) ; threadFailureReason = e . getMessage ( ) ; } finally { scanner . close ( ) ; } } +" +851,"public double calcAverageRate ( T key , long timeIntervalSecs ) { OffsetDataStats stats = get ( key ) ; if ( stats == null ) { if ( log . isDebugEnabled ( ) ) return - 1 ; } long now = System . currentTimeMillis ( ) ; long mostRecentThreashold = now - timeIntervalSecs * ( long ) ( staleThresholdPercent * 1000 ) ; OffsetData newestOffsetData = stats . mostRecentDataPoint ( ) ; if ( newestOffsetData == null || newestOffsetData . olderThan ( mostRecentThreashold ) ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data too stale for key="" + key ) ; return - 1 ; } long then = newestOffsetData . getTimestamp ( ) - timeIntervalSecs * 1000L ; long thenDelta = timeIntervalSecs * ( long ) ( ageThresholdPercent * 1000 ) ; OffsetData oldestOffsetData = null ; long minDiff = - 1 ; long lastDiff = - 1 ; for ( OffsetData offsetData : stats . getOffsetDataList ( ) ) { long diff = offsetData . within ( then , thenDelta ) ; if ( diff < 0 ) continue ; if ( minDiff == - 1 || minDiff < diff ) { minDiff = diff ; oldestOffsetData = offsetData ; } if ( minDiff != - 1 && lastDiff != - 1 && diff > lastDiff ) { break ; } lastDiff = diff ; } if ( oldestOffsetData == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data history too short for key="" + key ) ; return - 1 ; } return newestOffsetData . averageRate ( oldestOffsetData ) ; } +","public double calcAverageRate ( T key , long timeIntervalSecs ) { OffsetDataStats stats = get ( key ) ; if ( stats == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""No stats data found key="" + key ) ; return - 1 ; } long now = System . currentTimeMillis ( ) ; long mostRecentThreashold = now - timeIntervalSecs * ( long ) ( staleThresholdPercent * 1000 ) ; OffsetData newestOffsetData = stats . mostRecentDataPoint ( ) ; if ( newestOffsetData == null || newestOffsetData . olderThan ( mostRecentThreashold ) ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data too stale for key="" + key ) ; return - 1 ; } long then = newestOffsetData . 
getTimestamp ( ) - timeIntervalSecs * 1000L ; long thenDelta = timeIntervalSecs * ( long ) ( ageThresholdPercent * 1000 ) ; OffsetData oldestOffsetData = null ; long minDiff = - 1 ; long lastDiff = - 1 ; for ( OffsetData offsetData : stats . getOffsetDataList ( ) ) { long diff = offsetData . within ( then , thenDelta ) ; if ( diff < 0 ) continue ; if ( minDiff == - 1 || minDiff < diff ) { minDiff = diff ; oldestOffsetData = offsetData ; } if ( minDiff != - 1 && lastDiff != - 1 && diff > lastDiff ) { break ; } lastDiff = diff ; } if ( oldestOffsetData == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data history too short for key="" + key ) ; return - 1 ; } return newestOffsetData . averageRate ( oldestOffsetData ) ; } +" +852,"public double calcAverageRate ( T key , long timeIntervalSecs ) { OffsetDataStats stats = get ( key ) ; if ( stats == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""No stats data found key="" + key ) ; return - 1 ; } long now = System . currentTimeMillis ( ) ; long mostRecentThreashold = now - timeIntervalSecs * ( long ) ( staleThresholdPercent * 1000 ) ; OffsetData newestOffsetData = stats . mostRecentDataPoint ( ) ; if ( newestOffsetData == null || newestOffsetData . olderThan ( mostRecentThreashold ) ) { if ( log . isDebugEnabled ( ) ) return - 1 ; } long then = newestOffsetData . getTimestamp ( ) - timeIntervalSecs * 1000L ; long thenDelta = timeIntervalSecs * ( long ) ( ageThresholdPercent * 1000 ) ; OffsetData oldestOffsetData = null ; long minDiff = - 1 ; long lastDiff = - 1 ; for ( OffsetData offsetData : stats . getOffsetDataList ( ) ) { long diff = offsetData . within ( then , thenDelta ) ; if ( diff < 0 ) continue ; if ( minDiff == - 1 || minDiff < diff ) { minDiff = diff ; oldestOffsetData = offsetData ; } if ( minDiff != - 1 && lastDiff != - 1 && diff > lastDiff ) { break ; } lastDiff = diff ; } if ( oldestOffsetData == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data history too short for key="" + key ) ; return - 1 ; } return newestOffsetData . averageRate ( oldestOffsetData ) ; } +","public double calcAverageRate ( T key , long timeIntervalSecs ) { OffsetDataStats stats = get ( key ) ; if ( stats == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""No stats data found key="" + key ) ; return - 1 ; } long now = System . currentTimeMillis ( ) ; long mostRecentThreashold = now - timeIntervalSecs * ( long ) ( staleThresholdPercent * 1000 ) ; OffsetData newestOffsetData = stats . mostRecentDataPoint ( ) ; if ( newestOffsetData == null || newestOffsetData . olderThan ( mostRecentThreashold ) ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data too stale for key="" + key ) ; return - 1 ; } long then = newestOffsetData . getTimestamp ( ) - timeIntervalSecs * 1000L ; long thenDelta = timeIntervalSecs * ( long ) ( ageThresholdPercent * 1000 ) ; OffsetData oldestOffsetData = null ; long minDiff = - 1 ; long lastDiff = - 1 ; for ( OffsetData offsetData : stats . getOffsetDataList ( ) ) { long diff = offsetData . within ( then , thenDelta ) ; if ( diff < 0 ) continue ; if ( minDiff == - 1 || minDiff < diff ) { minDiff = diff ; oldestOffsetData = offsetData ; } if ( minDiff != - 1 && lastDiff != - 1 && diff > lastDiff ) { break ; } lastDiff = diff ; } if ( oldestOffsetData == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data history too short for key="" + key ) ; return - 1 ; } return newestOffsetData . 
averageRate ( oldestOffsetData ) ; } +" +853,"public double calcAverageRate ( T key , long timeIntervalSecs ) { OffsetDataStats stats = get ( key ) ; if ( stats == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""No stats data found key="" + key ) ; return - 1 ; } long now = System . currentTimeMillis ( ) ; long mostRecentThreashold = now - timeIntervalSecs * ( long ) ( staleThresholdPercent * 1000 ) ; OffsetData newestOffsetData = stats . mostRecentDataPoint ( ) ; if ( newestOffsetData == null || newestOffsetData . olderThan ( mostRecentThreashold ) ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data too stale for key="" + key ) ; return - 1 ; } long then = newestOffsetData . getTimestamp ( ) - timeIntervalSecs * 1000L ; long thenDelta = timeIntervalSecs * ( long ) ( ageThresholdPercent * 1000 ) ; OffsetData oldestOffsetData = null ; long minDiff = - 1 ; long lastDiff = - 1 ; for ( OffsetData offsetData : stats . getOffsetDataList ( ) ) { long diff = offsetData . within ( then , thenDelta ) ; if ( diff < 0 ) continue ; if ( minDiff == - 1 || minDiff < diff ) { minDiff = diff ; oldestOffsetData = offsetData ; } if ( minDiff != - 1 && lastDiff != - 1 && diff > lastDiff ) { break ; } lastDiff = diff ; } if ( oldestOffsetData == null ) { if ( log . isDebugEnabled ( ) ) return - 1 ; } return newestOffsetData . averageRate ( oldestOffsetData ) ; } +","public double calcAverageRate ( T key , long timeIntervalSecs ) { OffsetDataStats stats = get ( key ) ; if ( stats == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""No stats data found key="" + key ) ; return - 1 ; } long now = System . currentTimeMillis ( ) ; long mostRecentThreashold = now - timeIntervalSecs * ( long ) ( staleThresholdPercent * 1000 ) ; OffsetData newestOffsetData = stats . mostRecentDataPoint ( ) ; if ( newestOffsetData == null || newestOffsetData . olderThan ( mostRecentThreashold ) ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data too stale for key="" + key ) ; return - 1 ; } long then = newestOffsetData . getTimestamp ( ) - timeIntervalSecs * 1000L ; long thenDelta = timeIntervalSecs * ( long ) ( ageThresholdPercent * 1000 ) ; OffsetData oldestOffsetData = null ; long minDiff = - 1 ; long lastDiff = - 1 ; for ( OffsetData offsetData : stats . getOffsetDataList ( ) ) { long diff = offsetData . within ( then , thenDelta ) ; if ( diff < 0 ) continue ; if ( minDiff == - 1 || minDiff < diff ) { minDiff = diff ; oldestOffsetData = offsetData ; } if ( minDiff != - 1 && lastDiff != - 1 && diff > lastDiff ) { break ; } lastDiff = diff ; } if ( oldestOffsetData == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( ""Stats data history too short for key="" + key ) ; return - 1 ; } return newestOffsetData . averageRate ( oldestOffsetData ) ; } +" +854,"public void validateAccessToken ( String accessToken , String oxdId ) { if ( StringUtils . isBlank ( accessToken ) ) { throw new HttpException ( ErrorResponseCode . BLANK_ACCESS_TOKEN ) ; } final RpSyncService rpSyncService = ServerLauncher . getInjector ( ) . getInstance ( RpSyncService . class ) ; final Rp rp = rpSyncService . getRp ( oxdId ) ; final IntrospectionResponse introspectionResponse = introspect ( accessToken , oxdId ) ; if ( StringUtils . isBlank ( introspectionResponse . getClientId ( ) ) ) { LOG . error ( ""AS returned introspection response with empty/blank client_id which is required by oxd. 
Please check your AS installation and make sure AS return client_id for introspection call (CE 3.1.0 or later)."" ) ; throw new HttpException ( ErrorResponseCode . NO_CLIENT_ID_IN_INTROSPECTION_RESPONSE ) ; } if ( ! introspectionResponse . getScope ( ) . contains ( ""oxd"" ) ) { LOG . error ( ""access_token does not have `oxd` scope. Make sure a) scope exists on AS b) register_site is registered with 'oxd' scope c) get_client_token has 'oxd' scope in request"" ) ; throw new HttpException ( ErrorResponseCode . ACCESS_TOKEN_INSUFFICIENT_SCOPE ) ; } if ( introspectionResponse . getClientId ( ) . equals ( rp . getClientId ( ) ) ) { return ; } LOG . error ( ""No access token provided in Authorization header. Forbidden."" ) ; throw new HttpException ( ErrorResponseCode . INVALID_ACCESS_TOKEN ) ; } +","public void validateAccessToken ( String accessToken , String oxdId ) { if ( StringUtils . isBlank ( accessToken ) ) { throw new HttpException ( ErrorResponseCode . BLANK_ACCESS_TOKEN ) ; } final RpSyncService rpSyncService = ServerLauncher . getInjector ( ) . getInstance ( RpSyncService . class ) ; final Rp rp = rpSyncService . getRp ( oxdId ) ; final IntrospectionResponse introspectionResponse = introspect ( accessToken , oxdId ) ; LOG . trace ( ""access_token: "" + accessToken + "", introspection: "" + introspectionResponse + "", clientId: "" + rp . getClientId ( ) ) ; if ( StringUtils . isBlank ( introspectionResponse . getClientId ( ) ) ) { LOG . error ( ""AS returned introspection response with empty/blank client_id which is required by oxd. Please check your AS installation and make sure AS return client_id for introspection call (CE 3.1.0 or later)."" ) ; throw new HttpException ( ErrorResponseCode . NO_CLIENT_ID_IN_INTROSPECTION_RESPONSE ) ; } if ( ! introspectionResponse . getScope ( ) . contains ( ""oxd"" ) ) { LOG . error ( ""access_token does not have `oxd` scope. Make sure a) scope exists on AS b) register_site is registered with 'oxd' scope c) get_client_token has 'oxd' scope in request"" ) ; throw new HttpException ( ErrorResponseCode . ACCESS_TOKEN_INSUFFICIENT_SCOPE ) ; } if ( introspectionResponse . getClientId ( ) . equals ( rp . getClientId ( ) ) ) { return ; } LOG . error ( ""No access token provided in Authorization header. Forbidden."" ) ; throw new HttpException ( ErrorResponseCode . INVALID_ACCESS_TOKEN ) ; } +" +855,"public void validateAccessToken ( String accessToken , String oxdId ) { if ( StringUtils . isBlank ( accessToken ) ) { throw new HttpException ( ErrorResponseCode . BLANK_ACCESS_TOKEN ) ; } final RpSyncService rpSyncService = ServerLauncher . getInjector ( ) . getInstance ( RpSyncService . class ) ; final Rp rp = rpSyncService . getRp ( oxdId ) ; final IntrospectionResponse introspectionResponse = introspect ( accessToken , oxdId ) ; LOG . trace ( ""access_token: "" + accessToken + "", introspection: "" + introspectionResponse + "", clientId: "" + rp . getClientId ( ) ) ; if ( StringUtils . isBlank ( introspectionResponse . getClientId ( ) ) ) { throw new HttpException ( ErrorResponseCode . NO_CLIENT_ID_IN_INTROSPECTION_RESPONSE ) ; } if ( ! introspectionResponse . getScope ( ) . contains ( ""oxd"" ) ) { LOG . error ( ""access_token does not have `oxd` scope. Make sure a) scope exists on AS b) register_site is registered with 'oxd' scope c) get_client_token has 'oxd' scope in request"" ) ; throw new HttpException ( ErrorResponseCode . ACCESS_TOKEN_INSUFFICIENT_SCOPE ) ; } if ( introspectionResponse . getClientId ( ) . equals ( rp . 
getClientId ( ) ) ) { return ; } LOG . error ( ""No access token provided in Authorization header. Forbidden."" ) ; throw new HttpException ( ErrorResponseCode . INVALID_ACCESS_TOKEN ) ; } +","public void validateAccessToken ( String accessToken , String oxdId ) { if ( StringUtils . isBlank ( accessToken ) ) { throw new HttpException ( ErrorResponseCode . BLANK_ACCESS_TOKEN ) ; } final RpSyncService rpSyncService = ServerLauncher . getInjector ( ) . getInstance ( RpSyncService . class ) ; final Rp rp = rpSyncService . getRp ( oxdId ) ; final IntrospectionResponse introspectionResponse = introspect ( accessToken , oxdId ) ; LOG . trace ( ""access_token: "" + accessToken + "", introspection: "" + introspectionResponse + "", clientId: "" + rp . getClientId ( ) ) ; if ( StringUtils . isBlank ( introspectionResponse . getClientId ( ) ) ) { LOG . error ( ""AS returned introspection response with empty/blank client_id which is required by oxd. Please check your AS installation and make sure AS return client_id for introspection call (CE 3.1.0 or later)."" ) ; throw new HttpException ( ErrorResponseCode . NO_CLIENT_ID_IN_INTROSPECTION_RESPONSE ) ; } if ( ! introspectionResponse . getScope ( ) . contains ( ""oxd"" ) ) { LOG . error ( ""access_token does not have `oxd` scope. Make sure a) scope exists on AS b) register_site is registered with 'oxd' scope c) get_client_token has 'oxd' scope in request"" ) ; throw new HttpException ( ErrorResponseCode . ACCESS_TOKEN_INSUFFICIENT_SCOPE ) ; } if ( introspectionResponse . getClientId ( ) . equals ( rp . getClientId ( ) ) ) { return ; } LOG . error ( ""No access token provided in Authorization header. Forbidden."" ) ; throw new HttpException ( ErrorResponseCode . INVALID_ACCESS_TOKEN ) ; } +" +856,"public void validateAccessToken ( String accessToken , String oxdId ) { if ( StringUtils . isBlank ( accessToken ) ) { throw new HttpException ( ErrorResponseCode . BLANK_ACCESS_TOKEN ) ; } final RpSyncService rpSyncService = ServerLauncher . getInjector ( ) . getInstance ( RpSyncService . class ) ; final Rp rp = rpSyncService . getRp ( oxdId ) ; final IntrospectionResponse introspectionResponse = introspect ( accessToken , oxdId ) ; LOG . trace ( ""access_token: "" + accessToken + "", introspection: "" + introspectionResponse + "", clientId: "" + rp . getClientId ( ) ) ; if ( StringUtils . isBlank ( introspectionResponse . getClientId ( ) ) ) { LOG . error ( ""AS returned introspection response with empty/blank client_id which is required by oxd. Please check your AS installation and make sure AS return client_id for introspection call (CE 3.1.0 or later)."" ) ; throw new HttpException ( ErrorResponseCode . NO_CLIENT_ID_IN_INTROSPECTION_RESPONSE ) ; } if ( ! introspectionResponse . getScope ( ) . contains ( ""oxd"" ) ) { throw new HttpException ( ErrorResponseCode . ACCESS_TOKEN_INSUFFICIENT_SCOPE ) ; } if ( introspectionResponse . getClientId ( ) . equals ( rp . getClientId ( ) ) ) { return ; } LOG . error ( ""No access token provided in Authorization header. Forbidden."" ) ; throw new HttpException ( ErrorResponseCode . INVALID_ACCESS_TOKEN ) ; } +","public void validateAccessToken ( String accessToken , String oxdId ) { if ( StringUtils . isBlank ( accessToken ) ) { throw new HttpException ( ErrorResponseCode . BLANK_ACCESS_TOKEN ) ; } final RpSyncService rpSyncService = ServerLauncher . getInjector ( ) . getInstance ( RpSyncService . class ) ; final Rp rp = rpSyncService . 
getRp ( oxdId ) ; final IntrospectionResponse introspectionResponse = introspect ( accessToken , oxdId ) ; LOG . trace ( ""access_token: "" + accessToken + "", introspection: "" + introspectionResponse + "", clientId: "" + rp . getClientId ( ) ) ; if ( StringUtils . isBlank ( introspectionResponse . getClientId ( ) ) ) { LOG . error ( ""AS returned introspection response with empty/blank client_id which is required by oxd. Please check your AS installation and make sure AS return client_id for introspection call (CE 3.1.0 or later)."" ) ; throw new HttpException ( ErrorResponseCode . NO_CLIENT_ID_IN_INTROSPECTION_RESPONSE ) ; } if ( ! introspectionResponse . getScope ( ) . contains ( ""oxd"" ) ) { LOG . error ( ""access_token does not have `oxd` scope. Make sure a) scope exists on AS b) register_site is registered with 'oxd' scope c) get_client_token has 'oxd' scope in request"" ) ; throw new HttpException ( ErrorResponseCode . ACCESS_TOKEN_INSUFFICIENT_SCOPE ) ; } if ( introspectionResponse . getClientId ( ) . equals ( rp . getClientId ( ) ) ) { return ; } LOG . error ( ""No access token provided in Authorization header. Forbidden."" ) ; throw new HttpException ( ErrorResponseCode . INVALID_ACCESS_TOKEN ) ; } +" +857,"public void validateAccessToken ( String accessToken , String oxdId ) { if ( StringUtils . isBlank ( accessToken ) ) { throw new HttpException ( ErrorResponseCode . BLANK_ACCESS_TOKEN ) ; } final RpSyncService rpSyncService = ServerLauncher . getInjector ( ) . getInstance ( RpSyncService . class ) ; final Rp rp = rpSyncService . getRp ( oxdId ) ; final IntrospectionResponse introspectionResponse = introspect ( accessToken , oxdId ) ; LOG . trace ( ""access_token: "" + accessToken + "", introspection: "" + introspectionResponse + "", clientId: "" + rp . getClientId ( ) ) ; if ( StringUtils . isBlank ( introspectionResponse . getClientId ( ) ) ) { LOG . error ( ""AS returned introspection response with empty/blank client_id which is required by oxd. Please check your AS installation and make sure AS return client_id for introspection call (CE 3.1.0 or later)."" ) ; throw new HttpException ( ErrorResponseCode . NO_CLIENT_ID_IN_INTROSPECTION_RESPONSE ) ; } if ( ! introspectionResponse . getScope ( ) . contains ( ""oxd"" ) ) { LOG . error ( ""access_token does not have `oxd` scope. Make sure a) scope exists on AS b) register_site is registered with 'oxd' scope c) get_client_token has 'oxd' scope in request"" ) ; throw new HttpException ( ErrorResponseCode . ACCESS_TOKEN_INSUFFICIENT_SCOPE ) ; } if ( introspectionResponse . getClientId ( ) . equals ( rp . getClientId ( ) ) ) { return ; } throw new HttpException ( ErrorResponseCode . INVALID_ACCESS_TOKEN ) ; } +","public void validateAccessToken ( String accessToken , String oxdId ) { if ( StringUtils . isBlank ( accessToken ) ) { throw new HttpException ( ErrorResponseCode . BLANK_ACCESS_TOKEN ) ; } final RpSyncService rpSyncService = ServerLauncher . getInjector ( ) . getInstance ( RpSyncService . class ) ; final Rp rp = rpSyncService . getRp ( oxdId ) ; final IntrospectionResponse introspectionResponse = introspect ( accessToken , oxdId ) ; LOG . trace ( ""access_token: "" + accessToken + "", introspection: "" + introspectionResponse + "", clientId: "" + rp . getClientId ( ) ) ; if ( StringUtils . isBlank ( introspectionResponse . getClientId ( ) ) ) { LOG . error ( ""AS returned introspection response with empty/blank client_id which is required by oxd. 
Please check your AS installation and make sure AS return client_id for introspection call (CE 3.1.0 or later)."" ) ; throw new HttpException ( ErrorResponseCode . NO_CLIENT_ID_IN_INTROSPECTION_RESPONSE ) ; } if ( ! introspectionResponse . getScope ( ) . contains ( ""oxd"" ) ) { LOG . error ( ""access_token does not have `oxd` scope. Make sure a) scope exists on AS b) register_site is registered with 'oxd' scope c) get_client_token has 'oxd' scope in request"" ) ; throw new HttpException ( ErrorResponseCode . ACCESS_TOKEN_INSUFFICIENT_SCOPE ) ; } if ( introspectionResponse . getClientId ( ) . equals ( rp . getClientId ( ) ) ) { return ; } LOG . error ( ""No access token provided in Authorization header. Forbidden."" ) ; throw new HttpException ( ErrorResponseCode . INVALID_ACCESS_TOKEN ) ; } +" +858,"@ GET @ Path ( ""/select"" ) @ Produces ( { MediaType . APPLICATION_XML , MediaType . APPLICATION_JSON } ) public Response search ( @ QueryParam ( ""q"" ) final String query , @ QueryParam ( ""rows"" ) @ DefaultValue ( ""10"" ) final String rows , @ QueryParam ( ""start"" ) @ DefaultValue ( ""0"" ) final String start , @ QueryParam ( ""hl"" ) @ DefaultValue ( ""false"" ) final String highlight , @ QueryParam ( ""hl.simple.pre"" ) @ DefaultValue ( """" ) final String highlightPre , @ QueryParam ( ""hl.simple.post"" ) @ DefaultValue ( """" ) final String highlightPost , @ QueryParam ( ""wt"" ) @ DefaultValue ( ""xml"" ) final String writerType , @ QueryParam ( ""facet"" ) @ DefaultValue ( ""false"" ) final String facet , @ Context final HttpServletRequest request ) { try { return ok ( search ( new SearchRequest . SearchRequestBuilder ( ) . setRows ( rows ) . setStart ( start ) . setQuery ( query ) . setHighlight ( highlight ) . setHighlightPre ( highlightPre ) . setHighlightPost ( highlightPost ) . setFormat ( writerType ) . setFacet ( facet ) . build ( ) , request ) ) ; } catch ( IllegalArgumentException e ) { return badRequest ( e . getMessage ( ) ) ; } catch ( QueryException qe ) { throw RestServiceHelper . createWebApplicationException ( qe , request ) ; } catch ( Exception e ) { return internalServerError ( ""Unexpected error"" ) ; } } +","@ GET @ Path ( ""/select"" ) @ Produces ( { MediaType . APPLICATION_XML , MediaType . APPLICATION_JSON } ) public Response search ( @ QueryParam ( ""q"" ) final String query , @ QueryParam ( ""rows"" ) @ DefaultValue ( ""10"" ) final String rows , @ QueryParam ( ""start"" ) @ DefaultValue ( ""0"" ) final String start , @ QueryParam ( ""hl"" ) @ DefaultValue ( ""false"" ) final String highlight , @ QueryParam ( ""hl.simple.pre"" ) @ DefaultValue ( """" ) final String highlightPre , @ QueryParam ( ""hl.simple.post"" ) @ DefaultValue ( """" ) final String highlightPost , @ QueryParam ( ""wt"" ) @ DefaultValue ( ""xml"" ) final String writerType , @ QueryParam ( ""facet"" ) @ DefaultValue ( ""false"" ) final String facet , @ Context final HttpServletRequest request ) { try { return ok ( search ( new SearchRequest . SearchRequestBuilder ( ) . setRows ( rows ) . setStart ( start ) . setQuery ( query ) . setHighlight ( highlight ) . setHighlightPre ( highlightPre ) . setHighlightPost ( highlightPost ) . setFormat ( writerType ) . setFacet ( facet ) . build ( ) , request ) ) ; } catch ( IllegalArgumentException e ) { return badRequest ( e . getMessage ( ) ) ; } catch ( QueryException qe ) { throw RestServiceHelper . createWebApplicationException ( qe , request ) ; } catch ( Exception e ) { LOGGER . error ( e . 
getMessage ( ) , e ) ; return internalServerError ( ""Unexpected error"" ) ; } } +" +859,"protected LogEventRequestType getLogEventRequestType ( EDXLDistribution body , AssertionType assertion , String direction , NhinTargetSystemType target , String _interface ) { LogEventRequestType result = new LogEventRequestType ( ) ; AuditMessageType auditMsg = new AuditMessageType ( ) ; boolean bRequiredFieldsAreNull = areRequiredUserTypeFieldsNull ( assertion ) ; if ( bRequiredFieldsAreNull ) { LOG . error ( ""One or more of the required fields needed to transform to an audit message request were null."" ) ; return null ; } UserType userInfo = assertion . getUserInfo ( ) ; CodedValueType eventID ; eventID = AuditDataTransformHelper . createEventId ( AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST , AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST ) ; auditMsg . setEventIdentification ( AuditDataTransformHelper . createEventIdentification ( AuditDataTransformConstants . EVENT_ACTION_CODE_CREATE , AuditDataTransformConstants . EVENT_OUTCOME_INDICATOR_SUCCESS , eventID ) ) ; AuditMessageType . ActiveParticipant participant = AuditDataTransformHelper . createActiveParticipantFromUser ( userInfo , true ) ; auditMsg . getActiveParticipant ( ) . add ( participant ) ; String communityId = getAdminDistributionMessageCommunityID ( assertion , direction , _interface , target ) ; AuditSourceIdentificationType auditSource = AuditDataTransformHelper . createAuditSourceIdentification ( communityId , communityId ) ; auditMsg . getAuditSourceIdentification ( ) . add ( auditSource ) ; result . setAuditMessage ( auditMsg ) ; result . setDirection ( _interface + "" "" + direction ) ; result . setRemoteHCID ( HomeCommunityMap . formatHomeCommunityId ( communityId ) ) ; result . setEventType ( NhincConstants . NHIN_ADMIN_DIST_SERVICE_NAME ) ; result . setEventID ( auditMsg . getEventIdentification ( ) . getEventID ( ) . getDisplayName ( ) ) ; result . setEventOutcomeIndicator ( auditMsg . getEventIdentification ( ) . getEventOutcomeIndicator ( ) ) ; result . setEventTimestamp ( auditMsg . getEventIdentification ( ) . getEventDateTime ( ) ) ; result . setAssertion ( assertion ) ; result . setRelatesTo ( getRelatesTo ( assertion ) ) ; result . setRequestMessageId ( assertion . getMessageId ( ) ) ; LOG . trace ( ""Exiting ADTransform-getLogEventRequestType() method."" ) ; return result ; } +","protected LogEventRequestType getLogEventRequestType ( EDXLDistribution body , AssertionType assertion , String direction , NhinTargetSystemType target , String _interface ) { LOG . trace ( ""Entering ADTransform-getLogEventRequestType() method."" ) ; LogEventRequestType result = new LogEventRequestType ( ) ; AuditMessageType auditMsg = new AuditMessageType ( ) ; boolean bRequiredFieldsAreNull = areRequiredUserTypeFieldsNull ( assertion ) ; if ( bRequiredFieldsAreNull ) { LOG . error ( ""One or more of the required fields needed to transform to an audit message request were null."" ) ; return null ; } UserType userInfo = assertion . getUserInfo ( ) ; CodedValueType eventID ; eventID = AuditDataTransformHelper . createEventId ( AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST , AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST ) ; auditMsg . 
setEventIdentification ( AuditDataTransformHelper . createEventIdentification ( AuditDataTransformConstants . EVENT_ACTION_CODE_CREATE , AuditDataTransformConstants . EVENT_OUTCOME_INDICATOR_SUCCESS , eventID ) ) ; AuditMessageType . ActiveParticipant participant = AuditDataTransformHelper . createActiveParticipantFromUser ( userInfo , true ) ; auditMsg . getActiveParticipant ( ) . add ( participant ) ; String communityId = getAdminDistributionMessageCommunityID ( assertion , direction , _interface , target ) ; AuditSourceIdentificationType auditSource = AuditDataTransformHelper . createAuditSourceIdentification ( communityId , communityId ) ; auditMsg . getAuditSourceIdentification ( ) . add ( auditSource ) ; result . setAuditMessage ( auditMsg ) ; result . setDirection ( _interface + "" "" + direction ) ; result . setRemoteHCID ( HomeCommunityMap . formatHomeCommunityId ( communityId ) ) ; result . setEventType ( NhincConstants . NHIN_ADMIN_DIST_SERVICE_NAME ) ; result . setEventID ( auditMsg . getEventIdentification ( ) . getEventID ( ) . getDisplayName ( ) ) ; result . setEventOutcomeIndicator ( auditMsg . getEventIdentification ( ) . getEventOutcomeIndicator ( ) ) ; result . setEventTimestamp ( auditMsg . getEventIdentification ( ) . getEventDateTime ( ) ) ; result . setAssertion ( assertion ) ; result . setRelatesTo ( getRelatesTo ( assertion ) ) ; result . setRequestMessageId ( assertion . getMessageId ( ) ) ; LOG . trace ( ""Exiting ADTransform-getLogEventRequestType() method."" ) ; return result ; } +" +860,"protected LogEventRequestType getLogEventRequestType ( EDXLDistribution body , AssertionType assertion , String direction , NhinTargetSystemType target , String _interface ) { LOG . trace ( ""Entering ADTransform-getLogEventRequestType() method."" ) ; LogEventRequestType result = new LogEventRequestType ( ) ; AuditMessageType auditMsg = new AuditMessageType ( ) ; boolean bRequiredFieldsAreNull = areRequiredUserTypeFieldsNull ( assertion ) ; if ( bRequiredFieldsAreNull ) { return null ; } UserType userInfo = assertion . getUserInfo ( ) ; CodedValueType eventID ; eventID = AuditDataTransformHelper . createEventId ( AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST , AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST ) ; auditMsg . setEventIdentification ( AuditDataTransformHelper . createEventIdentification ( AuditDataTransformConstants . EVENT_ACTION_CODE_CREATE , AuditDataTransformConstants . EVENT_OUTCOME_INDICATOR_SUCCESS , eventID ) ) ; AuditMessageType . ActiveParticipant participant = AuditDataTransformHelper . createActiveParticipantFromUser ( userInfo , true ) ; auditMsg . getActiveParticipant ( ) . add ( participant ) ; String communityId = getAdminDistributionMessageCommunityID ( assertion , direction , _interface , target ) ; AuditSourceIdentificationType auditSource = AuditDataTransformHelper . createAuditSourceIdentification ( communityId , communityId ) ; auditMsg . getAuditSourceIdentification ( ) . add ( auditSource ) ; result . setAuditMessage ( auditMsg ) ; result . setDirection ( _interface + "" "" + direction ) ; result . setRemoteHCID ( HomeCommunityMap . formatHomeCommunityId ( communityId ) ) ; result . setEventType ( NhincConstants . NHIN_ADMIN_DIST_SERVICE_NAME ) ; result . setEventID ( auditMsg . getEventIdentification ( ) . getEventID ( ) . getDisplayName ( ) ) ; result . setEventOutcomeIndicator ( auditMsg . 
getEventIdentification ( ) . getEventOutcomeIndicator ( ) ) ; result . setEventTimestamp ( auditMsg . getEventIdentification ( ) . getEventDateTime ( ) ) ; result . setAssertion ( assertion ) ; result . setRelatesTo ( getRelatesTo ( assertion ) ) ; result . setRequestMessageId ( assertion . getMessageId ( ) ) ; LOG . trace ( ""Exiting ADTransform-getLogEventRequestType() method."" ) ; return result ; } +","protected LogEventRequestType getLogEventRequestType ( EDXLDistribution body , AssertionType assertion , String direction , NhinTargetSystemType target , String _interface ) { LOG . trace ( ""Entering ADTransform-getLogEventRequestType() method."" ) ; LogEventRequestType result = new LogEventRequestType ( ) ; AuditMessageType auditMsg = new AuditMessageType ( ) ; boolean bRequiredFieldsAreNull = areRequiredUserTypeFieldsNull ( assertion ) ; if ( bRequiredFieldsAreNull ) { LOG . error ( ""One or more of the required fields needed to transform to an audit message request were null."" ) ; return null ; } UserType userInfo = assertion . getUserInfo ( ) ; CodedValueType eventID ; eventID = AuditDataTransformHelper . createEventId ( AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST , AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST ) ; auditMsg . setEventIdentification ( AuditDataTransformHelper . createEventIdentification ( AuditDataTransformConstants . EVENT_ACTION_CODE_CREATE , AuditDataTransformConstants . EVENT_OUTCOME_INDICATOR_SUCCESS , eventID ) ) ; AuditMessageType . ActiveParticipant participant = AuditDataTransformHelper . createActiveParticipantFromUser ( userInfo , true ) ; auditMsg . getActiveParticipant ( ) . add ( participant ) ; String communityId = getAdminDistributionMessageCommunityID ( assertion , direction , _interface , target ) ; AuditSourceIdentificationType auditSource = AuditDataTransformHelper . createAuditSourceIdentification ( communityId , communityId ) ; auditMsg . getAuditSourceIdentification ( ) . add ( auditSource ) ; result . setAuditMessage ( auditMsg ) ; result . setDirection ( _interface + "" "" + direction ) ; result . setRemoteHCID ( HomeCommunityMap . formatHomeCommunityId ( communityId ) ) ; result . setEventType ( NhincConstants . NHIN_ADMIN_DIST_SERVICE_NAME ) ; result . setEventID ( auditMsg . getEventIdentification ( ) . getEventID ( ) . getDisplayName ( ) ) ; result . setEventOutcomeIndicator ( auditMsg . getEventIdentification ( ) . getEventOutcomeIndicator ( ) ) ; result . setEventTimestamp ( auditMsg . getEventIdentification ( ) . getEventDateTime ( ) ) ; result . setAssertion ( assertion ) ; result . setRelatesTo ( getRelatesTo ( assertion ) ) ; result . setRequestMessageId ( assertion . getMessageId ( ) ) ; LOG . trace ( ""Exiting ADTransform-getLogEventRequestType() method."" ) ; return result ; } +" +861,"protected LogEventRequestType getLogEventRequestType ( EDXLDistribution body , AssertionType assertion , String direction , NhinTargetSystemType target , String _interface ) { LOG . trace ( ""Entering ADTransform-getLogEventRequestType() method."" ) ; LogEventRequestType result = new LogEventRequestType ( ) ; AuditMessageType auditMsg = new AuditMessageType ( ) ; boolean bRequiredFieldsAreNull = areRequiredUserTypeFieldsNull ( assertion ) ; if ( bRequiredFieldsAreNull ) { LOG . 
error ( ""One or more of the required fields needed to transform to an audit message request were null."" ) ; return null ; } UserType userInfo = assertion . getUserInfo ( ) ; CodedValueType eventID ; eventID = AuditDataTransformHelper . createEventId ( AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST , AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST ) ; auditMsg . setEventIdentification ( AuditDataTransformHelper . createEventIdentification ( AuditDataTransformConstants . EVENT_ACTION_CODE_CREATE , AuditDataTransformConstants . EVENT_OUTCOME_INDICATOR_SUCCESS , eventID ) ) ; AuditMessageType . ActiveParticipant participant = AuditDataTransformHelper . createActiveParticipantFromUser ( userInfo , true ) ; auditMsg . getActiveParticipant ( ) . add ( participant ) ; String communityId = getAdminDistributionMessageCommunityID ( assertion , direction , _interface , target ) ; AuditSourceIdentificationType auditSource = AuditDataTransformHelper . createAuditSourceIdentification ( communityId , communityId ) ; auditMsg . getAuditSourceIdentification ( ) . add ( auditSource ) ; result . setAuditMessage ( auditMsg ) ; result . setDirection ( _interface + "" "" + direction ) ; result . setRemoteHCID ( HomeCommunityMap . formatHomeCommunityId ( communityId ) ) ; result . setEventType ( NhincConstants . NHIN_ADMIN_DIST_SERVICE_NAME ) ; result . setEventID ( auditMsg . getEventIdentification ( ) . getEventID ( ) . getDisplayName ( ) ) ; result . setEventOutcomeIndicator ( auditMsg . getEventIdentification ( ) . getEventOutcomeIndicator ( ) ) ; result . setEventTimestamp ( auditMsg . getEventIdentification ( ) . getEventDateTime ( ) ) ; result . setAssertion ( assertion ) ; result . setRelatesTo ( getRelatesTo ( assertion ) ) ; result . setRequestMessageId ( assertion . getMessageId ( ) ) ; return result ; } +","protected LogEventRequestType getLogEventRequestType ( EDXLDistribution body , AssertionType assertion , String direction , NhinTargetSystemType target , String _interface ) { LOG . trace ( ""Entering ADTransform-getLogEventRequestType() method."" ) ; LogEventRequestType result = new LogEventRequestType ( ) ; AuditMessageType auditMsg = new AuditMessageType ( ) ; boolean bRequiredFieldsAreNull = areRequiredUserTypeFieldsNull ( assertion ) ; if ( bRequiredFieldsAreNull ) { LOG . error ( ""One or more of the required fields needed to transform to an audit message request were null."" ) ; return null ; } UserType userInfo = assertion . getUserInfo ( ) ; CodedValueType eventID ; eventID = AuditDataTransformHelper . createEventId ( AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST , AuditDataTransformConstants . EVENT_ID_CODE_SYS_NAME_T63 , AuditDataTransformConstants . EVENT_ID_DISPLAY_NAME_ADMIN_DIST ) ; auditMsg . setEventIdentification ( AuditDataTransformHelper . createEventIdentification ( AuditDataTransformConstants . EVENT_ACTION_CODE_CREATE , AuditDataTransformConstants . EVENT_OUTCOME_INDICATOR_SUCCESS , eventID ) ) ; AuditMessageType . ActiveParticipant participant = AuditDataTransformHelper . createActiveParticipantFromUser ( userInfo , true ) ; auditMsg . getActiveParticipant ( ) . 
add ( participant ) ; String communityId = getAdminDistributionMessageCommunityID ( assertion , direction , _interface , target ) ; AuditSourceIdentificationType auditSource = AuditDataTransformHelper . createAuditSourceIdentification ( communityId , communityId ) ; auditMsg . getAuditSourceIdentification ( ) . add ( auditSource ) ; result . setAuditMessage ( auditMsg ) ; result . setDirection ( _interface + "" "" + direction ) ; result . setRemoteHCID ( HomeCommunityMap . formatHomeCommunityId ( communityId ) ) ; result . setEventType ( NhincConstants . NHIN_ADMIN_DIST_SERVICE_NAME ) ; result . setEventID ( auditMsg . getEventIdentification ( ) . getEventID ( ) . getDisplayName ( ) ) ; result . setEventOutcomeIndicator ( auditMsg . getEventIdentification ( ) . getEventOutcomeIndicator ( ) ) ; result . setEventTimestamp ( auditMsg . getEventIdentification ( ) . getEventDateTime ( ) ) ; result . setAssertion ( assertion ) ; result . setRelatesTo ( getRelatesTo ( assertion ) ) ; result . setRequestMessageId ( assertion . getMessageId ( ) ) ; LOG . trace ( ""Exiting ADTransform-getLogEventRequestType() method."" ) ; return result ; } +" +862,"public void setRootLogLevel ( String level ) { ch . qos . logback . classic . Logger root = ( ch . qos . logback . classic . Logger ) LoggerFactory . getLogger ( Logger . ROOT_LOGGER_NAME ) ; if ( level == null || level . length ( ) == 0 ) { stopLogging ( ) ; return ; } else if ( ""debug"" . equalsIgnoreCase ( level ) ) { root . setLevel ( ch . qos . logback . classic . Level . DEBUG ) ; } else if ( ""info"" . equalsIgnoreCase ( level ) ) { root . setLevel ( ch . qos . logback . classic . Level . INFO ) ; } else if ( ""warn"" . equalsIgnoreCase ( level ) ) { root . setLevel ( ch . qos . logback . classic . Level . WARN ) ; } else if ( ""error"" . equalsIgnoreCase ( level ) ) { root . setLevel ( ch . qos . logback . classic . Level . ERROR ) ; } else { } if ( ! isLogging ) { root . addAppender ( this ) ; } getLogLevel ( ) ; broadcastState ( ) ; } +","public void setRootLogLevel ( String level ) { ch . qos . logback . classic . Logger root = ( ch . qos . logback . classic . Logger ) LoggerFactory . getLogger ( Logger . ROOT_LOGGER_NAME ) ; if ( level == null || level . length ( ) == 0 ) { stopLogging ( ) ; return ; } else if ( ""debug"" . equalsIgnoreCase ( level ) ) { root . setLevel ( ch . qos . logback . classic . Level . DEBUG ) ; } else if ( ""info"" . equalsIgnoreCase ( level ) ) { root . setLevel ( ch . qos . logback . classic . Level . INFO ) ; } else if ( ""warn"" . equalsIgnoreCase ( level ) ) { root . setLevel ( ch . qos . logback . classic . Level . WARN ) ; } else if ( ""error"" . equalsIgnoreCase ( level ) ) { root . setLevel ( ch . qos . logback . classic . Level . ERROR ) ; } else { log . error ( ""unknown logging level {}"" , level ) ; } if ( ! isLogging ) { root . addAppender ( this ) ; } getLogLevel ( ) ; broadcastState ( ) ; } +" +863,"private TextUnit getNextTextUnit ( ) throws NoSuchElementException { TextUnitDTOWithComments textUnitDTO = textUnitsIterator . next ( ) ; if ( logger . isDebugEnabled ( ) ) { } TextUnit textUnit = new TextUnit ( """" ) ; textUnit . setName ( textUnitDTO . getName ( ) ) ; textUnitUtils . replaceSourceString ( textUnit , textUnitDTO . getSource ( ) ) ; TextContainer targetTextContainer = new TextContainer ( textUnitDTO . getTarget ( ) ) ; textUnit . setTarget ( targetLocale , targetTextContainer ) ; ImportExportNote importExportNote = new ImportExportNote ( ) ; importExportNote . 
setSourceComment ( textUnitDTO . getComment ( ) ) ; importExportNote . setTargetComment ( textUnitDTO . getTargetComment ( ) ) ; importExportNote . setStatus ( textUnitDTO . getStatus ( ) ) ; importExportNote . setIncludedInLocalizedFile ( textUnitDTO . isIncludedInLocalizedFile ( ) ) ; importExportNote . setCreatedDate ( textUnitDTO . getCreatedDate ( ) ) ; importExportNote . setVariantComments ( textUnitDTO . getTmTextUnitVariantComments ( ) ) ; importExportNote . setPluralForm ( textUnitDTO . getPluralForm ( ) ) ; importExportNote . setPluralFormOther ( textUnitDTO . getPluralFormOther ( ) ) ; importExportTextUnitUtils . setImportExportNote ( textUnit , importExportNote ) ; textUnit . setPreserveWhitespaces ( true ) ; return textUnit ; } +","private TextUnit getNextTextUnit ( ) throws NoSuchElementException { TextUnitDTOWithComments textUnitDTO = textUnitsIterator . next ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( ""Get next text unit for tuId: {}, name: {}, locale: {}, source: {}"" , textUnitDTO . getTmTextUnitId ( ) , textUnitDTO . getName ( ) , textUnitDTO . getTargetLocale ( ) , textUnitDTO . getSource ( ) ) ; } TextUnit textUnit = new TextUnit ( """" ) ; textUnit . setName ( textUnitDTO . getName ( ) ) ; textUnitUtils . replaceSourceString ( textUnit , textUnitDTO . getSource ( ) ) ; TextContainer targetTextContainer = new TextContainer ( textUnitDTO . getTarget ( ) ) ; textUnit . setTarget ( targetLocale , targetTextContainer ) ; ImportExportNote importExportNote = new ImportExportNote ( ) ; importExportNote . setSourceComment ( textUnitDTO . getComment ( ) ) ; importExportNote . setTargetComment ( textUnitDTO . getTargetComment ( ) ) ; importExportNote . setStatus ( textUnitDTO . getStatus ( ) ) ; importExportNote . setIncludedInLocalizedFile ( textUnitDTO . isIncludedInLocalizedFile ( ) ) ; importExportNote . setCreatedDate ( textUnitDTO . getCreatedDate ( ) ) ; importExportNote . setVariantComments ( textUnitDTO . getTmTextUnitVariantComments ( ) ) ; importExportNote . setPluralForm ( textUnitDTO . getPluralForm ( ) ) ; importExportNote . setPluralFormOther ( textUnitDTO . getPluralFormOther ( ) ) ; importExportTextUnitUtils . setImportExportNote ( textUnit , importExportNote ) ; textUnit . setPreserveWhitespaces ( true ) ; return textUnit ; } +" +864,"private void processCacheGroup ( CacheGroupContext grp ) throws IgniteCheckedException { assert grp . offheap ( ) instanceof GridCacheOffheapManager ; PendingEntriesTree oldPendingTree ; final IgniteCacheDatabaseSharedManager db = grp . shared ( ) . database ( ) ; db . checkpointReadLock ( ) ; try { IndexStorage indexStorage = ( ( GridCacheOffheapManager ) grp . offheap ( ) ) . getIndexStorage ( ) ; RootPage pendingRootPage = indexStorage . allocateIndex ( PENDING_ENTRIES_TREE_NAME ) ; if ( pendingRootPage . isAllocated ( ) ) { indexStorage . dropIndex ( PENDING_ENTRIES_TREE_NAME ) ; return ; } oldPendingTree = new PendingEntriesTree ( grp , PENDING_ENTRIES_TREE_NAME , grp . dataRegion ( ) . pageMemory ( ) , pendingRootPage . pageId ( ) . pageId ( ) , ( ( GridCacheOffheapManager ) grp . offheap ( ) ) . reuseListForIndex ( null ) , false , null , PageIdAllocator . FLAG_IDX ) ; } finally { db . checkpointReadUnlock ( ) ; } processPendingTree ( grp , oldPendingTree ) ; if ( Thread . currentThread ( ) . isInterrupted ( ) ) return ; db . checkpointReadLock ( ) ; try { oldPendingTree . destroy ( ) ; } finally { db . 
checkpointReadUnlock ( ) ; } } +","private void processCacheGroup ( CacheGroupContext grp ) throws IgniteCheckedException { assert grp . offheap ( ) instanceof GridCacheOffheapManager ; PendingEntriesTree oldPendingTree ; final IgniteCacheDatabaseSharedManager db = grp . shared ( ) . database ( ) ; db . checkpointReadLock ( ) ; try { IndexStorage indexStorage = ( ( GridCacheOffheapManager ) grp . offheap ( ) ) . getIndexStorage ( ) ; RootPage pendingRootPage = indexStorage . allocateIndex ( PENDING_ENTRIES_TREE_NAME ) ; if ( pendingRootPage . isAllocated ( ) ) { log . info ( ""No pending tree found for cache group: [grpId="" + grp . groupId ( ) + "", grpName="" + grp . name ( ) + ']' ) ; indexStorage . dropIndex ( PENDING_ENTRIES_TREE_NAME ) ; return ; } oldPendingTree = new PendingEntriesTree ( grp , PENDING_ENTRIES_TREE_NAME , grp . dataRegion ( ) . pageMemory ( ) , pendingRootPage . pageId ( ) . pageId ( ) , ( ( GridCacheOffheapManager ) grp . offheap ( ) ) . reuseListForIndex ( null ) , false , null , PageIdAllocator . FLAG_IDX ) ; } finally { db . checkpointReadUnlock ( ) ; } processPendingTree ( grp , oldPendingTree ) ; if ( Thread . currentThread ( ) . isInterrupted ( ) ) return ; db . checkpointReadLock ( ) ; try { oldPendingTree . destroy ( ) ; } finally { db . checkpointReadUnlock ( ) ; } } +" +865,"private void respondWithFiles ( HttpServletResponse response , JSONArray filesJson ) throws IOException { JSONObject responseObject = new JSONObject ( ) ; try { responseObject . put ( ""files"" , filesJson ) ; } catch ( JSONException e ) { } String responseString = responseObject . toString ( ) ; log . info ( ""response string: {}"" , responseString ) ; response . setContentType ( ""application/json"" ) ; PrintWriter writer = response . getWriter ( ) ; writer . write ( responseString ) ; writer . close ( ) ; } +","private void respondWithFiles ( HttpServletResponse response , JSONArray filesJson ) throws IOException { JSONObject responseObject = new JSONObject ( ) ; try { responseObject . put ( ""files"" , filesJson ) ; } catch ( JSONException e ) { log . error ( ""error adding files list to JSON"" , e ) ; } String responseString = responseObject . toString ( ) ; log . info ( ""response string: {}"" , responseString ) ; response . setContentType ( ""application/json"" ) ; PrintWriter writer = response . getWriter ( ) ; writer . write ( responseString ) ; writer . close ( ) ; } +" +866,"private void respondWithFiles ( HttpServletResponse response , JSONArray filesJson ) throws IOException { JSONObject responseObject = new JSONObject ( ) ; try { responseObject . put ( ""files"" , filesJson ) ; } catch ( JSONException e ) { log . error ( ""error adding files list to JSON"" , e ) ; } String responseString = responseObject . toString ( ) ; response . setContentType ( ""application/json"" ) ; PrintWriter writer = response . getWriter ( ) ; writer . write ( responseString ) ; writer . close ( ) ; } +","private void respondWithFiles ( HttpServletResponse response , JSONArray filesJson ) throws IOException { JSONObject responseObject = new JSONObject ( ) ; try { responseObject . put ( ""files"" , filesJson ) ; } catch ( JSONException e ) { log . error ( ""error adding files list to JSON"" , e ) ; } String responseString = responseObject . toString ( ) ; log . info ( ""response string: {}"" , responseString ) ; response . setContentType ( ""application/json"" ) ; PrintWriter writer = response . getWriter ( ) ; writer . write ( responseString ) ; writer . 
close ( ) ; } +" +867,"public static void error ( final Logger logger , final String format , final Supplier < Object > supplier ) { if ( logger . isErrorEnabled ( ) ) { } } +","public static void error ( final Logger logger , final String format , final Supplier < Object > supplier ) { if ( logger . isErrorEnabled ( ) ) { logger . error ( format , supplier . get ( ) ) ; } } +" +868,"public Response addRelease ( Release release ) throws SW360Exception { final AddDocumentRequestSummary addDocumentRequestSummary = componentDatabaseHandler . addRelease ( release , user ) ; final String releaseId = addDocumentRequestSummary . getId ( ) ; if ( releaseId == null || releaseId . isEmpty ( ) ) { throw new SW360Exception ( ""Id of added release should not be empty. "" + addDocumentRequestSummary . toString ( ) ) ; } return new Response ( releaseId , AddDocumentRequestStatus . SUCCESS . equals ( addDocumentRequestSummary . getRequestStatus ( ) ) ) ; } +","public Response addRelease ( Release release ) throws SW360Exception { log . debug ( ""create Release { name='"" + release . getName ( ) + ""', version='"" + release . getVersion ( ) + ""' }"" ) ; final AddDocumentRequestSummary addDocumentRequestSummary = componentDatabaseHandler . addRelease ( release , user ) ; final String releaseId = addDocumentRequestSummary . getId ( ) ; if ( releaseId == null || releaseId . isEmpty ( ) ) { throw new SW360Exception ( ""Id of added release should not be empty. "" + addDocumentRequestSummary . toString ( ) ) ; } return new Response ( releaseId , AddDocumentRequestStatus . SUCCESS . equals ( addDocumentRequestSummary . getRequestStatus ( ) ) ) ; } +" +869,"public void getRateLimitStatus ( final String ... resources ) { } +","public void getRateLimitStatus ( final String ... resources ) { getDispatcher ( ) . invokeLater ( new AsyncTask ( RATE_LIMIT_STATUS , listeners ) { @ Override public void invoke ( List < TwitterListener > listeners ) throws TwitterException { Map < String , RateLimitStatus > rateLimitStatus = twitter . getRateLimitStatus ( resources ) ; for ( TwitterListener listener : listeners ) { try { listener . gotRateLimitStatus ( rateLimitStatus ) ; } catch ( Exception e ) { logger . warn ( ""Exception at getRateLimitStatus"" , e ) ; } } } } ) ; } +" +870,"public void getRateLimitStatus ( final String ... resources ) { getDispatcher ( ) . invokeLater ( new AsyncTask ( RATE_LIMIT_STATUS , listeners ) { @ Override public void invoke ( List < TwitterListener > listeners ) throws TwitterException { Map < String , RateLimitStatus > rateLimitStatus = twitter . getRateLimitStatus ( resources ) ; for ( TwitterListener listener : listeners ) { try { listener . gotRateLimitStatus ( rateLimitStatus ) ; } catch ( Exception e ) { } } } } ) ; } +","public void getRateLimitStatus ( final String ... resources ) { getDispatcher ( ) . invokeLater ( new AsyncTask ( RATE_LIMIT_STATUS , listeners ) { @ Override public void invoke ( List < TwitterListener > listeners ) throws TwitterException { Map < String , RateLimitStatus > rateLimitStatus = twitter . getRateLimitStatus ( resources ) ; for ( TwitterListener listener : listeners ) { try { listener . gotRateLimitStatus ( rateLimitStatus ) ; } catch ( Exception e ) { logger . warn ( ""Exception at getRateLimitStatus"" , e ) ; } } } } ) ; } +" +871,"private Collection evaluateGetInternal ( EventBean event ) { Object value = getter . get ( event ) ; if ( value == null ) { return null ; } if ( ! ( value . getClass ( ) . isArray ( ) ) ) { return null ; } if ( componentType . 
getType ( ) . isPrimitive ( ) ) { return new ArrayWrappingCollection ( value ) ; } return Arrays . asList ( ( Object [ ] ) value ) ; } +","private Collection evaluateGetInternal ( EventBean event ) { Object value = getter . get ( event ) ; if ( value == null ) { return null ; } if ( ! ( value . getClass ( ) . isArray ( ) ) ) { log . warn ( ""Expected array-type input from property '"" + propertyName + ""' but received "" + value . getClass ( ) ) ; return null ; } if ( componentType . getType ( ) . isPrimitive ( ) ) { return new ArrayWrappingCollection ( value ) ; } return Arrays . asList ( ( Object [ ] ) value ) ; } +" +872,"@ Test public void testDisambiguation ( ) throws ParseException , IOException { AGDISTIS post = new AGDISTIS ( ) ; String subjectString = ""Tom Cruise"" ; String objectString = ""Katie Holmes"" ; String preAnnotatedText = """" + subjectString + """" + objectString + """" ; HashMap < String , String > realResults = new LinkedHashMap < String , String > ( ) ; realResults . put ( ""Katie Holmes"" , ""http://dbpedia.org/resource/Katie_Holmes"" ) ; realResults . put ( ""Tom Cruise"" , ""http://dbpedia.org/resource/Tom_Cruise"" ) ; HashMap < String , String > results = post . runDisambiguation ( preAnnotatedText ) ; for ( String namedEntity : results . keySet ( ) ) { Assert . assertTrue ( results . get ( namedEntity ) . equals ( realResults . get ( namedEntity ) ) ) ; log . debug ( ""named entity: "" + namedEntity + "" -> "" + results . get ( namedEntity ) ) ; } } +","@ Test public void testDisambiguation ( ) throws ParseException , IOException { AGDISTIS post = new AGDISTIS ( ) ; String subjectString = ""Tom Cruise"" ; String objectString = ""Katie Holmes"" ; String preAnnotatedText = """" + subjectString + """" + objectString + """" ; log . debug ( ""Disambiguation for: "" + preAnnotatedText ) ; HashMap < String , String > realResults = new LinkedHashMap < String , String > ( ) ; realResults . put ( ""Katie Holmes"" , ""http://dbpedia.org/resource/Katie_Holmes"" ) ; realResults . put ( ""Tom Cruise"" , ""http://dbpedia.org/resource/Tom_Cruise"" ) ; HashMap < String , String > results = post . runDisambiguation ( preAnnotatedText ) ; for ( String namedEntity : results . keySet ( ) ) { Assert . assertTrue ( results . get ( namedEntity ) . equals ( realResults . get ( namedEntity ) ) ) ; log . debug ( ""named entity: "" + namedEntity + "" -> "" + results . get ( namedEntity ) ) ; } } +" +873,"@ Test public void testDisambiguation ( ) throws ParseException , IOException { AGDISTIS post = new AGDISTIS ( ) ; String subjectString = ""Tom Cruise"" ; String objectString = ""Katie Holmes"" ; String preAnnotatedText = """" + subjectString + """" + objectString + """" ; log . debug ( ""Disambiguation for: "" + preAnnotatedText ) ; HashMap < String , String > realResults = new LinkedHashMap < String , String > ( ) ; realResults . put ( ""Katie Holmes"" , ""http://dbpedia.org/resource/Katie_Holmes"" ) ; realResults . put ( ""Tom Cruise"" , ""http://dbpedia.org/resource/Tom_Cruise"" ) ; HashMap < String , String > results = post . runDisambiguation ( preAnnotatedText ) ; for ( String namedEntity : results . keySet ( ) ) { Assert . assertTrue ( results . get ( namedEntity ) . equals ( realResults . 
get ( namedEntity ) ) ) ; } } +","@ Test public void testDisambiguation ( ) throws ParseException , IOException { AGDISTIS post = new AGDISTIS ( ) ; String subjectString = ""Tom Cruise"" ; String objectString = ""Katie Holmes"" ; String preAnnotatedText = """" + subjectString + """" + objectString + """" ; log . debug ( ""Disambiguation for: "" + preAnnotatedText ) ; HashMap < String , String > realResults = new LinkedHashMap < String , String > ( ) ; realResults . put ( ""Katie Holmes"" , ""http://dbpedia.org/resource/Katie_Holmes"" ) ; realResults . put ( ""Tom Cruise"" , ""http://dbpedia.org/resource/Tom_Cruise"" ) ; HashMap < String , String > results = post . runDisambiguation ( preAnnotatedText ) ; for ( String namedEntity : results . keySet ( ) ) { Assert . assertTrue ( results . get ( namedEntity ) . equals ( realResults . get ( namedEntity ) ) ) ; log . debug ( ""named entity: "" + namedEntity + "" -> "" + results . get ( namedEntity ) ) ; } } +" +874,"public Future < ? > submitCarbondataAutoCleanupTask ( ScheduledExecutorService executorService ) { Future < ? > result = null ; if ( null != executorService ) { result = executorService . submit ( new CarbondataAutoCleanerTask ( ) ) ; } return result ; } +","public Future < ? > submitCarbondataAutoCleanupTask ( ScheduledExecutorService executorService ) { Future < ? > result = null ; if ( null != executorService ) { result = executorService . submit ( new CarbondataAutoCleanerTask ( ) ) ; log . debug ( ""Submitting task to Vacuum Cleaner thread pool"" ) ; } return result ; } +" +875,"public CompletionStage < ? > handleSubscribe ( final String applicationId , final Topic topic , final MessageHandler handler , final ErrorHandler < ? extends Throwable > errorHandler ) { return subscribe ( applicationId , topic , ( messageTopic , payload ) -> { try { handleMessage ( handler , payload ) ; } catch ( final Exception e ) { try { errorHandler . handleError ( e , null ) ; } catch ( final Exception e1 ) { throw e1 ; } catch ( final Throwable e1 ) { throw new Exception ( e1 ) ; } } } ) ; } +","public CompletionStage < ? > handleSubscribe ( final String applicationId , final Topic topic , final MessageHandler handler , final ErrorHandler < ? extends Throwable > errorHandler ) { return subscribe ( applicationId , topic , ( messageTopic , payload ) -> { logger . debug ( ""Received message for: {}"" , topic ) ; try { handleMessage ( handler , payload ) ; } catch ( final Exception e ) { try { errorHandler . handleError ( e , null ) ; } catch ( final Exception e1 ) { throw e1 ; } catch ( final Throwable e1 ) { throw new Exception ( e1 ) ; } } } ) ; } +" +876,"public CompletableFuture < Void > stop ( ) { abortPendingSnapshots ( ) ; if ( snapshotListener != null ) { raft . getPersistedSnapshotStore ( ) . removeSnapshotListener ( snapshotListener ) ; } try { raft . getPersistedSnapshotStore ( ) . purgePendingSnapshots ( ) . join ( ) ; } catch ( final Exception e ) { } return super . stop ( ) ; } +","public CompletableFuture < Void > stop ( ) { abortPendingSnapshots ( ) ; if ( snapshotListener != null ) { raft . getPersistedSnapshotStore ( ) . removeSnapshotListener ( snapshotListener ) ; } try { raft . getPersistedSnapshotStore ( ) . purgePendingSnapshots ( ) . join ( ) ; } catch ( final Exception e ) { log . warn ( ""Failed to purge pending snapshots, which may result in unnecessary disk usage and should be monitored"" , e ) ; } return super . 
stop ( ) ; } +" +877,"private void markOutputPathForDeletion ( final int projectId , final VirtualFile outputPath , final String classname , final String srcUrl ) { final SourceUrlClassNamePair pair = new SourceUrlClassNamePair ( srcUrl , classname ) ; synchronized ( myDataLock ) { final Outputs outputs = myOutputsToDelete . get ( projectId ) ; try { outputs . put ( outputPath . getPath ( ) , pair ) ; if ( LOG . isDebugEnabled ( ) || DEBUG_MODE ) { final String message = ""ADD path to delete: "" + outputPath + ""; source: "" + srcUrl ; if ( DEBUG_MODE ) { System . out . println ( message ) ; } } } finally { outputs . release ( ) ; } } } +","private void markOutputPathForDeletion ( final int projectId , final VirtualFile outputPath , final String classname , final String srcUrl ) { final SourceUrlClassNamePair pair = new SourceUrlClassNamePair ( srcUrl , classname ) ; synchronized ( myDataLock ) { final Outputs outputs = myOutputsToDelete . get ( projectId ) ; try { outputs . put ( outputPath . getPath ( ) , pair ) ; if ( LOG . isDebugEnabled ( ) || DEBUG_MODE ) { final String message = ""ADD path to delete: "" + outputPath + ""; source: "" + srcUrl ; LOG . debug ( message ) ; if ( DEBUG_MODE ) { System . out . println ( message ) ; } } } finally { outputs . release ( ) ; } } } +" +878,"public static synchronized void updateAccumuloVersion ( VolumeManager fs , int oldVersion ) { for ( Volume volume : fs . getVolumes ( ) ) { try { if ( getAccumuloPersistentVersion ( volume ) == oldVersion ) { Path dataVersionLocation = ServerConstants . getDataVersionLocation ( volume ) ; fs . create ( new Path ( dataVersionLocation , Integer . toString ( ServerConstants . DATA_VERSION ) ) ) . close ( ) ; Path prevDataVersionLoc = new Path ( dataVersionLocation , Integer . toString ( oldVersion ) ) ; if ( ! fs . delete ( prevDataVersionLoc ) ) { throw new RuntimeException ( ""Could not delete previous data version location ("" + prevDataVersionLoc + "") for "" + volume ) ; } } } catch ( IOException e ) { throw new RuntimeException ( ""Unable to set accumulo version: an error occurred."" , e ) ; } } } +","public static synchronized void updateAccumuloVersion ( VolumeManager fs , int oldVersion ) { for ( Volume volume : fs . getVolumes ( ) ) { try { if ( getAccumuloPersistentVersion ( volume ) == oldVersion ) { log . debug ( ""Attempting to upgrade {}"" , volume ) ; Path dataVersionLocation = ServerConstants . getDataVersionLocation ( volume ) ; fs . create ( new Path ( dataVersionLocation , Integer . toString ( ServerConstants . DATA_VERSION ) ) ) . close ( ) ; Path prevDataVersionLoc = new Path ( dataVersionLocation , Integer . toString ( oldVersion ) ) ; if ( ! fs . delete ( prevDataVersionLoc ) ) { throw new RuntimeException ( ""Could not delete previous data version location ("" + prevDataVersionLoc + "") for "" + volume ) ; } } } catch ( IOException e ) { throw new RuntimeException ( ""Unable to set accumulo version: an error occurred."" , e ) ; } } } +" +879,"@ Test public void testTxLoadFromStore ( ) throws Exception { Ignite ignite0 = ignite ( 0 ) ; final IgniteTransactions txs = ignite0 . transactions ( ) ; for ( CacheConfiguration < Integer , Integer > ccfg : cacheConfigurations ( ) ) { if ( ccfg . getCacheStoreFactory ( ) == null ) continue ; logCacheInfo ( ccfg ) ; try { IgniteCache < Integer , Integer > cache = ignite0 . createCache ( ccfg ) ; List < Integer > keys = testKeys ( cache ) ; for ( Integer key : keys ) { Integer storeVal = - 1 ; storeMap . 
put ( key , storeVal ) ; try ( Transaction tx = txs . txStart ( OPTIMISTIC , SERIALIZABLE ) ) { Integer val = cache . get ( key ) ; assertEquals ( storeVal , val ) ; tx . commit ( ) ; } checkValue ( key , storeVal , cache . getName ( ) ) ; cache . remove ( key ) ; try ( Transaction tx = txs . txStart ( OPTIMISTIC , SERIALIZABLE ) ) { Integer val = cache . get ( key ) ; assertNull ( val ) ; tx . commit ( ) ; } checkValue ( key , null , cache . getName ( ) ) ; } } finally { destroyCache ( ccfg . getName ( ) ) ; } } } +","@ Test public void testTxLoadFromStore ( ) throws Exception { Ignite ignite0 = ignite ( 0 ) ; final IgniteTransactions txs = ignite0 . transactions ( ) ; for ( CacheConfiguration < Integer , Integer > ccfg : cacheConfigurations ( ) ) { if ( ccfg . getCacheStoreFactory ( ) == null ) continue ; logCacheInfo ( ccfg ) ; try { IgniteCache < Integer , Integer > cache = ignite0 . createCache ( ccfg ) ; List < Integer > keys = testKeys ( cache ) ; for ( Integer key : keys ) { log . info ( ""Test key: "" + key ) ; Integer storeVal = - 1 ; storeMap . put ( key , storeVal ) ; try ( Transaction tx = txs . txStart ( OPTIMISTIC , SERIALIZABLE ) ) { Integer val = cache . get ( key ) ; assertEquals ( storeVal , val ) ; tx . commit ( ) ; } checkValue ( key , storeVal , cache . getName ( ) ) ; cache . remove ( key ) ; try ( Transaction tx = txs . txStart ( OPTIMISTIC , SERIALIZABLE ) ) { Integer val = cache . get ( key ) ; assertNull ( val ) ; tx . commit ( ) ; } checkValue ( key , null , cache . getName ( ) ) ; } } finally { destroyCache ( ccfg . getName ( ) ) ; } } } +" +880,"public synchronized FailoverStrategy getStrategy ( ) { if ( failoverStrategy == null ) { failoverStrategy = new SequentialStrategy ( ) ; } return failoverStrategy ; } +","public synchronized FailoverStrategy getStrategy ( ) { if ( failoverStrategy == null ) { failoverStrategy = new SequentialStrategy ( ) ; getLogger ( ) . log ( Level . INFO , ""USING_STRATEGY"" , new Object [ ] { failoverStrategy } ) ; } return failoverStrategy ; } +" +881,"public void pushChange ( String indexName ) { } +","public void pushChange ( String indexName ) { lock . executeInWriteLock ( ( ) -> { pushCounter . increment ( ) ; LOGGER . trace ( ""index added for refresh: %s"" , indexName ) ; indexToMaxRefreshTime . put ( indexName , getTime ( ) ) ; } ) ; } +" +882,"public void pushChange ( String indexName ) { lock . executeInWriteLock ( ( ) -> { pushCounter . increment ( ) ; indexToMaxRefreshTime . put ( indexName , getTime ( ) ) ; } ) ; } +","public void pushChange ( String indexName ) { lock . executeInWriteLock ( ( ) -> { pushCounter . increment ( ) ; LOGGER . trace ( ""index added for refresh: %s"" , indexName ) ; indexToMaxRefreshTime . put ( indexName , getTime ( ) ) ; } ) ; } +" +883,"void completeAndAddScreenshotToRun ( Screenshot screenshot , ScreenshotRun screenshotRun ) { screenshot . setScreenshotRun ( screenshotRun ) ; for ( ScreenshotTextUnit screenshotTextUnit : screenshot . getScreenshotTextUnits ( ) ) { completeScreenshotTextUnit ( screenshotTextUnit , screenshot ) ; } Screenshot existingScreenshot = screenshotRepository . findByScreenshotRunAndNameAndLocale ( screenshotRun , screenshot . getName ( ) , screenshot . getLocale ( ) ) ; if ( existingScreenshot != null ) { screenshotRepository . delete ( existingScreenshot ) ; screenshotRepository . flush ( ) ; } logger . debug ( ""Create new screenshot"" ) ; screenshotRepository . 
save ( screenshot ) ; } +","void completeAndAddScreenshotToRun ( Screenshot screenshot , ScreenshotRun screenshotRun ) { screenshot . setScreenshotRun ( screenshotRun ) ; for ( ScreenshotTextUnit screenshotTextUnit : screenshot . getScreenshotTextUnits ( ) ) { completeScreenshotTextUnit ( screenshotTextUnit , screenshot ) ; } Screenshot existingScreenshot = screenshotRepository . findByScreenshotRunAndNameAndLocale ( screenshotRun , screenshot . getName ( ) , screenshot . getLocale ( ) ) ; if ( existingScreenshot != null ) { logger . debug ( ""Screenshot exists for locale: {} and name: {}, delete it "" , existingScreenshot . getLocale ( ) == null ? null : existingScreenshot . getLocale ( ) . getBcp47Tag ( ) , existingScreenshot . getName ( ) ) ; screenshotRepository . delete ( existingScreenshot ) ; screenshotRepository . flush ( ) ; } logger . debug ( ""Create new screenshot"" ) ; screenshotRepository . save ( screenshot ) ; } +" +884,"void completeAndAddScreenshotToRun ( Screenshot screenshot , ScreenshotRun screenshotRun ) { screenshot . setScreenshotRun ( screenshotRun ) ; for ( ScreenshotTextUnit screenshotTextUnit : screenshot . getScreenshotTextUnits ( ) ) { completeScreenshotTextUnit ( screenshotTextUnit , screenshot ) ; } Screenshot existingScreenshot = screenshotRepository . findByScreenshotRunAndNameAndLocale ( screenshotRun , screenshot . getName ( ) , screenshot . getLocale ( ) ) ; if ( existingScreenshot != null ) { logger . debug ( ""Screenshot exists for locale: {} and name: {}, delete it "" , existingScreenshot . getLocale ( ) == null ? null : existingScreenshot . getLocale ( ) . getBcp47Tag ( ) , existingScreenshot . getName ( ) ) ; screenshotRepository . delete ( existingScreenshot ) ; screenshotRepository . flush ( ) ; } screenshotRepository . save ( screenshot ) ; } +","void completeAndAddScreenshotToRun ( Screenshot screenshot , ScreenshotRun screenshotRun ) { screenshot . setScreenshotRun ( screenshotRun ) ; for ( ScreenshotTextUnit screenshotTextUnit : screenshot . getScreenshotTextUnits ( ) ) { completeScreenshotTextUnit ( screenshotTextUnit , screenshot ) ; } Screenshot existingScreenshot = screenshotRepository . findByScreenshotRunAndNameAndLocale ( screenshotRun , screenshot . getName ( ) , screenshot . getLocale ( ) ) ; if ( existingScreenshot != null ) { logger . debug ( ""Screenshot exists for locale: {} and name: {}, delete it "" , existingScreenshot . getLocale ( ) == null ? null : existingScreenshot . getLocale ( ) . getBcp47Tag ( ) , existingScreenshot . getName ( ) ) ; screenshotRepository . delete ( existingScreenshot ) ; screenshotRepository . flush ( ) ; } logger . debug ( ""Create new screenshot"" ) ; screenshotRepository . save ( screenshot ) ; } +" +885,"public void implementsInterfaces ( TypeRef [ ] interfaces ) { if ( m_baseClass ) { List < String > result = new ArrayList < String > ( ) ; for ( int i = 0 ; i < interfaces . length ; i ++ ) { if ( ! interfaces [ i ] . getBinary ( ) . equals ( ""scala/ScalaObject"" ) ) { result . add ( interfaces [ i ] . getFQN ( ) ) ; } } m_interfaces = result . toArray ( new String [ result . size ( ) ] ) ; } } +","public void implementsInterfaces ( TypeRef [ ] interfaces ) { if ( m_baseClass ) { List < String > result = new ArrayList < String > ( ) ; for ( int i = 0 ; i < interfaces . length ; i ++ ) { if ( ! interfaces [ i ] . getBinary ( ) . equals ( ""scala/ScalaObject"" ) ) { result . add ( interfaces [ i ] . getFQN ( ) ) ; } } m_interfaces = result . toArray ( new String [ result . size ( ) ] ) ; m_logger . 
debug ( ""implements: %s"" , Arrays . toString ( m_interfaces ) ) ; } } +" +886,"public void processOpts ( ) { super . processOpts ( ) ; String specDir = this . additionalProperties . get ( SPEC_DIR ) + """" ; if ( ! Files . isDirectory ( Paths . get ( specDir ) ) ) { } this . includeSpecMarkupLambda = new IncludeMarkupLambda ( SPEC_DIR , specDir ) ; additionalProperties . put ( ""specinclude"" , this . includeSpecMarkupLambda ) ; String snippetDir = this . additionalProperties . get ( SNIPPET_DIR ) + """" ; if ( ! Files . isDirectory ( Paths . get ( snippetDir ) ) ) { LOGGER . warn ( ""base part for include markup lambda not found: "" + snippetDir + "" as "" + Paths . get ( snippetDir ) . toAbsolutePath ( ) ) ; } this . includeSnippetMarkupLambda = new IncludeMarkupLambda ( SNIPPET_DIR , snippetDir ) ; additionalProperties . put ( ""snippetinclude"" , this . includeSnippetMarkupLambda ) ; this . linkSnippetMarkupLambda = new LinkMarkupLambda ( snippetDir ) ; additionalProperties . put ( ""snippetlink"" , this . linkSnippetMarkupLambda ) ; processBooleanFlag ( HEADER_ATTRIBUTES_FLAG , headerAttributes ) ; processBooleanFlag ( USE_INTRODUCTION_FLAG , useIntroduction ) ; processBooleanFlag ( SKIP_EXAMPLES_FLAG , skipExamples ) ; processBooleanFlag ( USE_METHOD_AND_PATH_FLAG , useMethodAndPath ) ; processBooleanFlag ( USE_TABLE_TITLES_FLAG , useTableTitles ) ; } +","public void processOpts ( ) { super . processOpts ( ) ; String specDir = this . additionalProperties . get ( SPEC_DIR ) + """" ; if ( ! Files . isDirectory ( Paths . get ( specDir ) ) ) { LOGGER . warn ( ""base part for include markup lambda not found: "" + specDir + "" as "" + Paths . get ( specDir ) . toAbsolutePath ( ) ) ; } this . includeSpecMarkupLambda = new IncludeMarkupLambda ( SPEC_DIR , specDir ) ; additionalProperties . put ( ""specinclude"" , this . includeSpecMarkupLambda ) ; String snippetDir = this . additionalProperties . get ( SNIPPET_DIR ) + """" ; if ( ! Files . isDirectory ( Paths . get ( snippetDir ) ) ) { LOGGER . warn ( ""base part for include markup lambda not found: "" + snippetDir + "" as "" + Paths . get ( snippetDir ) . toAbsolutePath ( ) ) ; } this . includeSnippetMarkupLambda = new IncludeMarkupLambda ( SNIPPET_DIR , snippetDir ) ; additionalProperties . put ( ""snippetinclude"" , this . includeSnippetMarkupLambda ) ; this . linkSnippetMarkupLambda = new LinkMarkupLambda ( snippetDir ) ; additionalProperties . put ( ""snippetlink"" , this . linkSnippetMarkupLambda ) ; processBooleanFlag ( HEADER_ATTRIBUTES_FLAG , headerAttributes ) ; processBooleanFlag ( USE_INTRODUCTION_FLAG , useIntroduction ) ; processBooleanFlag ( SKIP_EXAMPLES_FLAG , skipExamples ) ; processBooleanFlag ( USE_METHOD_AND_PATH_FLAG , useMethodAndPath ) ; processBooleanFlag ( USE_TABLE_TITLES_FLAG , useTableTitles ) ; } +" +887,"public void processOpts ( ) { super . processOpts ( ) ; String specDir = this . additionalProperties . get ( SPEC_DIR ) + """" ; if ( ! Files . isDirectory ( Paths . get ( specDir ) ) ) { LOGGER . warn ( ""base part for include markup lambda not found: "" + specDir + "" as "" + Paths . get ( specDir ) . toAbsolutePath ( ) ) ; } this . includeSpecMarkupLambda = new IncludeMarkupLambda ( SPEC_DIR , specDir ) ; additionalProperties . put ( ""specinclude"" , this . includeSpecMarkupLambda ) ; String snippetDir = this . additionalProperties . get ( SNIPPET_DIR ) + """" ; if ( ! Files . isDirectory ( Paths . get ( snippetDir ) ) ) { } this . 
includeSnippetMarkupLambda = new IncludeMarkupLambda ( SNIPPET_DIR , snippetDir ) ; additionalProperties . put ( ""snippetinclude"" , this . includeSnippetMarkupLambda ) ; this . linkSnippetMarkupLambda = new LinkMarkupLambda ( snippetDir ) ; additionalProperties . put ( ""snippetlink"" , this . linkSnippetMarkupLambda ) ; processBooleanFlag ( HEADER_ATTRIBUTES_FLAG , headerAttributes ) ; processBooleanFlag ( USE_INTRODUCTION_FLAG , useIntroduction ) ; processBooleanFlag ( SKIP_EXAMPLES_FLAG , skipExamples ) ; processBooleanFlag ( USE_METHOD_AND_PATH_FLAG , useMethodAndPath ) ; processBooleanFlag ( USE_TABLE_TITLES_FLAG , useTableTitles ) ; } +","public void processOpts ( ) { super . processOpts ( ) ; String specDir = this . additionalProperties . get ( SPEC_DIR ) + """" ; if ( ! Files . isDirectory ( Paths . get ( specDir ) ) ) { LOGGER . warn ( ""base part for include markup lambda not found: "" + specDir + "" as "" + Paths . get ( specDir ) . toAbsolutePath ( ) ) ; } this . includeSpecMarkupLambda = new IncludeMarkupLambda ( SPEC_DIR , specDir ) ; additionalProperties . put ( ""specinclude"" , this . includeSpecMarkupLambda ) ; String snippetDir = this . additionalProperties . get ( SNIPPET_DIR ) + """" ; if ( ! Files . isDirectory ( Paths . get ( snippetDir ) ) ) { LOGGER . warn ( ""base part for include markup lambda not found: "" + snippetDir + "" as "" + Paths . get ( snippetDir ) . toAbsolutePath ( ) ) ; } this . includeSnippetMarkupLambda = new IncludeMarkupLambda ( SNIPPET_DIR , snippetDir ) ; additionalProperties . put ( ""snippetinclude"" , this . includeSnippetMarkupLambda ) ; this . linkSnippetMarkupLambda = new LinkMarkupLambda ( snippetDir ) ; additionalProperties . put ( ""snippetlink"" , this . linkSnippetMarkupLambda ) ; processBooleanFlag ( HEADER_ATTRIBUTES_FLAG , headerAttributes ) ; processBooleanFlag ( USE_INTRODUCTION_FLAG , useIntroduction ) ; processBooleanFlag ( SKIP_EXAMPLES_FLAG , skipExamples ) ; processBooleanFlag ( USE_METHOD_AND_PATH_FLAG , useMethodAndPath ) ; processBooleanFlag ( USE_TABLE_TITLES_FLAG , useTableTitles ) ; } +" +888,"public static void warn ( final Object message , final Throwable t ) { Logger logger = Logger . getLogger ( getCallerClassName ( ) ) ; if ( verbose ) setVerboseLogger ( logger ) ; } +","public static void warn ( final Object message , final Throwable t ) { Logger logger = Logger . getLogger ( getCallerClassName ( ) ) ; if ( verbose ) setVerboseLogger ( logger ) ; logger . warn ( message , t ) ; } +" +889,"public void saveToFile ( ) { synchronized ( voyages ) { VoyageStore voyageStore = new VoyageStore ( this ) ; try { FileOutputStream fileOut = new FileOutputStream ( VOYAGESFILE ) ; ObjectOutputStream objectOut = new ObjectOutputStream ( fileOut ) ; objectOut . writeObject ( voyageStore ) ; objectOut . close ( ) ; fileOut . close ( ) ; } catch ( IOException e ) { } } } +","public void saveToFile ( ) { synchronized ( voyages ) { VoyageStore voyageStore = new VoyageStore ( this ) ; try { FileOutputStream fileOut = new FileOutputStream ( VOYAGESFILE ) ; ObjectOutputStream objectOut = new ObjectOutputStream ( fileOut ) ; objectOut . writeObject ( voyageStore ) ; objectOut . close ( ) ; fileOut . close ( ) ; } catch ( IOException e ) { LOG . error ( ""Failed to save routes file: "" + e . getMessage ( ) ) ; } } } +" +890,"@ Test public void testApplication ( ) throws IOException , Exception { for ( final Locality l : Locality . values ( ) ) { LocalMode . runApp ( new Benchmark . 
AbstractApplication ( ) { @ Override public Locality getLocality ( ) { return l ; } } , 60000 ) ; } } +","@ Test public void testApplication ( ) throws IOException , Exception { for ( final Locality l : Locality . values ( ) ) { logger . debug ( ""Running the with {} locality"" , l ) ; LocalMode . runApp ( new Benchmark . AbstractApplication ( ) { @ Override public Locality getLocality ( ) { return l ; } } , 60000 ) ; } } +" +891,"public void initialize ( ServerConfiguration conf , LedgerManager ledgerManager , LedgerDirsManager ledgerDirsManager , LedgerDirsManager indexDirsManager , StateManager stateManager , CheckpointSource checkpointSource , Checkpointer checkpointer , StatsLogger statsLogger , ByteBufAllocator allocator ) throws IOException { long writeCacheMaxSize = getLongVariableOrDefault ( conf , WRITE_CACHE_MAX_SIZE_MB , DEFAULT_WRITE_CACHE_MAX_SIZE_MB ) * MB ; long readCacheMaxSize = getLongVariableOrDefault ( conf , READ_AHEAD_CACHE_MAX_SIZE_MB , DEFAULT_READ_CACHE_MAX_SIZE_MB ) * MB ; this . allocator = allocator ; this . numberOfDirs = ledgerDirsManager . getAllLedgerDirs ( ) . size ( ) ; log . info ( "" - Number of directories: {}"" , numberOfDirs ) ; log . info ( "" - Write cache size: {} MB"" , writeCacheMaxSize / MB ) ; log . info ( "" - Read Cache: {} MB"" , readCacheMaxSize / MB ) ; if ( readCacheMaxSize + writeCacheMaxSize > PlatformDependent . maxDirectMemory ( ) ) { throw new IOException ( ""Read and write cache sizes exceed the configured max direct memory size"" ) ; } long perDirectoryWriteCacheSize = writeCacheMaxSize / numberOfDirs ; long perDirectoryReadCacheSize = readCacheMaxSize / numberOfDirs ; gcExecutor = Executors . newSingleThreadScheduledExecutor ( new DefaultThreadFactory ( ""GarbageCollector"" ) ) ; ledgerStorageList = Lists . newArrayList ( ) ; for ( File ledgerDir : ledgerDirsManager . getAllLedgerDirs ( ) ) { File [ ] dirs = new File [ 1 ] ; dirs [ 0 ] = ledgerDir . getParentFile ( ) ; LedgerDirsManager ldm = new LedgerDirsManager ( conf , dirs , ledgerDirsManager . getDiskChecker ( ) , statsLogger ) ; ledgerStorageList . add ( newSingleDirectoryDbLedgerStorage ( conf , ledgerManager , ldm , indexDirsManager , stateManager , checkpointSource , checkpointer , statsLogger , gcExecutor , perDirectoryWriteCacheSize , perDirectoryReadCacheSize ) ) ; ldm . getListeners ( ) . forEach ( ledgerDirsManager :: addLedgerDirsListener ) ; } this . stats = new DbLedgerStorageStats ( statsLogger , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheCount ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheCount ) . sum ( ) ) ; } +","public void initialize ( ServerConfiguration conf , LedgerManager ledgerManager , LedgerDirsManager ledgerDirsManager , LedgerDirsManager indexDirsManager , StateManager stateManager , CheckpointSource checkpointSource , Checkpointer checkpointer , StatsLogger statsLogger , ByteBufAllocator allocator ) throws IOException { long writeCacheMaxSize = getLongVariableOrDefault ( conf , WRITE_CACHE_MAX_SIZE_MB , DEFAULT_WRITE_CACHE_MAX_SIZE_MB ) * MB ; long readCacheMaxSize = getLongVariableOrDefault ( conf , READ_AHEAD_CACHE_MAX_SIZE_MB , DEFAULT_READ_CACHE_MAX_SIZE_MB ) * MB ; this . 
allocator = allocator ; this . numberOfDirs = ledgerDirsManager . getAllLedgerDirs ( ) . size ( ) ; log . info ( ""Started Db Ledger Storage"" ) ; log . info ( "" - Number of directories: {}"" , numberOfDirs ) ; log . info ( "" - Write cache size: {} MB"" , writeCacheMaxSize / MB ) ; log . info ( "" - Read Cache: {} MB"" , readCacheMaxSize / MB ) ; if ( readCacheMaxSize + writeCacheMaxSize > PlatformDependent . maxDirectMemory ( ) ) { throw new IOException ( ""Read and write cache sizes exceed the configured max direct memory size"" ) ; } long perDirectoryWriteCacheSize = writeCacheMaxSize / numberOfDirs ; long perDirectoryReadCacheSize = readCacheMaxSize / numberOfDirs ; gcExecutor = Executors . newSingleThreadScheduledExecutor ( new DefaultThreadFactory ( ""GarbageCollector"" ) ) ; ledgerStorageList = Lists . newArrayList ( ) ; for ( File ledgerDir : ledgerDirsManager . getAllLedgerDirs ( ) ) { File [ ] dirs = new File [ 1 ] ; dirs [ 0 ] = ledgerDir . getParentFile ( ) ; LedgerDirsManager ldm = new LedgerDirsManager ( conf , dirs , ledgerDirsManager . getDiskChecker ( ) , statsLogger ) ; ledgerStorageList . add ( newSingleDirectoryDbLedgerStorage ( conf , ledgerManager , ldm , indexDirsManager , stateManager , checkpointSource , checkpointer , statsLogger , gcExecutor , perDirectoryWriteCacheSize , perDirectoryReadCacheSize ) ) ; ldm . getListeners ( ) . forEach ( ledgerDirsManager :: addLedgerDirsListener ) ; } this . stats = new DbLedgerStorageStats ( statsLogger , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheCount ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheCount ) . sum ( ) ) ; } +" +892,"public void initialize ( ServerConfiguration conf , LedgerManager ledgerManager , LedgerDirsManager ledgerDirsManager , LedgerDirsManager indexDirsManager , StateManager stateManager , CheckpointSource checkpointSource , Checkpointer checkpointer , StatsLogger statsLogger , ByteBufAllocator allocator ) throws IOException { long writeCacheMaxSize = getLongVariableOrDefault ( conf , WRITE_CACHE_MAX_SIZE_MB , DEFAULT_WRITE_CACHE_MAX_SIZE_MB ) * MB ; long readCacheMaxSize = getLongVariableOrDefault ( conf , READ_AHEAD_CACHE_MAX_SIZE_MB , DEFAULT_READ_CACHE_MAX_SIZE_MB ) * MB ; this . allocator = allocator ; this . numberOfDirs = ledgerDirsManager . getAllLedgerDirs ( ) . size ( ) ; log . info ( ""Started Db Ledger Storage"" ) ; log . info ( "" - Write cache size: {} MB"" , writeCacheMaxSize / MB ) ; log . info ( "" - Read Cache: {} MB"" , readCacheMaxSize / MB ) ; if ( readCacheMaxSize + writeCacheMaxSize > PlatformDependent . maxDirectMemory ( ) ) { throw new IOException ( ""Read and write cache sizes exceed the configured max direct memory size"" ) ; } long perDirectoryWriteCacheSize = writeCacheMaxSize / numberOfDirs ; long perDirectoryReadCacheSize = readCacheMaxSize / numberOfDirs ; gcExecutor = Executors . newSingleThreadScheduledExecutor ( new DefaultThreadFactory ( ""GarbageCollector"" ) ) ; ledgerStorageList = Lists . newArrayList ( ) ; for ( File ledgerDir : ledgerDirsManager . getAllLedgerDirs ( ) ) { File [ ] dirs = new File [ 1 ] ; dirs [ 0 ] = ledgerDir . 
getParentFile ( ) ; LedgerDirsManager ldm = new LedgerDirsManager ( conf , dirs , ledgerDirsManager . getDiskChecker ( ) , statsLogger ) ; ledgerStorageList . add ( newSingleDirectoryDbLedgerStorage ( conf , ledgerManager , ldm , indexDirsManager , stateManager , checkpointSource , checkpointer , statsLogger , gcExecutor , perDirectoryWriteCacheSize , perDirectoryReadCacheSize ) ) ; ldm . getListeners ( ) . forEach ( ledgerDirsManager :: addLedgerDirsListener ) ; } this . stats = new DbLedgerStorageStats ( statsLogger , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheCount ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheCount ) . sum ( ) ) ; } +","public void initialize ( ServerConfiguration conf , LedgerManager ledgerManager , LedgerDirsManager ledgerDirsManager , LedgerDirsManager indexDirsManager , StateManager stateManager , CheckpointSource checkpointSource , Checkpointer checkpointer , StatsLogger statsLogger , ByteBufAllocator allocator ) throws IOException { long writeCacheMaxSize = getLongVariableOrDefault ( conf , WRITE_CACHE_MAX_SIZE_MB , DEFAULT_WRITE_CACHE_MAX_SIZE_MB ) * MB ; long readCacheMaxSize = getLongVariableOrDefault ( conf , READ_AHEAD_CACHE_MAX_SIZE_MB , DEFAULT_READ_CACHE_MAX_SIZE_MB ) * MB ; this . allocator = allocator ; this . numberOfDirs = ledgerDirsManager . getAllLedgerDirs ( ) . size ( ) ; log . info ( ""Started Db Ledger Storage"" ) ; log . info ( "" - Number of directories: {}"" , numberOfDirs ) ; log . info ( "" - Write cache size: {} MB"" , writeCacheMaxSize / MB ) ; log . info ( "" - Read Cache: {} MB"" , readCacheMaxSize / MB ) ; if ( readCacheMaxSize + writeCacheMaxSize > PlatformDependent . maxDirectMemory ( ) ) { throw new IOException ( ""Read and write cache sizes exceed the configured max direct memory size"" ) ; } long perDirectoryWriteCacheSize = writeCacheMaxSize / numberOfDirs ; long perDirectoryReadCacheSize = readCacheMaxSize / numberOfDirs ; gcExecutor = Executors . newSingleThreadScheduledExecutor ( new DefaultThreadFactory ( ""GarbageCollector"" ) ) ; ledgerStorageList = Lists . newArrayList ( ) ; for ( File ledgerDir : ledgerDirsManager . getAllLedgerDirs ( ) ) { File [ ] dirs = new File [ 1 ] ; dirs [ 0 ] = ledgerDir . getParentFile ( ) ; LedgerDirsManager ldm = new LedgerDirsManager ( conf , dirs , ledgerDirsManager . getDiskChecker ( ) , statsLogger ) ; ledgerStorageList . add ( newSingleDirectoryDbLedgerStorage ( conf , ledgerManager , ldm , indexDirsManager , stateManager , checkpointSource , checkpointer , statsLogger , gcExecutor , perDirectoryWriteCacheSize , perDirectoryReadCacheSize ) ) ; ldm . getListeners ( ) . forEach ( ledgerDirsManager :: addLedgerDirsListener ) ; } this . stats = new DbLedgerStorageStats ( statsLogger , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheCount ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheCount ) . 
sum ( ) ) ; } +" +893,"public void initialize ( ServerConfiguration conf , LedgerManager ledgerManager , LedgerDirsManager ledgerDirsManager , LedgerDirsManager indexDirsManager , StateManager stateManager , CheckpointSource checkpointSource , Checkpointer checkpointer , StatsLogger statsLogger , ByteBufAllocator allocator ) throws IOException { long writeCacheMaxSize = getLongVariableOrDefault ( conf , WRITE_CACHE_MAX_SIZE_MB , DEFAULT_WRITE_CACHE_MAX_SIZE_MB ) * MB ; long readCacheMaxSize = getLongVariableOrDefault ( conf , READ_AHEAD_CACHE_MAX_SIZE_MB , DEFAULT_READ_CACHE_MAX_SIZE_MB ) * MB ; this . allocator = allocator ; this . numberOfDirs = ledgerDirsManager . getAllLedgerDirs ( ) . size ( ) ; log . info ( ""Started Db Ledger Storage"" ) ; log . info ( "" - Number of directories: {}"" , numberOfDirs ) ; log . info ( "" - Read Cache: {} MB"" , readCacheMaxSize / MB ) ; if ( readCacheMaxSize + writeCacheMaxSize > PlatformDependent . maxDirectMemory ( ) ) { throw new IOException ( ""Read and write cache sizes exceed the configured max direct memory size"" ) ; } long perDirectoryWriteCacheSize = writeCacheMaxSize / numberOfDirs ; long perDirectoryReadCacheSize = readCacheMaxSize / numberOfDirs ; gcExecutor = Executors . newSingleThreadScheduledExecutor ( new DefaultThreadFactory ( ""GarbageCollector"" ) ) ; ledgerStorageList = Lists . newArrayList ( ) ; for ( File ledgerDir : ledgerDirsManager . getAllLedgerDirs ( ) ) { File [ ] dirs = new File [ 1 ] ; dirs [ 0 ] = ledgerDir . getParentFile ( ) ; LedgerDirsManager ldm = new LedgerDirsManager ( conf , dirs , ledgerDirsManager . getDiskChecker ( ) , statsLogger ) ; ledgerStorageList . add ( newSingleDirectoryDbLedgerStorage ( conf , ledgerManager , ldm , indexDirsManager , stateManager , checkpointSource , checkpointer , statsLogger , gcExecutor , perDirectoryWriteCacheSize , perDirectoryReadCacheSize ) ) ; ldm . getListeners ( ) . forEach ( ledgerDirsManager :: addLedgerDirsListener ) ; } this . stats = new DbLedgerStorageStats ( statsLogger , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheCount ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheCount ) . sum ( ) ) ; } +","public void initialize ( ServerConfiguration conf , LedgerManager ledgerManager , LedgerDirsManager ledgerDirsManager , LedgerDirsManager indexDirsManager , StateManager stateManager , CheckpointSource checkpointSource , Checkpointer checkpointer , StatsLogger statsLogger , ByteBufAllocator allocator ) throws IOException { long writeCacheMaxSize = getLongVariableOrDefault ( conf , WRITE_CACHE_MAX_SIZE_MB , DEFAULT_WRITE_CACHE_MAX_SIZE_MB ) * MB ; long readCacheMaxSize = getLongVariableOrDefault ( conf , READ_AHEAD_CACHE_MAX_SIZE_MB , DEFAULT_READ_CACHE_MAX_SIZE_MB ) * MB ; this . allocator = allocator ; this . numberOfDirs = ledgerDirsManager . getAllLedgerDirs ( ) . size ( ) ; log . info ( ""Started Db Ledger Storage"" ) ; log . info ( "" - Number of directories: {}"" , numberOfDirs ) ; log . info ( "" - Write cache size: {} MB"" , writeCacheMaxSize / MB ) ; log . info ( "" - Read Cache: {} MB"" , readCacheMaxSize / MB ) ; if ( readCacheMaxSize + writeCacheMaxSize > PlatformDependent . 
maxDirectMemory ( ) ) { throw new IOException ( ""Read and write cache sizes exceed the configured max direct memory size"" ) ; } long perDirectoryWriteCacheSize = writeCacheMaxSize / numberOfDirs ; long perDirectoryReadCacheSize = readCacheMaxSize / numberOfDirs ; gcExecutor = Executors . newSingleThreadScheduledExecutor ( new DefaultThreadFactory ( ""GarbageCollector"" ) ) ; ledgerStorageList = Lists . newArrayList ( ) ; for ( File ledgerDir : ledgerDirsManager . getAllLedgerDirs ( ) ) { File [ ] dirs = new File [ 1 ] ; dirs [ 0 ] = ledgerDir . getParentFile ( ) ; LedgerDirsManager ldm = new LedgerDirsManager ( conf , dirs , ledgerDirsManager . getDiskChecker ( ) , statsLogger ) ; ledgerStorageList . add ( newSingleDirectoryDbLedgerStorage ( conf , ledgerManager , ldm , indexDirsManager , stateManager , checkpointSource , checkpointer , statsLogger , gcExecutor , perDirectoryWriteCacheSize , perDirectoryReadCacheSize ) ) ; ldm . getListeners ( ) . forEach ( ledgerDirsManager :: addLedgerDirsListener ) ; } this . stats = new DbLedgerStorageStats ( statsLogger , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheCount ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheCount ) . sum ( ) ) ; } +" +894,"public void initialize ( ServerConfiguration conf , LedgerManager ledgerManager , LedgerDirsManager ledgerDirsManager , LedgerDirsManager indexDirsManager , StateManager stateManager , CheckpointSource checkpointSource , Checkpointer checkpointer , StatsLogger statsLogger , ByteBufAllocator allocator ) throws IOException { long writeCacheMaxSize = getLongVariableOrDefault ( conf , WRITE_CACHE_MAX_SIZE_MB , DEFAULT_WRITE_CACHE_MAX_SIZE_MB ) * MB ; long readCacheMaxSize = getLongVariableOrDefault ( conf , READ_AHEAD_CACHE_MAX_SIZE_MB , DEFAULT_READ_CACHE_MAX_SIZE_MB ) * MB ; this . allocator = allocator ; this . numberOfDirs = ledgerDirsManager . getAllLedgerDirs ( ) . size ( ) ; log . info ( ""Started Db Ledger Storage"" ) ; log . info ( "" - Number of directories: {}"" , numberOfDirs ) ; log . info ( "" - Write cache size: {} MB"" , writeCacheMaxSize / MB ) ; if ( readCacheMaxSize + writeCacheMaxSize > PlatformDependent . maxDirectMemory ( ) ) { throw new IOException ( ""Read and write cache sizes exceed the configured max direct memory size"" ) ; } long perDirectoryWriteCacheSize = writeCacheMaxSize / numberOfDirs ; long perDirectoryReadCacheSize = readCacheMaxSize / numberOfDirs ; gcExecutor = Executors . newSingleThreadScheduledExecutor ( new DefaultThreadFactory ( ""GarbageCollector"" ) ) ; ledgerStorageList = Lists . newArrayList ( ) ; for ( File ledgerDir : ledgerDirsManager . getAllLedgerDirs ( ) ) { File [ ] dirs = new File [ 1 ] ; dirs [ 0 ] = ledgerDir . getParentFile ( ) ; LedgerDirsManager ldm = new LedgerDirsManager ( conf , dirs , ledgerDirsManager . getDiskChecker ( ) , statsLogger ) ; ledgerStorageList . add ( newSingleDirectoryDbLedgerStorage ( conf , ledgerManager , ldm , indexDirsManager , stateManager , checkpointSource , checkpointer , statsLogger , gcExecutor , perDirectoryWriteCacheSize , perDirectoryReadCacheSize ) ) ; ldm . getListeners ( ) . forEach ( ledgerDirsManager :: addLedgerDirsListener ) ; } this . 
stats = new DbLedgerStorageStats ( statsLogger , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheCount ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheCount ) . sum ( ) ) ; } +","public void initialize ( ServerConfiguration conf , LedgerManager ledgerManager , LedgerDirsManager ledgerDirsManager , LedgerDirsManager indexDirsManager , StateManager stateManager , CheckpointSource checkpointSource , Checkpointer checkpointer , StatsLogger statsLogger , ByteBufAllocator allocator ) throws IOException { long writeCacheMaxSize = getLongVariableOrDefault ( conf , WRITE_CACHE_MAX_SIZE_MB , DEFAULT_WRITE_CACHE_MAX_SIZE_MB ) * MB ; long readCacheMaxSize = getLongVariableOrDefault ( conf , READ_AHEAD_CACHE_MAX_SIZE_MB , DEFAULT_READ_CACHE_MAX_SIZE_MB ) * MB ; this . allocator = allocator ; this . numberOfDirs = ledgerDirsManager . getAllLedgerDirs ( ) . size ( ) ; log . info ( ""Started Db Ledger Storage"" ) ; log . info ( "" - Number of directories: {}"" , numberOfDirs ) ; log . info ( "" - Write cache size: {} MB"" , writeCacheMaxSize / MB ) ; log . info ( "" - Read Cache: {} MB"" , readCacheMaxSize / MB ) ; if ( readCacheMaxSize + writeCacheMaxSize > PlatformDependent . maxDirectMemory ( ) ) { throw new IOException ( ""Read and write cache sizes exceed the configured max direct memory size"" ) ; } long perDirectoryWriteCacheSize = writeCacheMaxSize / numberOfDirs ; long perDirectoryReadCacheSize = readCacheMaxSize / numberOfDirs ; gcExecutor = Executors . newSingleThreadScheduledExecutor ( new DefaultThreadFactory ( ""GarbageCollector"" ) ) ; ledgerStorageList = Lists . newArrayList ( ) ; for ( File ledgerDir : ledgerDirsManager . getAllLedgerDirs ( ) ) { File [ ] dirs = new File [ 1 ] ; dirs [ 0 ] = ledgerDir . getParentFile ( ) ; LedgerDirsManager ldm = new LedgerDirsManager ( conf , dirs , ledgerDirsManager . getDiskChecker ( ) , statsLogger ) ; ledgerStorageList . add ( newSingleDirectoryDbLedgerStorage ( conf , ledgerManager , ldm , indexDirsManager , stateManager , checkpointSource , checkpointer , statsLogger , gcExecutor , perDirectoryWriteCacheSize , perDirectoryReadCacheSize ) ) ; ldm . getListeners ( ) . forEach ( ledgerDirsManager :: addLedgerDirsListener ) ; } this . stats = new DbLedgerStorageStats ( statsLogger , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getWriteCacheCount ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheSize ) . sum ( ) , ( ) -> ledgerStorageList . stream ( ) . mapToLong ( SingleDirectoryDbLedgerStorage :: getReadCacheCount ) . sum ( ) ) ; } +" +895,"@ Test public void testSingleAspInAs ( ) throws Exception { server . start ( ) ; Thread . sleep ( 100 ) ; logger . info ( ""Starting Client"" ) ; client . start ( ) ; Thread . sleep ( 10000 ) ; assertEquals ( AspState . getState ( remAsp . getPeerFSM ( ) . getState ( ) . getName ( ) ) , AspState . ACTIVE ) ; assertEquals ( AsState . getState ( remAs . getLocalFSM ( ) . getState ( ) . getName ( ) ) , AsState . ACTIVE ) ; assertEquals ( AspState . 
getState ( localAsp . getLocalFSM ( ) . getState ( ) . getName ( ) ) , AspState . ACTIVE ) ; assertEquals ( AsState . getState ( localAs . getPeerFSM ( ) . getState ( ) . getName ( ) ) , AsState . ACTIVE ) ; client . sendRkmRegister ( ) ; Thread . sleep ( 2000 ) ; assertEquals ( serverM3uaMgmt . getAppServers ( ) . size ( ) , 2 ) ; assertEquals ( serverM3uaMgmt . getRoute ( ) . size ( ) , 3 ) ; client . sendRkmDeregister ( ) ; Thread . sleep ( 2000 ) ; assertEquals ( serverM3uaMgmt . getAppServers ( ) . size ( ) , 1 ) ; assertEquals ( serverM3uaMgmt . getRoute ( ) . size ( ) , 1 ) ; client . stop ( ) ; server . stop ( ) ; Thread . sleep ( 100 ) ; } +","@ Test public void testSingleAspInAs ( ) throws Exception { logger . info ( ""Starting server"" ) ; server . start ( ) ; Thread . sleep ( 100 ) ; logger . info ( ""Starting Client"" ) ; client . start ( ) ; Thread . sleep ( 10000 ) ; assertEquals ( AspState . getState ( remAsp . getPeerFSM ( ) . getState ( ) . getName ( ) ) , AspState . ACTIVE ) ; assertEquals ( AsState . getState ( remAs . getLocalFSM ( ) . getState ( ) . getName ( ) ) , AsState . ACTIVE ) ; assertEquals ( AspState . getState ( localAsp . getLocalFSM ( ) . getState ( ) . getName ( ) ) , AspState . ACTIVE ) ; assertEquals ( AsState . getState ( localAs . getPeerFSM ( ) . getState ( ) . getName ( ) ) , AsState . ACTIVE ) ; client . sendRkmRegister ( ) ; Thread . sleep ( 2000 ) ; assertEquals ( serverM3uaMgmt . getAppServers ( ) . size ( ) , 2 ) ; assertEquals ( serverM3uaMgmt . getRoute ( ) . size ( ) , 3 ) ; client . sendRkmDeregister ( ) ; Thread . sleep ( 2000 ) ; assertEquals ( serverM3uaMgmt . getAppServers ( ) . size ( ) , 1 ) ; assertEquals ( serverM3uaMgmt . getRoute ( ) . size ( ) , 1 ) ; client . stop ( ) ; server . stop ( ) ; Thread . sleep ( 100 ) ; } +" +896,"@ Test public void testSingleAspInAs ( ) throws Exception { logger . info ( ""Starting server"" ) ; server . start ( ) ; Thread . sleep ( 100 ) ; client . start ( ) ; Thread . sleep ( 10000 ) ; assertEquals ( AspState . getState ( remAsp . getPeerFSM ( ) . getState ( ) . getName ( ) ) , AspState . ACTIVE ) ; assertEquals ( AsState . getState ( remAs . getLocalFSM ( ) . getState ( ) . getName ( ) ) , AsState . ACTIVE ) ; assertEquals ( AspState . getState ( localAsp . getLocalFSM ( ) . getState ( ) . getName ( ) ) , AspState . ACTIVE ) ; assertEquals ( AsState . getState ( localAs . getPeerFSM ( ) . getState ( ) . getName ( ) ) , AsState . ACTIVE ) ; client . sendRkmRegister ( ) ; Thread . sleep ( 2000 ) ; assertEquals ( serverM3uaMgmt . getAppServers ( ) . size ( ) , 2 ) ; assertEquals ( serverM3uaMgmt . getRoute ( ) . size ( ) , 3 ) ; client . sendRkmDeregister ( ) ; Thread . sleep ( 2000 ) ; assertEquals ( serverM3uaMgmt . getAppServers ( ) . size ( ) , 1 ) ; assertEquals ( serverM3uaMgmt . getRoute ( ) . size ( ) , 1 ) ; client . stop ( ) ; server . stop ( ) ; Thread . sleep ( 100 ) ; } +","@ Test public void testSingleAspInAs ( ) throws Exception { logger . info ( ""Starting server"" ) ; server . start ( ) ; Thread . sleep ( 100 ) ; logger . info ( ""Starting Client"" ) ; client . start ( ) ; Thread . sleep ( 10000 ) ; assertEquals ( AspState . getState ( remAsp . getPeerFSM ( ) . getState ( ) . getName ( ) ) , AspState . ACTIVE ) ; assertEquals ( AsState . getState ( remAs . getLocalFSM ( ) . getState ( ) . getName ( ) ) , AsState . ACTIVE ) ; assertEquals ( AspState . getState ( localAsp . getLocalFSM ( ) . getState ( ) . getName ( ) ) , AspState . ACTIVE ) ; assertEquals ( AsState . 
getState ( localAs . getPeerFSM ( ) . getState ( ) . getName ( ) ) , AsState . ACTIVE ) ; client . sendRkmRegister ( ) ; Thread . sleep ( 2000 ) ; assertEquals ( serverM3uaMgmt . getAppServers ( ) . size ( ) , 2 ) ; assertEquals ( serverM3uaMgmt . getRoute ( ) . size ( ) , 3 ) ; client . sendRkmDeregister ( ) ; Thread . sleep ( 2000 ) ; assertEquals ( serverM3uaMgmt . getAppServers ( ) . size ( ) , 1 ) ; assertEquals ( serverM3uaMgmt . getRoute ( ) . size ( ) , 1 ) ; client . stop ( ) ; server . stop ( ) ; Thread . sleep ( 100 ) ; } +" +897,"private JCheckBox getActivationCheckBox ( ) { if ( myActiveCheckBox == null ) { myActiveCheckBox = new JCheckBox ( ""Active"" ) ; myActiveCheckBox . setFocusPainted ( false ) ; } return myActiveCheckBox ; } +","private JCheckBox getActivationCheckBox ( ) { if ( myActiveCheckBox == null ) { myActiveCheckBox = new JCheckBox ( ""Active"" ) ; myActiveCheckBox . setFocusPainted ( false ) ; myActiveCheckBox . addActionListener ( new ActionListener ( ) { @ Override public void actionPerformed ( ActionEvent e ) { EventQueueUtilities . waitCursorRun ( myActiveCheckBox , ( ) -> { try { new DefaultDataGroupActivator ( myToolbox . getEventManager ( ) ) . setGroupActive ( myDGI , myActiveCheckBox . isSelected ( ) ) ; } catch ( InterruptedException e1 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( e1 , e1 ) ; } } } ) ; } } ) ; } return myActiveCheckBox ; } +" +898,"private JCheckBox getActivationCheckBox ( ) { if ( myActiveCheckBox == null ) { myActiveCheckBox = new JCheckBox ( ""Active"" ) ; myActiveCheckBox . setFocusPainted ( false ) ; myActiveCheckBox . addActionListener ( new ActionListener ( ) { @ Override public void actionPerformed ( ActionEvent e ) { } } ) ; } return myActiveCheckBox ; } +","private JCheckBox getActivationCheckBox ( ) { if ( myActiveCheckBox == null ) { myActiveCheckBox = new JCheckBox ( ""Active"" ) ; myActiveCheckBox . setFocusPainted ( false ) ; myActiveCheckBox . addActionListener ( new ActionListener ( ) { @ Override public void actionPerformed ( ActionEvent e ) { EventQueueUtilities . waitCursorRun ( myActiveCheckBox , ( ) -> { try { new DefaultDataGroupActivator ( myToolbox . getEventManager ( ) ) . setGroupActive ( myDGI , myActiveCheckBox . isSelected ( ) ) ; } catch ( InterruptedException e1 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( e1 , e1 ) ; } } } ) ; } } ) ; } return myActiveCheckBox ; } +" +899,"private JCheckBox getActivationCheckBox ( ) { if ( myActiveCheckBox == null ) { myActiveCheckBox = new JCheckBox ( ""Active"" ) ; myActiveCheckBox . setFocusPainted ( false ) ; myActiveCheckBox . addActionListener ( new ActionListener ( ) { @ Override public void actionPerformed ( ActionEvent e ) { EventQueueUtilities . waitCursorRun ( myActiveCheckBox , ( ) -> { try { new DefaultDataGroupActivator ( myToolbox . getEventManager ( ) ) . setGroupActive ( myDGI , myActiveCheckBox . isSelected ( ) ) ; } catch ( InterruptedException e1 ) { if ( LOGGER . isDebugEnabled ( ) ) { } } } ) ; } } ) ; } return myActiveCheckBox ; } +","private JCheckBox getActivationCheckBox ( ) { if ( myActiveCheckBox == null ) { myActiveCheckBox = new JCheckBox ( ""Active"" ) ; myActiveCheckBox . setFocusPainted ( false ) ; myActiveCheckBox . addActionListener ( new ActionListener ( ) { @ Override public void actionPerformed ( ActionEvent e ) { EventQueueUtilities . waitCursorRun ( myActiveCheckBox , ( ) -> { try { new DefaultDataGroupActivator ( myToolbox . getEventManager ( ) ) . setGroupActive ( myDGI , myActiveCheckBox . 
isSelected ( ) ) ; } catch ( InterruptedException e1 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( e1 , e1 ) ; } } } ) ; } } ) ; } return myActiveCheckBox ; } +" +900,"protected Set < BookieId > addDefaultRackBookiesIfMinNumRacksIsEnforced ( Set < BookieId > excludeBookies ) { Set < BookieId > comprehensiveExclusionBookiesSet ; if ( enforceMinNumRacksPerWriteQuorum ) { Set < BookieId > bookiesInDefaultRack = null ; Set < Node > defaultRackLeaves = topology . getLeaves ( getDefaultRack ( ) ) ; for ( Node node : defaultRackLeaves ) { if ( node instanceof BookieNode ) { if ( bookiesInDefaultRack == null ) { bookiesInDefaultRack = new HashSet < BookieId > ( excludeBookies ) ; } bookiesInDefaultRack . add ( ( ( BookieNode ) node ) . getAddr ( ) ) ; } else { } } if ( ( bookiesInDefaultRack == null ) || bookiesInDefaultRack . isEmpty ( ) ) { comprehensiveExclusionBookiesSet = excludeBookies ; } else { comprehensiveExclusionBookiesSet = new HashSet < BookieId > ( excludeBookies ) ; comprehensiveExclusionBookiesSet . addAll ( bookiesInDefaultRack ) ; LOG . info ( ""enforceMinNumRacksPerWriteQuorum is enabled, so Excluding bookies of defaultRack: {}"" , bookiesInDefaultRack ) ; } } else { comprehensiveExclusionBookiesSet = excludeBookies ; } return comprehensiveExclusionBookiesSet ; } +","protected Set < BookieId > addDefaultRackBookiesIfMinNumRacksIsEnforced ( Set < BookieId > excludeBookies ) { Set < BookieId > comprehensiveExclusionBookiesSet ; if ( enforceMinNumRacksPerWriteQuorum ) { Set < BookieId > bookiesInDefaultRack = null ; Set < Node > defaultRackLeaves = topology . getLeaves ( getDefaultRack ( ) ) ; for ( Node node : defaultRackLeaves ) { if ( node instanceof BookieNode ) { if ( bookiesInDefaultRack == null ) { bookiesInDefaultRack = new HashSet < BookieId > ( excludeBookies ) ; } bookiesInDefaultRack . add ( ( ( BookieNode ) node ) . getAddr ( ) ) ; } else { LOG . error ( ""found non-BookieNode: {} as leaf of defaultrack: {}"" , node , getDefaultRack ( ) ) ; } } if ( ( bookiesInDefaultRack == null ) || bookiesInDefaultRack . isEmpty ( ) ) { comprehensiveExclusionBookiesSet = excludeBookies ; } else { comprehensiveExclusionBookiesSet = new HashSet < BookieId > ( excludeBookies ) ; comprehensiveExclusionBookiesSet . addAll ( bookiesInDefaultRack ) ; LOG . info ( ""enforceMinNumRacksPerWriteQuorum is enabled, so Excluding bookies of defaultRack: {}"" , bookiesInDefaultRack ) ; } } else { comprehensiveExclusionBookiesSet = excludeBookies ; } return comprehensiveExclusionBookiesSet ; } +" +901,"protected Set < BookieId > addDefaultRackBookiesIfMinNumRacksIsEnforced ( Set < BookieId > excludeBookies ) { Set < BookieId > comprehensiveExclusionBookiesSet ; if ( enforceMinNumRacksPerWriteQuorum ) { Set < BookieId > bookiesInDefaultRack = null ; Set < Node > defaultRackLeaves = topology . getLeaves ( getDefaultRack ( ) ) ; for ( Node node : defaultRackLeaves ) { if ( node instanceof BookieNode ) { if ( bookiesInDefaultRack == null ) { bookiesInDefaultRack = new HashSet < BookieId > ( excludeBookies ) ; } bookiesInDefaultRack . add ( ( ( BookieNode ) node ) . getAddr ( ) ) ; } else { LOG . error ( ""found non-BookieNode: {} as leaf of defaultrack: {}"" , node , getDefaultRack ( ) ) ; } } if ( ( bookiesInDefaultRack == null ) || bookiesInDefaultRack . isEmpty ( ) ) { comprehensiveExclusionBookiesSet = excludeBookies ; } else { comprehensiveExclusionBookiesSet = new HashSet < BookieId > ( excludeBookies ) ; comprehensiveExclusionBookiesSet . 
addAll ( bookiesInDefaultRack ) ; } } else { comprehensiveExclusionBookiesSet = excludeBookies ; } return comprehensiveExclusionBookiesSet ; } +","protected Set < BookieId > addDefaultRackBookiesIfMinNumRacksIsEnforced ( Set < BookieId > excludeBookies ) { Set < BookieId > comprehensiveExclusionBookiesSet ; if ( enforceMinNumRacksPerWriteQuorum ) { Set < BookieId > bookiesInDefaultRack = null ; Set < Node > defaultRackLeaves = topology . getLeaves ( getDefaultRack ( ) ) ; for ( Node node : defaultRackLeaves ) { if ( node instanceof BookieNode ) { if ( bookiesInDefaultRack == null ) { bookiesInDefaultRack = new HashSet < BookieId > ( excludeBookies ) ; } bookiesInDefaultRack . add ( ( ( BookieNode ) node ) . getAddr ( ) ) ; } else { LOG . error ( ""found non-BookieNode: {} as leaf of defaultrack: {}"" , node , getDefaultRack ( ) ) ; } } if ( ( bookiesInDefaultRack == null ) || bookiesInDefaultRack . isEmpty ( ) ) { comprehensiveExclusionBookiesSet = excludeBookies ; } else { comprehensiveExclusionBookiesSet = new HashSet < BookieId > ( excludeBookies ) ; comprehensiveExclusionBookiesSet . addAll ( bookiesInDefaultRack ) ; LOG . info ( ""enforceMinNumRacksPerWriteQuorum is enabled, so Excluding bookies of defaultRack: {}"" , bookiesInDefaultRack ) ; } } else { comprehensiveExclusionBookiesSet = excludeBookies ; } return comprehensiveExclusionBookiesSet ; } +" +902,"public Response toResponse ( Throwable exception ) { try { return Response . temporaryRedirect ( new URI ( getRedirectURI ( ) ) ) . build ( ) ; } catch ( Exception e ) { log . error ( ""Jersey error."" , e ) ; return Response . status ( 500 ) . entity ( ""Something bad happened. Please try again later!"" ) . type ( ""text/plain"" ) . build ( ) ; } } +","public Response toResponse ( Throwable exception ) { try { log . error ( ""Jersey error."" , exception ) ; return Response . temporaryRedirect ( new URI ( getRedirectURI ( ) ) ) . build ( ) ; } catch ( Exception e ) { log . error ( ""Jersey error."" , e ) ; return Response . status ( 500 ) . entity ( ""Something bad happened. Please try again later!"" ) . type ( ""text/plain"" ) . build ( ) ; } } +" +903,"public Response toResponse ( Throwable exception ) { try { log . error ( ""Jersey error."" , exception ) ; return Response . temporaryRedirect ( new URI ( getRedirectURI ( ) ) ) . build ( ) ; } catch ( Exception e ) { return Response . status ( 500 ) . entity ( ""Something bad happened. Please try again later!"" ) . type ( ""text/plain"" ) . build ( ) ; } } +","public Response toResponse ( Throwable exception ) { try { log . error ( ""Jersey error."" , exception ) ; return Response . temporaryRedirect ( new URI ( getRedirectURI ( ) ) ) . build ( ) ; } catch ( Exception e ) { log . error ( ""Jersey error."" , e ) ; return Response . status ( 500 ) . entity ( ""Something bad happened. Please try again later!"" ) . type ( ""text/plain"" ) . build ( ) ; } } +" +904,"private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . 
execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +","private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +" +905,"private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . 
RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +","private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +" +906,"private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +","private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . 
info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +" +907,"private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +","private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . 
getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +" +908,"private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } return res ; } +","private RuleServiceResult createSessionAndExecuteRules ( KieBase kbase , RuleServiceRequest req ) { logger . info ( ""Entering createSessionAndExecuteRules() and creating Stateless session."" ) ; StatelessKieSession ksession = kbase . newStatelessKieSession ( ) ; ksession . setGlobal ( RuleServiceConstants . RULE_RESULT_LIST_NAME , new ArrayList < Object > ( ) ) ; final RuleExecutionStatistics stats = new RuleExecutionStatistics ( ) ; List < EventListener > listeners = createEventListeners ( stats ) ; RuleEventListenerUtils . addEventListenersToKieSEssion ( ksession , listeners ) ; logger . debug ( ""About to run rules."" ) ; Collection < ? > requestObjects = req . getRequestObjects ( ) ; ksession . execute ( requestObjects ) ; logger . debug ( ""ran rules against kie (knowledge is everything)"" ) ; Globals globals = ksession . getGlobals ( ) ; Collection < String > keys = globals . getGlobalKeys ( ) ; Iterator < String > iter = keys . iterator ( ) ; RuleServiceResult res = null ; logger . debug ( ""Retrieved Rule execution statistics objects."" ) ; while ( iter . hasNext ( ) ) { if ( iter . next ( ) . equals ( RuleServiceConstants . 
RULE_RESULT_LIST_NAME ) ) { @ SuppressWarnings ( ""unchecked"" ) List < RuleHitDetail > resList = ( List < RuleHitDetail > ) globals . get ( RuleServiceConstants . RULE_RESULT_LIST_NAME ) ; res = new BasicRuleServiceResult ( resList , stats ) ; } } logger . info ( ""Retrieved Rule execution result objects and exit createSessionAndExecuteRules()."" ) ; return res ; } +" +909,"public void reserve ( NicProfile nic , Network network , VirtualMachineProfile vm , DeployDestination dest , ReservationContext context ) throws InsufficientAddressCapacityException , ConcurrentOperationException , InsufficientVirtualNetworkCapacityException { if ( dest . getHost ( ) . getHypervisorType ( ) != HypervisorType . BareMetal ) { super . reserve ( nic , network , vm , dest , context ) ; return ; } HostVO host = _hostDao . findById ( dest . getHost ( ) . getId ( ) ) ; _hostDao . loadDetails ( host ) ; String intentIp = host . getDetail ( ApiConstants . IP_ADDRESS ) ; if ( intentIp == null ) { super . reserve ( nic , network , vm , dest , context ) ; return ; } String oldIp = nic . getIPv4Address ( ) ; boolean getNewIp = false ; if ( oldIp == null ) { getNewIp = true ; } else { final IPAddressVO ipVO = _ipAddressDao . findByIpAndSourceNetworkId ( network . getId ( ) , oldIp ) ; if ( ipVO != null ) { PodVlanMapVO mapVO = _podVlanDao . listPodVlanMapsByVlan ( ipVO . getVlanId ( ) ) ; if ( mapVO . getPodId ( ) != dest . getPod ( ) . getId ( ) ) { Transaction . execute ( new TransactionCallbackNoReturn ( ) { @ Override public void doInTransactionWithoutResult ( TransactionStatus status ) { _ipAddrMgr . markIpAsUnavailable ( ipVO . getId ( ) ) ; _ipAddressDao . unassignIpAddress ( ipVO . getId ( ) ) ; } } ) ; nic . setIPv4Address ( null ) ; getNewIp = true ; } } } if ( getNewIp ) { getBaremetalIp ( nic , dest . getPod ( ) , vm , network , intentIp ) ; } DataCenter dc = _dcDao . findById ( network . getDataCenterId ( ) ) ; nic . setIPv4Dns1 ( dc . getDns1 ( ) ) ; nic . setIPv4Dns2 ( dc . getDns2 ( ) ) ; } +","public void reserve ( NicProfile nic , Network network , VirtualMachineProfile vm , DeployDestination dest , ReservationContext context ) throws InsufficientAddressCapacityException , ConcurrentOperationException , InsufficientVirtualNetworkCapacityException { if ( dest . getHost ( ) . getHypervisorType ( ) != HypervisorType . BareMetal ) { super . reserve ( nic , network , vm , dest , context ) ; return ; } HostVO host = _hostDao . findById ( dest . getHost ( ) . getId ( ) ) ; _hostDao . loadDetails ( host ) ; String intentIp = host . getDetail ( ApiConstants . IP_ADDRESS ) ; if ( intentIp == null ) { super . reserve ( nic , network , vm , dest , context ) ; return ; } String oldIp = nic . getIPv4Address ( ) ; boolean getNewIp = false ; if ( oldIp == null ) { getNewIp = true ; } else { final IPAddressVO ipVO = _ipAddressDao . findByIpAndSourceNetworkId ( network . getId ( ) , oldIp ) ; if ( ipVO != null ) { PodVlanMapVO mapVO = _podVlanDao . listPodVlanMapsByVlan ( ipVO . getVlanId ( ) ) ; if ( mapVO . getPodId ( ) != dest . getPod ( ) . getId ( ) ) { Transaction . execute ( new TransactionCallbackNoReturn ( ) { @ Override public void doInTransactionWithoutResult ( TransactionStatus status ) { _ipAddrMgr . markIpAsUnavailable ( ipVO . getId ( ) ) ; _ipAddressDao . unassignIpAddress ( ipVO . getId ( ) ) ; } } ) ; nic . setIPv4Address ( null ) ; getNewIp = true ; } } } if ( getNewIp ) { getBaremetalIp ( nic , dest . getPod ( ) , vm , network , intentIp ) ; } DataCenter dc = _dcDao . findById ( network . 
getDataCenterId ( ) ) ; nic . setIPv4Dns1 ( dc . getDns1 ( ) ) ; nic . setIPv4Dns2 ( dc . getDns2 ( ) ) ; s_logger . debug ( ""Allocated a nic "" + nic + "" for "" + vm ) ; } +" +910,"public void commitReservation ( UUID reservationId ) throws VcProviderException { removeReservation ( reservationId ) ; } +","public void commitReservation ( UUID reservationId ) throws VcProviderException { removeReservation ( reservationId ) ; logger . info ( ""current VMs Cloning is done, commit Reservation."" ) ; } +" +911,"protected void checkSchema ( final JsonNode node , final URI uri ) throws JsonModelValidationException { try { final String jsonString = objectMapper . writeValueAsString ( node ) ; objectMapper . readValue ( jsonString , Schema . class ) ; } catch ( final IOException e ) { throw new JsonModelValidationException ( ""Invalid JSON content in resource: "" + uri . toString ( ) , e ) ; } } +","protected void checkSchema ( final JsonNode node , final URI uri ) throws JsonModelValidationException { try { final String jsonString = objectMapper . writeValueAsString ( node ) ; objectMapper . readValue ( jsonString , Schema . class ) ; } catch ( final IOException e ) { ModelTest . log . warn ( ""The file '"" + uri + ""' did not pass validation."" , e ) ; throw new JsonModelValidationException ( ""Invalid JSON content in resource: "" + uri . toString ( ) , e ) ; } } +" +912,"public boolean startup ( IPentahoSession arg0 ) { ISystemConfig systemSettings = PentahoSystem . get ( ISystemConfig . class ) ; Boolean enableAsyncLoading = true ; if ( systemSettings != null ) { String disableLoadAsyncStr = systemSettings . getProperty ( ""system.enable-async-default-content-loading"" ) ; enableAsyncLoading = Boolean . valueOf ( disableLoadAsyncStr ) ; } Runnable runnable = new Runnable ( ) { @ Override public void run ( ) { try { } catch ( Exception e ) { Logger . error ( this . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; } } } ; if ( enableAsyncLoading ) { Thread t = new Thread ( runnable ) ; t . setDaemon ( true ) ; t . setName ( ""Default Content Loader Thread"" ) ; t . start ( ) ; } else { runnable . run ( ) ; } return true ; } +","public boolean startup ( IPentahoSession arg0 ) { ISystemConfig systemSettings = PentahoSystem . get ( ISystemConfig . class ) ; Boolean enableAsyncLoading = true ; if ( systemSettings != null ) { String disableLoadAsyncStr = systemSettings . getProperty ( ""system.enable-async-default-content-loading"" ) ; enableAsyncLoading = Boolean . valueOf ( disableLoadAsyncStr ) ; } Runnable runnable = new Runnable ( ) { @ Override public void run ( ) { try { SecurityHelper . getInstance ( ) . runAsSystem ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { Logger . info ( this . getClass ( ) . getName ( ) , ""Default content importer has started"" ) ; String solutionPath = PentahoSystem . getApplicationContext ( ) . getSolutionPath ( DEFAULT_CONTENT_FOLDER ) ; File directory = new File ( solutionPath ) ; IPlatformImporter importer = PentahoSystem . get ( IPlatformImporter . class ) ; ArchiveLoader archiveLoader = new ArchiveLoader ( importer ) ; archiveLoader . loadAll ( directory , ArchiveLoader . ZIPS_FILTER ) ; return null ; } } ) ; } catch ( Exception e ) { Logger . error ( this . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; } } } ; if ( enableAsyncLoading ) { Thread t = new Thread ( runnable ) ; t . setDaemon ( true ) ; t . setName ( ""Default Content Loader Thread"" ) ; t . start ( ) ; } else { runnable . 
run ( ) ; } return true ; } +" +913,"public boolean startup ( IPentahoSession arg0 ) { ISystemConfig systemSettings = PentahoSystem . get ( ISystemConfig . class ) ; Boolean enableAsyncLoading = true ; if ( systemSettings != null ) { String disableLoadAsyncStr = systemSettings . getProperty ( ""system.enable-async-default-content-loading"" ) ; enableAsyncLoading = Boolean . valueOf ( disableLoadAsyncStr ) ; } Runnable runnable = new Runnable ( ) { @ Override public void run ( ) { try { SecurityHelper . getInstance ( ) . runAsSystem ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { String solutionPath = PentahoSystem . getApplicationContext ( ) . getSolutionPath ( DEFAULT_CONTENT_FOLDER ) ; File directory = new File ( solutionPath ) ; IPlatformImporter importer = PentahoSystem . get ( IPlatformImporter . class ) ; ArchiveLoader archiveLoader = new ArchiveLoader ( importer ) ; archiveLoader . loadAll ( directory , ArchiveLoader . ZIPS_FILTER ) ; return null ; } } ) ; } catch ( Exception e ) { Logger . error ( this . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; } } } ; if ( enableAsyncLoading ) { Thread t = new Thread ( runnable ) ; t . setDaemon ( true ) ; t . setName ( ""Default Content Loader Thread"" ) ; t . start ( ) ; } else { runnable . run ( ) ; } return true ; } +","public boolean startup ( IPentahoSession arg0 ) { ISystemConfig systemSettings = PentahoSystem . get ( ISystemConfig . class ) ; Boolean enableAsyncLoading = true ; if ( systemSettings != null ) { String disableLoadAsyncStr = systemSettings . getProperty ( ""system.enable-async-default-content-loading"" ) ; enableAsyncLoading = Boolean . valueOf ( disableLoadAsyncStr ) ; } Runnable runnable = new Runnable ( ) { @ Override public void run ( ) { try { SecurityHelper . getInstance ( ) . runAsSystem ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { Logger . info ( this . getClass ( ) . getName ( ) , ""Default content importer has started"" ) ; String solutionPath = PentahoSystem . getApplicationContext ( ) . getSolutionPath ( DEFAULT_CONTENT_FOLDER ) ; File directory = new File ( solutionPath ) ; IPlatformImporter importer = PentahoSystem . get ( IPlatformImporter . class ) ; ArchiveLoader archiveLoader = new ArchiveLoader ( importer ) ; archiveLoader . loadAll ( directory , ArchiveLoader . ZIPS_FILTER ) ; return null ; } } ) ; } catch ( Exception e ) { Logger . error ( this . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; } } } ; if ( enableAsyncLoading ) { Thread t = new Thread ( runnable ) ; t . setDaemon ( true ) ; t . setName ( ""Default Content Loader Thread"" ) ; t . start ( ) ; } else { runnable . run ( ) ; } return true ; } +" +914,"public boolean startup ( IPentahoSession arg0 ) { ISystemConfig systemSettings = PentahoSystem . get ( ISystemConfig . class ) ; Boolean enableAsyncLoading = true ; if ( systemSettings != null ) { String disableLoadAsyncStr = systemSettings . getProperty ( ""system.enable-async-default-content-loading"" ) ; enableAsyncLoading = Boolean . valueOf ( disableLoadAsyncStr ) ; } Runnable runnable = new Runnable ( ) { @ Override public void run ( ) { try { SecurityHelper . getInstance ( ) . runAsSystem ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { Logger . info ( this . getClass ( ) . getName ( ) , ""Default content importer has started"" ) ; String solutionPath = PentahoSystem . getApplicationContext ( ) . 
getSolutionPath ( DEFAULT_CONTENT_FOLDER ) ; File directory = new File ( solutionPath ) ; IPlatformImporter importer = PentahoSystem . get ( IPlatformImporter . class ) ; ArchiveLoader archiveLoader = new ArchiveLoader ( importer ) ; archiveLoader . loadAll ( directory , ArchiveLoader . ZIPS_FILTER ) ; return null ; } } ) ; } catch ( Exception e ) { } } } ; if ( enableAsyncLoading ) { Thread t = new Thread ( runnable ) ; t . setDaemon ( true ) ; t . setName ( ""Default Content Loader Thread"" ) ; t . start ( ) ; } else { runnable . run ( ) ; } return true ; } +","public boolean startup ( IPentahoSession arg0 ) { ISystemConfig systemSettings = PentahoSystem . get ( ISystemConfig . class ) ; Boolean enableAsyncLoading = true ; if ( systemSettings != null ) { String disableLoadAsyncStr = systemSettings . getProperty ( ""system.enable-async-default-content-loading"" ) ; enableAsyncLoading = Boolean . valueOf ( disableLoadAsyncStr ) ; } Runnable runnable = new Runnable ( ) { @ Override public void run ( ) { try { SecurityHelper . getInstance ( ) . runAsSystem ( new Callable < Void > ( ) { @ Override public Void call ( ) throws Exception { Logger . info ( this . getClass ( ) . getName ( ) , ""Default content importer has started"" ) ; String solutionPath = PentahoSystem . getApplicationContext ( ) . getSolutionPath ( DEFAULT_CONTENT_FOLDER ) ; File directory = new File ( solutionPath ) ; IPlatformImporter importer = PentahoSystem . get ( IPlatformImporter . class ) ; ArchiveLoader archiveLoader = new ArchiveLoader ( importer ) ; archiveLoader . loadAll ( directory , ArchiveLoader . ZIPS_FILTER ) ; return null ; } } ) ; } catch ( Exception e ) { Logger . error ( this . getClass ( ) . getName ( ) , e . getMessage ( ) ) ; } } } ; if ( enableAsyncLoading ) { Thread t = new Thread ( runnable ) ; t . setDaemon ( true ) ; t . setName ( ""Default Content Loader Thread"" ) ; t . start ( ) ; } else { runnable . run ( ) ; } return true ; } +" +915,"public static String checkHeartbeatNamespace ( ServiceUnitId ns ) { Matcher m = HEARTBEAT_NAMESPACE_PATTERN . matcher ( ns . getNamespaceObject ( ) . toString ( ) ) ; if ( m . matches ( ) ) { return String . format ( ""http://%s"" , m . group ( 1 ) ) ; } else { return null ; } } +","public static String checkHeartbeatNamespace ( ServiceUnitId ns ) { Matcher m = HEARTBEAT_NAMESPACE_PATTERN . matcher ( ns . getNamespaceObject ( ) . toString ( ) ) ; if ( m . matches ( ) ) { LOG . debug ( ""SLAMonitoring namespace matched the lookup namespace {}"" , ns . getNamespaceObject ( ) . toString ( ) ) ; return String . format ( ""http://%s"" , m . group ( 1 ) ) ; } else { return null ; } } +" +916,"private void sendResponse ( UUID nodeId , Object resTopic , long reqId , @ Nullable Throwable err , boolean forceLocDep ) { byte [ ] errBytes ; try { errBytes = err != null ? U . marshal ( marsh , err ) : null ; } catch ( Exception e ) { U . error ( log , ""Failed to marshal error [err="" + err + "", marshErr="" + e + ']' , e ) ; errBytes = marshErrBytes ; } DataStreamerResponse res = new DataStreamerResponse ( reqId , errBytes , forceLocDep ) ; try { ctx . io ( ) . sendToCustomTopic ( nodeId , resTopic , res , threadIoPolicy ( ) ) ; } catch ( IgniteCheckedException e ) { if ( ctx . discovery ( ) . alive ( nodeId ) ) U . error ( log , ""Failed to respond to node [nodeId="" + nodeId + "", res="" + res + ']' , e ) ; else if ( log . 
isDebugEnabled ( ) ) } } +","private void sendResponse ( UUID nodeId , Object resTopic , long reqId , @ Nullable Throwable err , boolean forceLocDep ) { byte [ ] errBytes ; try { errBytes = err != null ? U . marshal ( marsh , err ) : null ; } catch ( Exception e ) { U . error ( log , ""Failed to marshal error [err="" + err + "", marshErr="" + e + ']' , e ) ; errBytes = marshErrBytes ; } DataStreamerResponse res = new DataStreamerResponse ( reqId , errBytes , forceLocDep ) ; try { ctx . io ( ) . sendToCustomTopic ( nodeId , resTopic , res , threadIoPolicy ( ) ) ; } catch ( IgniteCheckedException e ) { if ( ctx . discovery ( ) . alive ( nodeId ) ) U . error ( log , ""Failed to respond to node [nodeId="" + nodeId + "", res="" + res + ']' , e ) ; else if ( log . isDebugEnabled ( ) ) log . debug ( ""Node has left the grid: "" + nodeId ) ; } } +" +917,"public static void reactiveInvoke ( Invocation invocation , AsyncResponse asyncResp ) { try { invocation . onStart ( null , System . nanoTime ( ) ) ; invocation . setSync ( false ) ; ReactiveResponseExecutor respExecutor = new ReactiveResponseExecutor ( ) ; invocation . setResponseExecutor ( respExecutor ) ; invocation . onStartHandlersRequest ( ) ; invocation . next ( ar -> { ContextUtils . setInvocationContext ( invocation . getParentContext ( ) ) ; try { invocation . getInvocationStageTrace ( ) . finishHandlersResponse ( ) ; invocation . onFinish ( ar ) ; asyncResp . handle ( ar ) ; } finally { ContextUtils . removeInvocationContext ( ) ; } } ) ; } catch ( Throwable e ) { invocation . getInvocationStageTrace ( ) . finishHandlersResponse ( ) ; Response response = Response . createConsumerFail ( e ) ; invocation . onFinish ( response ) ; asyncResp . handle ( response ) ; } } +","public static void reactiveInvoke ( Invocation invocation , AsyncResponse asyncResp ) { try { invocation . onStart ( null , System . nanoTime ( ) ) ; invocation . setSync ( false ) ; ReactiveResponseExecutor respExecutor = new ReactiveResponseExecutor ( ) ; invocation . setResponseExecutor ( respExecutor ) ; invocation . onStartHandlersRequest ( ) ; invocation . next ( ar -> { ContextUtils . setInvocationContext ( invocation . getParentContext ( ) ) ; try { invocation . getInvocationStageTrace ( ) . finishHandlersResponse ( ) ; invocation . onFinish ( ar ) ; asyncResp . handle ( ar ) ; } finally { ContextUtils . removeInvocationContext ( ) ; } } ) ; } catch ( Throwable e ) { invocation . getInvocationStageTrace ( ) . finishHandlersResponse ( ) ; Response response = Response . createConsumerFail ( e ) ; invocation . onFinish ( response ) ; LOGGER . error ( ""invoke failed, {}"" , invocation . getOperationMeta ( ) . getMicroserviceQualifiedName ( ) ) ; asyncResp . handle ( response ) ; } } +" +918,"protected void upgradeResourcePermission ( String name ) throws Exception { StringBundler sb1 = new StringBundler ( 4 ) ; sb1 . append ( ""select resourceActionId, bitwiseValue from "" ) ; sb1 . append ( ""ResourceAction where actionId = 'ADD_ENTRY' and name = '"" ) ; sb1 . append ( name ) ; sb1 . append ( ""'"" ) ; StringBundler sb2 = new StringBundler ( 5 ) ; sb2 . append ( ""select resourcePermissionId, companyId, scope, primKey, "" ) ; sb2 . append ( ""primKeyId, roleId, actionIds from ResourcePermission "" ) ; sb2 . append ( ""where name = '"" ) ; sb2 . append ( name ) ; sb2 . append ( ""'"" ) ; try ( PreparedStatement preparedStatement1 = connection . prepareStatement ( sb1 . toString ( ) ) ; ResultSet resultSet1 = preparedStatement1 . executeQuery ( ) ) { if ( ! 
resultSet1 . next ( ) ) { if ( ! _ignoreMissingAddEntryResourceAction ) { } return ; } long bitwiseValue = resultSet1 . getLong ( ""bitwiseValue"" ) ; try ( PreparedStatement preparedStatement2 = connection . prepareStatement ( sb2 . toString ( ) ) ; ResultSet resultSet = preparedStatement2 . executeQuery ( ) ) { while ( resultSet . next ( ) ) { long actionIds = resultSet . getLong ( ""actionIds"" ) ; if ( ( bitwiseValue & actionIds ) == 0 ) { continue ; } long resourcePermissionId = resultSet . getLong ( ""resourcePermissionId"" ) ; long companyId = resultSet . getLong ( ""companyId"" ) ; int scope = resultSet . getInt ( ""scope"" ) ; String primKey = resultSet . getString ( ""primKey"" ) ; long primKeyId = resultSet . getLong ( ""primKeyId"" ) ; updateResourcePermission ( resourcePermissionId , actionIds - bitwiseValue ) ; if ( scope == ResourceConstants . SCOPE_INDIVIDUAL ) { if ( primKey . contains ( ""_LAYOUT_"" ) ) { primKey = String . valueOf ( companyId ) ; primKeyId = companyId ; scope = ResourceConstants . SCOPE_COMPANY ; } else { continue ; } } long roleId = resultSet . getLong ( ""roleId"" ) ; addAnnouncementsAdminViewResourcePermission ( companyId , scope , primKey , primKeyId , roleId ) ; } } long resourceActionId = resultSet1 . getLong ( ""resourceActionId"" ) ; deleteResourceAction ( resourceActionId ) ; } } +","protected void upgradeResourcePermission ( String name ) throws Exception { StringBundler sb1 = new StringBundler ( 4 ) ; sb1 . append ( ""select resourceActionId, bitwiseValue from "" ) ; sb1 . append ( ""ResourceAction where actionId = 'ADD_ENTRY' and name = '"" ) ; sb1 . append ( name ) ; sb1 . append ( ""'"" ) ; StringBundler sb2 = new StringBundler ( 5 ) ; sb2 . append ( ""select resourcePermissionId, companyId, scope, primKey, "" ) ; sb2 . append ( ""primKeyId, roleId, actionIds from ResourcePermission "" ) ; sb2 . append ( ""where name = '"" ) ; sb2 . append ( name ) ; sb2 . append ( ""'"" ) ; try ( PreparedStatement preparedStatement1 = connection . prepareStatement ( sb1 . toString ( ) ) ; ResultSet resultSet1 = preparedStatement1 . executeQuery ( ) ) { if ( ! resultSet1 . next ( ) ) { if ( ! _ignoreMissingAddEntryResourceAction ) { _log . error ( StringBundler . concat ( ""Unable to upgrade ADD_ENTRY action, "" , ""ResourceAction for "" , name , "" is not initialized"" ) ) ; } return ; } long bitwiseValue = resultSet1 . getLong ( ""bitwiseValue"" ) ; try ( PreparedStatement preparedStatement2 = connection . prepareStatement ( sb2 . toString ( ) ) ; ResultSet resultSet = preparedStatement2 . executeQuery ( ) ) { while ( resultSet . next ( ) ) { long actionIds = resultSet . getLong ( ""actionIds"" ) ; if ( ( bitwiseValue & actionIds ) == 0 ) { continue ; } long resourcePermissionId = resultSet . getLong ( ""resourcePermissionId"" ) ; long companyId = resultSet . getLong ( ""companyId"" ) ; int scope = resultSet . getInt ( ""scope"" ) ; String primKey = resultSet . getString ( ""primKey"" ) ; long primKeyId = resultSet . getLong ( ""primKeyId"" ) ; updateResourcePermission ( resourcePermissionId , actionIds - bitwiseValue ) ; if ( scope == ResourceConstants . SCOPE_INDIVIDUAL ) { if ( primKey . contains ( ""_LAYOUT_"" ) ) { primKey = String . valueOf ( companyId ) ; primKeyId = companyId ; scope = ResourceConstants . SCOPE_COMPANY ; } else { continue ; } } long roleId = resultSet . getLong ( ""roleId"" ) ; addAnnouncementsAdminViewResourcePermission ( companyId , scope , primKey , primKeyId , roleId ) ; } } long resourceActionId = resultSet1 . 
getLong ( ""resourceActionId"" ) ; deleteResourceAction ( resourceActionId ) ; } } +" +919,"@ SuppressWarnings ( ""unchecked"" ) public K apply ( File ... files ) { try ( Context context = defaultContext ( ) ) { Map < File , ExecResult > execResults = execRecursive ( APPLY , files , Comparator . comparing ( File :: getName ) . reversed ( ) ) ; for ( Map . Entry < File , ExecResult > entry : execResults . entrySet ( ) ) { if ( ! entry . getValue ( ) . exitStatus ( ) ) { LOGGER . debug ( entry . getValue ( ) . err ( ) ) ; } } return ( K ) this ; } } +","@ SuppressWarnings ( ""unchecked"" ) public K apply ( File ... files ) { try ( Context context = defaultContext ( ) ) { Map < File , ExecResult > execResults = execRecursive ( APPLY , files , Comparator . comparing ( File :: getName ) . reversed ( ) ) ; for ( Map . Entry < File , ExecResult > entry : execResults . entrySet ( ) ) { if ( ! entry . getValue ( ) . exitStatus ( ) ) { LOGGER . warn ( ""Failed to apply {}!"" , entry . getKey ( ) . getAbsolutePath ( ) ) ; LOGGER . debug ( entry . getValue ( ) . err ( ) ) ; } } return ( K ) this ; } } +" +920,"@ SuppressWarnings ( ""unchecked"" ) public K apply ( File ... files ) { try ( Context context = defaultContext ( ) ) { Map < File , ExecResult > execResults = execRecursive ( APPLY , files , Comparator . comparing ( File :: getName ) . reversed ( ) ) ; for ( Map . Entry < File , ExecResult > entry : execResults . entrySet ( ) ) { if ( ! entry . getValue ( ) . exitStatus ( ) ) { LOGGER . warn ( ""Failed to apply {}!"" , entry . getKey ( ) . getAbsolutePath ( ) ) ; } } return ( K ) this ; } } +","@ SuppressWarnings ( ""unchecked"" ) public K apply ( File ... files ) { try ( Context context = defaultContext ( ) ) { Map < File , ExecResult > execResults = execRecursive ( APPLY , files , Comparator . comparing ( File :: getName ) . reversed ( ) ) ; for ( Map . Entry < File , ExecResult > entry : execResults . entrySet ( ) ) { if ( ! entry . getValue ( ) . exitStatus ( ) ) { LOGGER . warn ( ""Failed to apply {}!"" , entry . getKey ( ) . getAbsolutePath ( ) ) ; LOGGER . debug ( entry . getValue ( ) . err ( ) ) ; } } return ( K ) this ; } } +" +921,"public void updateAccessList ( LoadBalancerEndpointConfiguration config , LoadBalancer loadBalancer ) throws InsufficientRequestException , StmRollBackException { if ( loadBalancer . getAccessLists ( ) != null && ! loadBalancer . getAccessLists ( ) . isEmpty ( ) ) { String name = ZxtmNameBuilder . genVSName ( loadBalancer ) ; updateProtection ( config , loadBalancer ) ; LOG . info ( String . format ( ""Successfully updated Access List on '%s'..."" , name ) ) ; } } +","public void updateAccessList ( LoadBalancerEndpointConfiguration config , LoadBalancer loadBalancer ) throws InsufficientRequestException , StmRollBackException { if ( loadBalancer . getAccessLists ( ) != null && ! loadBalancer . getAccessLists ( ) . isEmpty ( ) ) { String name = ZxtmNameBuilder . genVSName ( loadBalancer ) ; LOG . info ( String . format ( ""Updating Access List on '%s'..."" , name ) ) ; updateProtection ( config , loadBalancer ) ; LOG . info ( String . format ( ""Successfully updated Access List on '%s'..."" , name ) ) ; } } +" +922,"public void updateAccessList ( LoadBalancerEndpointConfiguration config , LoadBalancer loadBalancer ) throws InsufficientRequestException , StmRollBackException { if ( loadBalancer . getAccessLists ( ) != null && ! loadBalancer . getAccessLists ( ) . isEmpty ( ) ) { String name = ZxtmNameBuilder . genVSName ( loadBalancer ) ; LOG . 
info ( String . format ( ""Updating Access List on '%s'..."" , name ) ) ; updateProtection ( config , loadBalancer ) ; } } +","public void updateAccessList ( LoadBalancerEndpointConfiguration config , LoadBalancer loadBalancer ) throws InsufficientRequestException , StmRollBackException { if ( loadBalancer . getAccessLists ( ) != null && ! loadBalancer . getAccessLists ( ) . isEmpty ( ) ) { String name = ZxtmNameBuilder . genVSName ( loadBalancer ) ; LOG . info ( String . format ( ""Updating Access List on '%s'..."" , name ) ) ; updateProtection ( config , loadBalancer ) ; LOG . info ( String . format ( ""Successfully updated Access List on '%s'..."" , name ) ) ; } } +" +923,"@ Nonnull public static TextDiffType getDiffType ( boolean hasDeleted , boolean hasInserted ) { if ( hasDeleted && hasInserted ) { return TextDiffType . MODIFIED ; } else if ( hasDeleted ) { return TextDiffType . DELETED ; } else if ( hasInserted ) { return TextDiffType . INSERTED ; } else { return TextDiffType . MODIFIED ; } } +","@ Nonnull public static TextDiffType getDiffType ( boolean hasDeleted , boolean hasInserted ) { if ( hasDeleted && hasInserted ) { return TextDiffType . MODIFIED ; } else if ( hasDeleted ) { return TextDiffType . DELETED ; } else if ( hasInserted ) { return TextDiffType . INSERTED ; } else { LOG . error ( ""Diff fragment should not be empty"" ) ; return TextDiffType . MODIFIED ; } } +" +924,"private AccessToken executePost ( OkHttpClient httpClient , Request newRequest ) throws AccessTokenAcquisitionException { try { Response response = httpClient . newCall ( newRequest ) . execute ( ) ; String body = response . body ( ) . string ( ) ; if ( response . code ( ) >= 300 ) { throw new AccessTokenAcquisitionException ( String . format ( ""Got HTTP %d during oauth2 request."" , response . code ( ) ) ) ; } return parseTokenResponse ( body ) ; } catch ( IOException e ) { throw new AccessTokenAcquisitionException ( e ) ; } } +","private AccessToken executePost ( OkHttpClient httpClient , Request newRequest ) throws AccessTokenAcquisitionException { try { Response response = httpClient . newCall ( newRequest ) . execute ( ) ; String body = response . body ( ) . string ( ) ; if ( response . code ( ) >= 300 ) { getLogger ( ) . error ( String . format ( ""Bad response from the server during oauth2 request:\n%s"" , body ) ) ; throw new AccessTokenAcquisitionException ( String . format ( ""Got HTTP %d during oauth2 request."" , response . code ( ) ) ) ; } return parseTokenResponse ( body ) ; } catch ( IOException e ) { throw new AccessTokenAcquisitionException ( e ) ; } } +" +925,"public MockHost getHost ( String guid ) { TransactionLegacy txn = TransactionLegacy . open ( TransactionLegacy . SIMULATOR_DB ) ; try { txn . start ( ) ; MockHost _host = _mockHostDao . findByGuid ( guid ) ; txn . commit ( ) ; if ( _host != null ) { return _host ; } else { return null ; } } catch ( Exception ex ) { txn . rollback ( ) ; throw new CloudRuntimeException ( ""Unable to get host "" + guid + "" due to "" + ex . getMessage ( ) , ex ) ; } finally { txn . close ( ) ; txn = TransactionLegacy . open ( TransactionLegacy . CLOUD_DB ) ; txn . close ( ) ; } } +","public MockHost getHost ( String guid ) { TransactionLegacy txn = TransactionLegacy . open ( TransactionLegacy . SIMULATOR_DB ) ; try { txn . start ( ) ; MockHost _host = _mockHostDao . findByGuid ( guid ) ; txn . commit ( ) ; if ( _host != null ) { return _host ; } else { s_logger . 
error ( ""Host with guid "" + guid + "" was not found"" ) ; return null ; } } catch ( Exception ex ) { txn . rollback ( ) ; throw new CloudRuntimeException ( ""Unable to get host "" + guid + "" due to "" + ex . getMessage ( ) , ex ) ; } finally { txn . close ( ) ; txn = TransactionLegacy . open ( TransactionLegacy . CLOUD_DB ) ; txn . close ( ) ; } } +" +926,"@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +","@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +" +927,"@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . 
getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +","@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +" +928,"@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +","@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . 
getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +" +929,"@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +","@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +" +930,"@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . 
getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { } } +","@ MCRCommand ( syntax = ""repair mcrdata.xml for derivate {0} in content store {1}"" , help = ""repair the entries in mcrdata.xml with data from content store {1} for MCRDerivate {0}"" ) public static void repairMcrdataXmlForDerivate ( String derivateId , String contentStore ) { LOGGER . info ( ""Start repair of mcrdata.xml for derivate {} in store {}"" , derivateId , contentStore ) ; MCRObjectID mcrDerivateId ; try { mcrDerivateId = MCRObjectID . getInstance ( derivateId ) ; } catch ( MCRException e ) { LOGGER . error ( ""Wrong derivate parameter, it is not a MCRObjectID"" ) ; return ; } if ( contentStore == null || contentStore . length ( ) == 0 ) { LOGGER . error ( ""Empty content store parameter"" ) ; return ; } MCRContentStore store = MCRContentStoreFactory . getStore ( contentStore ) ; if ( ! ( store instanceof MCRCStoreIFS2 ) ) { LOGGER . error ( ""The content store is not a IFS2 type"" ) ; return ; } try { MCRFileCollection fileCollection = ( ( MCRCStoreIFS2 ) store ) . getIFS2FileCollection ( mcrDerivateId ) ; fileCollection . repairMetadata ( ) ; } catch ( IOException e ) { LOGGER . error ( ""Error while repair derivate with ID {}"" , mcrDerivateId ) ; } } +"
931,"@ GET @ Path ( ""capture/{agent}/upcoming"" ) @ Produces ( MediaType . APPLICATION_JSON ) @ RestQuery ( name = ""upcomingcapture"" , description = ""Get the upcoming capture event catalog as JSON"" , returnDescription = ""The upcoming capture event catalog as JSON"" , pathParameters = { @ RestParameter ( name = ""agent"" , isRequired = true , type = Type . STRING , description = ""The agent identifier"" ) } , responses = { @ RestResponse ( responseCode = HttpServletResponse . SC_OK , description = ""DublinCore of the upcoming capture event is in the body of response"" ) , @ RestResponse ( responseCode = HttpServletResponse . SC_NOT_FOUND , description = ""There is no upcoming recording"" ) , @ RestResponse ( responseCode = HttpServletResponse . SC_SERVICE_UNAVAILABLE , description = ""The agent is not ready to communicate"" ) } ) public Response upcomingCapture ( @ PathParam ( ""agent"" ) String agentId ) throws NotFoundException { if ( service == null || agentService == null ) return Response . serverError ( ) . status ( Response . Status . SERVICE_UNAVAILABLE ) . entity ( ""Scheduler service is unavailable, please wait..."" ) . build ( ) ; try { Opt < MediaPackage > upcoming = service . getUpcomingRecording ( agentId ) ; if ( upcoming . isNone ( ) ) { throw new NotFoundException ( ""No recording to stop found for agent: "" + agentId ) ; } else { DublinCoreCatalog catalog = DublinCoreUtil . loadEpisodeDublinCore ( workspace , upcoming . get ( ) ) . get ( ) ; return Response . ok ( catalog . toJson ( ) ) . build ( ) ; } } catch ( NotFoundException e ) { throw e ; } catch ( Exception e ) { logger . error ( ""Unable to get the immediate recording for agent '{}': {}"" , agentId , e ) ; throw new WebApplicationException ( Response . Status . INTERNAL_SERVER_ERROR ) ; } } +","@ GET @ Path ( ""capture/{agent}/upcoming"" ) @ Produces ( MediaType . APPLICATION_JSON ) @ RestQuery ( name = ""upcomingcapture"" , description = ""Get the upcoming capture event catalog as JSON"" , returnDescription = ""The upcoming capture event catalog as JSON"" , pathParameters = { @ RestParameter ( name = ""agent"" , isRequired = true , type = Type . STRING , description = ""The agent identifier"" ) } , responses = { @ RestResponse ( responseCode = HttpServletResponse . SC_OK , description = ""DublinCore of the upcoming capture event is in the body of response"" ) , @ RestResponse ( responseCode = HttpServletResponse . SC_NOT_FOUND , description = ""There is no upcoming recording"" ) , @ RestResponse ( responseCode = HttpServletResponse . SC_SERVICE_UNAVAILABLE , description = ""The agent is not ready to communicate"" ) } ) public Response upcomingCapture ( @ PathParam ( ""agent"" ) String agentId ) throws NotFoundException { if ( service == null || agentService == null ) return Response . serverError ( ) . status ( Response . Status . SERVICE_UNAVAILABLE ) . entity ( ""Scheduler service is unavailable, please wait..."" ) . build ( ) ; try { Opt < MediaPackage > upcoming = service . getUpcomingRecording ( agentId ) ; if ( upcoming . isNone ( ) ) { logger . info ( ""No recording to stop found for agent '{}'!"" , agentId ) ; throw new NotFoundException ( ""No recording to stop found for agent: "" + agentId ) ; } else { DublinCoreCatalog catalog = DublinCoreUtil . loadEpisodeDublinCore ( workspace , upcoming . get ( ) ) . get ( ) ; return Response . ok ( catalog . toJson ( ) ) . build ( ) ; } } catch ( NotFoundException e ) { throw e ; } catch ( Exception e ) { logger . error ( ""Unable to get the immediate recording for agent '{}': {}"" , agentId , e ) ; throw new WebApplicationException ( Response . Status . INTERNAL_SERVER_ERROR ) ; } } +"
932,"@ GET @ Path ( ""capture/{agent}/upcoming"" ) @ Produces ( MediaType . APPLICATION_JSON ) @ RestQuery ( name = ""upcomingcapture"" , description = ""Get the upcoming capture event catalog as JSON"" , returnDescription = ""The upcoming capture event catalog as JSON"" , pathParameters = { @ RestParameter ( name = ""agent"" , isRequired = true , type = Type . STRING , description = ""The agent identifier"" ) } , responses = { @ RestResponse ( responseCode = HttpServletResponse . SC_OK , description = ""DublinCore of the upcoming capture event is in the body of response"" ) , @ RestResponse ( responseCode = HttpServletResponse . SC_NOT_FOUND , description = ""There is no upcoming recording"" ) , @ RestResponse ( responseCode = HttpServletResponse . SC_SERVICE_UNAVAILABLE , description = ""The agent is not ready to communicate"" ) } ) public Response upcomingCapture ( @ PathParam ( ""agent"" ) String agentId ) throws NotFoundException { if ( service == null || agentService == null ) return Response . serverError ( ) . status ( Response . Status . SERVICE_UNAVAILABLE ) . entity ( ""Scheduler service is unavailable, please wait..."" ) . build ( ) ; try { Opt < MediaPackage > upcoming = service . getUpcomingRecording ( agentId ) ; if ( upcoming . isNone ( ) ) { logger . info ( ""No recording to stop found for agent '{}'!"" , agentId ) ; throw new NotFoundException ( ""No recording to stop found for agent: "" + agentId ) ; } else { DublinCoreCatalog catalog = DublinCoreUtil . loadEpisodeDublinCore ( workspace , upcoming . get ( ) ) . get ( ) ; return Response . ok ( catalog . toJson ( ) ) . build ( ) ; } } catch ( NotFoundException e ) { throw e ; } catch ( Exception e ) { throw new WebApplicationException ( Response . Status . INTERNAL_SERVER_ERROR ) ; } } +","@ GET @ Path ( ""capture/{agent}/upcoming"" ) @ Produces ( MediaType . APPLICATION_JSON ) @ RestQuery ( name = ""upcomingcapture"" , description = ""Get the upcoming capture event catalog as JSON"" , returnDescription = ""The upcoming capture event catalog as JSON"" , pathParameters = { @ RestParameter ( name = ""agent"" , isRequired = true , type = Type . STRING , description = ""The agent identifier"" ) } , responses = { @ RestResponse ( responseCode = HttpServletResponse . SC_OK , description = ""DublinCore of the upcoming capture event is in the body of response"" ) , @ RestResponse ( responseCode = HttpServletResponse . SC_NOT_FOUND , description = ""There is no upcoming recording"" ) , @ RestResponse ( responseCode = HttpServletResponse . SC_SERVICE_UNAVAILABLE , description = ""The agent is not ready to communicate"" ) } ) public Response upcomingCapture ( @ PathParam ( ""agent"" ) String agentId ) throws NotFoundException { if ( service == null || agentService == null ) return Response . serverError ( ) . status ( Response . Status . SERVICE_UNAVAILABLE ) . entity ( ""Scheduler service is unavailable, please wait..."" ) . build ( ) ; try { Opt < MediaPackage > upcoming = service . getUpcomingRecording ( agentId ) ; if ( upcoming . isNone ( ) ) { logger . info ( ""No recording to stop found for agent '{}'!"" , agentId ) ; throw new NotFoundException ( ""No recording to stop found for agent: "" + agentId ) ; } else { DublinCoreCatalog catalog = DublinCoreUtil . loadEpisodeDublinCore ( workspace , upcoming . get ( ) ) . get ( ) ; return Response . ok ( catalog . toJson ( ) ) . build ( ) ; } } catch ( NotFoundException e ) { throw e ; } catch ( Exception e ) { logger . error ( ""Unable to get the immediate recording for agent '{}': {}"" , agentId , e ) ; throw new WebApplicationException ( Response . Status . INTERNAL_SERVER_ERROR ) ; } } +"
933,"public void initialize ( ) throws InitializationException { CacheConfiguration cacheConfiguration = new CacheConfiguration ( ""localization."" + getId ( ) ) ; try { this . documentBundlesCache = this . cacheManager . createNewCache ( cacheConfiguration ) ; } catch ( CacheException e ) { } } +","public void initialize ( ) throws InitializationException { CacheConfiguration cacheConfiguration = new CacheConfiguration ( ""localization."" + getId ( ) ) ; try { this . documentBundlesCache = this . cacheManager . createNewCache ( cacheConfiguration ) ; } catch ( CacheException e ) { this . logger . error ( ""Failed to create cache [{}]"" , cacheConfiguration . getConfigurationId ( ) , e ) ; } } +"
934,"public static void runZKFromConfig ( ServerConfig config , ServerCnxnFactory cnxnFactory ) throws IOException { FileTxnSnapLog txnLog = null ; try { ZooKeeperServer zkServer = new ZooKeeperServer ( ) ; txnLog = new FileTxnSnapLog ( new File ( config . getDataDir ( ) ) , new File ( config . getDataDir ( ) ) ) ; zkServer . setTxnLogFactory ( txnLog ) ; zkServer . setTickTime ( config . getTickTime ( ) ) ; zkServer . setMinSessionTimeout ( config . getMinSessionTimeout ( ) ) ; zkServer . setMaxSessionTimeout ( config . getMaxSessionTimeout ( ) ) ; cnxnFactory = ServerCnxnFactory . createFactory ( ) ; cnxnFactory . configure ( config . getClientPortAddress ( ) , config . getMaxClientCnxns ( ) ) ; cnxnFactory . startup ( zkServer ) ; cnxnFactory . join ( ) ; if ( zkServer . isRunning ( ) ) { zkServer . shutdown ( ) ; } } catch ( InterruptedException e ) { AiravataZKUtils . logger . warn ( ""Server interrupted"" , e ) ; System . exit ( 1 ) ; } finally { if ( txnLog != null ) { txnLog . 
close ( ) ; } } } +","public static void runZKFromConfig ( ServerConfig config , ServerCnxnFactory cnxnFactory ) throws IOException { AiravataZKUtils . logger . info ( ""Starting Zookeeper server..."" ) ; FileTxnSnapLog txnLog = null ; try { ZooKeeperServer zkServer = new ZooKeeperServer ( ) ; txnLog = new FileTxnSnapLog ( new File ( config . getDataDir ( ) ) , new File ( config . getDataDir ( ) ) ) ; zkServer . setTxnLogFactory ( txnLog ) ; zkServer . setTickTime ( config . getTickTime ( ) ) ; zkServer . setMinSessionTimeout ( config . getMinSessionTimeout ( ) ) ; zkServer . setMaxSessionTimeout ( config . getMaxSessionTimeout ( ) ) ; cnxnFactory = ServerCnxnFactory . createFactory ( ) ; cnxnFactory . configure ( config . getClientPortAddress ( ) , config . getMaxClientCnxns ( ) ) ; cnxnFactory . startup ( zkServer ) ; cnxnFactory . join ( ) ; if ( zkServer . isRunning ( ) ) { zkServer . shutdown ( ) ; } } catch ( InterruptedException e ) { AiravataZKUtils . logger . warn ( ""Server interrupted"" , e ) ; System . exit ( 1 ) ; } finally { if ( txnLog != null ) { txnLog . close ( ) ; } } } +" +935,"public static void runZKFromConfig ( ServerConfig config , ServerCnxnFactory cnxnFactory ) throws IOException { AiravataZKUtils . logger . info ( ""Starting Zookeeper server..."" ) ; FileTxnSnapLog txnLog = null ; try { ZooKeeperServer zkServer = new ZooKeeperServer ( ) ; txnLog = new FileTxnSnapLog ( new File ( config . getDataDir ( ) ) , new File ( config . getDataDir ( ) ) ) ; zkServer . setTxnLogFactory ( txnLog ) ; zkServer . setTickTime ( config . getTickTime ( ) ) ; zkServer . setMinSessionTimeout ( config . getMinSessionTimeout ( ) ) ; zkServer . setMaxSessionTimeout ( config . getMaxSessionTimeout ( ) ) ; cnxnFactory = ServerCnxnFactory . createFactory ( ) ; cnxnFactory . configure ( config . getClientPortAddress ( ) , config . getMaxClientCnxns ( ) ) ; cnxnFactory . startup ( zkServer ) ; cnxnFactory . join ( ) ; if ( zkServer . isRunning ( ) ) { zkServer . shutdown ( ) ; } } catch ( InterruptedException e ) { System . exit ( 1 ) ; } finally { if ( txnLog != null ) { txnLog . close ( ) ; } } } +","public static void runZKFromConfig ( ServerConfig config , ServerCnxnFactory cnxnFactory ) throws IOException { AiravataZKUtils . logger . info ( ""Starting Zookeeper server..."" ) ; FileTxnSnapLog txnLog = null ; try { ZooKeeperServer zkServer = new ZooKeeperServer ( ) ; txnLog = new FileTxnSnapLog ( new File ( config . getDataDir ( ) ) , new File ( config . getDataDir ( ) ) ) ; zkServer . setTxnLogFactory ( txnLog ) ; zkServer . setTickTime ( config . getTickTime ( ) ) ; zkServer . setMinSessionTimeout ( config . getMinSessionTimeout ( ) ) ; zkServer . setMaxSessionTimeout ( config . getMaxSessionTimeout ( ) ) ; cnxnFactory = ServerCnxnFactory . createFactory ( ) ; cnxnFactory . configure ( config . getClientPortAddress ( ) , config . getMaxClientCnxns ( ) ) ; cnxnFactory . startup ( zkServer ) ; cnxnFactory . join ( ) ; if ( zkServer . isRunning ( ) ) { zkServer . shutdown ( ) ; } } catch ( InterruptedException e ) { AiravataZKUtils . logger . warn ( ""Server interrupted"" , e ) ; System . exit ( 1 ) ; } finally { if ( txnLog != null ) { txnLog . close ( ) ; } } } +" +936,"protected void internalCompleteAnnotation ( AjaxRequestTarget aTarget , CAS aCas ) throws IOException , AnnotationException { AnnotatorState state = getModelObject ( ) ; int sentenceNumber = getSentenceNumber ( aCas , state . getSelection ( ) . getBegin ( ) ) ; state . setFocusUnitIndex ( sentenceNumber ) ; editorPage . 
writeEditorCas ( aCas ) ; LOG . trace ( ""actionAnnotate() remembering feature editor values"" ) ; state . rememberFeatures ( ) ; loadFeatureEditorModels ( aTarget ) ; autoScroll ( aCas ) ; getForwardAnnotationTextField ( ) . setModelObject ( null ) ; if ( state . getSelection ( ) . getAnnotation ( ) . isNotSet ( ) ) { if ( layerSelectionPanel . getSelectableLayers ( ) . isEmpty ( ) ) { state . setSelectedAnnotationLayer ( new AnnotationLayer ( ) ) ; } else if ( state . getSelectedAnnotationLayer ( ) == null ) { if ( state . getRememberedSpanLayer ( ) == null ) { state . setSelectedAnnotationLayer ( layerSelectionPanel . getSelectableLayers ( ) . get ( 0 ) ) ; } else { state . setSelectedAnnotationLayer ( state . getRememberedSpanLayer ( ) ) ; } } } } +","protected void internalCompleteAnnotation ( AjaxRequestTarget aTarget , CAS aCas ) throws IOException , AnnotationException { AnnotatorState state = getModelObject ( ) ; LOG . trace ( ""actionAnnotate() updating progress information"" ) ; int sentenceNumber = getSentenceNumber ( aCas , state . getSelection ( ) . getBegin ( ) ) ; state . setFocusUnitIndex ( sentenceNumber ) ; editorPage . writeEditorCas ( aCas ) ; LOG . trace ( ""actionAnnotate() remembering feature editor values"" ) ; state . rememberFeatures ( ) ; loadFeatureEditorModels ( aTarget ) ; autoScroll ( aCas ) ; getForwardAnnotationTextField ( ) . setModelObject ( null ) ; if ( state . getSelection ( ) . getAnnotation ( ) . isNotSet ( ) ) { if ( layerSelectionPanel . getSelectableLayers ( ) . isEmpty ( ) ) { state . setSelectedAnnotationLayer ( new AnnotationLayer ( ) ) ; } else if ( state . getSelectedAnnotationLayer ( ) == null ) { if ( state . getRememberedSpanLayer ( ) == null ) { state . setSelectedAnnotationLayer ( layerSelectionPanel . getSelectableLayers ( ) . get ( 0 ) ) ; } else { state . setSelectedAnnotationLayer ( state . getRememberedSpanLayer ( ) ) ; } } } } +" +937,"protected void internalCompleteAnnotation ( AjaxRequestTarget aTarget , CAS aCas ) throws IOException , AnnotationException { AnnotatorState state = getModelObject ( ) ; LOG . trace ( ""actionAnnotate() updating progress information"" ) ; int sentenceNumber = getSentenceNumber ( aCas , state . getSelection ( ) . getBegin ( ) ) ; state . setFocusUnitIndex ( sentenceNumber ) ; editorPage . writeEditorCas ( aCas ) ; state . rememberFeatures ( ) ; loadFeatureEditorModels ( aTarget ) ; autoScroll ( aCas ) ; getForwardAnnotationTextField ( ) . setModelObject ( null ) ; if ( state . getSelection ( ) . getAnnotation ( ) . isNotSet ( ) ) { if ( layerSelectionPanel . getSelectableLayers ( ) . isEmpty ( ) ) { state . setSelectedAnnotationLayer ( new AnnotationLayer ( ) ) ; } else if ( state . getSelectedAnnotationLayer ( ) == null ) { if ( state . getRememberedSpanLayer ( ) == null ) { state . setSelectedAnnotationLayer ( layerSelectionPanel . getSelectableLayers ( ) . get ( 0 ) ) ; } else { state . setSelectedAnnotationLayer ( state . getRememberedSpanLayer ( ) ) ; } } } } +","protected void internalCompleteAnnotation ( AjaxRequestTarget aTarget , CAS aCas ) throws IOException , AnnotationException { AnnotatorState state = getModelObject ( ) ; LOG . trace ( ""actionAnnotate() updating progress information"" ) ; int sentenceNumber = getSentenceNumber ( aCas , state . getSelection ( ) . getBegin ( ) ) ; state . setFocusUnitIndex ( sentenceNumber ) ; editorPage . writeEditorCas ( aCas ) ; LOG . trace ( ""actionAnnotate() remembering feature editor values"" ) ; state . 
rememberFeatures ( ) ; loadFeatureEditorModels ( aTarget ) ; autoScroll ( aCas ) ; getForwardAnnotationTextField ( ) . setModelObject ( null ) ; if ( state . getSelection ( ) . getAnnotation ( ) . isNotSet ( ) ) { if ( layerSelectionPanel . getSelectableLayers ( ) . isEmpty ( ) ) { state . setSelectedAnnotationLayer ( new AnnotationLayer ( ) ) ; } else if ( state . getSelectedAnnotationLayer ( ) == null ) { if ( state . getRememberedSpanLayer ( ) == null ) { state . setSelectedAnnotationLayer ( layerSelectionPanel . getSelectableLayers ( ) . get ( 0 ) ) ; } else { state . setSelectedAnnotationLayer ( state . getRememberedSpanLayer ( ) ) ; } } } } +" +938,"private void onNextAttemptCompleted ( Boolean success , Throwable error ) { assert executor . inEventLoop ( ) ; if ( success ) { reallyStop ( ) ; } else { if ( error != null && ! ( error instanceof CancellationException ) ) { Loggers . warnWithException ( LOG , ""[{}] Uncaught error while starting reconnection attempt"" , logPrefix , error ) ; } if ( state == State . STOP_AFTER_CURRENT ) { reallyStop ( ) ; } else { assert state == State . ATTEMPT_IN_PROGRESS ; scheduleNextAttempt ( ) ; } } } +","private void onNextAttemptCompleted ( Boolean success , Throwable error ) { assert executor . inEventLoop ( ) ; if ( success ) { LOG . debug ( ""[{}] Reconnection successful"" , logPrefix ) ; reallyStop ( ) ; } else { if ( error != null && ! ( error instanceof CancellationException ) ) { Loggers . warnWithException ( LOG , ""[{}] Uncaught error while starting reconnection attempt"" , logPrefix , error ) ; } if ( state == State . STOP_AFTER_CURRENT ) { reallyStop ( ) ; } else { assert state == State . ATTEMPT_IN_PROGRESS ; scheduleNextAttempt ( ) ; } } } +" +939,"protected void exportPreview ( PortletDataContext portletDataContext , FileEntry fileEntry , Element fileEntryElement , String binPathSuffix , String previewType , int fileIndex ) throws Exception { if ( portletDataContext . isPerformDirectBinaryImport ( ) ) { return ; } FileVersion fileVersion = fileEntry . getFileVersion ( ) ; if ( ! hasPreview ( fileVersion , previewType ) ) { if ( _log . isWarnEnabled ( ) ) { } return ; } String binPathSegment = null ; if ( fileIndex < 0 ) { binPathSegment = previewType ; } else { binPathSegment = String . valueOf ( fileIndex + 1 ) ; } String binPath = getBinPath ( portletDataContext , fileEntry , binPathSegment ) ; StringBundler sb = new StringBundler ( 4 ) ; sb . append ( ""bin-path-preview-"" ) ; sb . append ( binPathSegment ) ; sb . append ( ""-"" ) ; sb . append ( binPathSuffix ) ; String binPathName = sb . toString ( ) ; fileEntryElement . addAttribute ( binPathName , binPath ) ; if ( fileIndex < 0 ) { fileIndex = 0 ; } else { fileIndex ++ ; } try ( InputStream inputStream = doGetPreviewAsStream ( fileVersion , fileIndex , previewType ) ) { exportBinary ( portletDataContext , fileEntryElement , fileVersion , inputStream , binPath , binPathName ) ; } } +","protected void exportPreview ( PortletDataContext portletDataContext , FileEntry fileEntry , Element fileEntryElement , String binPathSuffix , String previewType , int fileIndex ) throws Exception { if ( portletDataContext . isPerformDirectBinaryImport ( ) ) { return ; } FileVersion fileVersion = fileEntry . getFileVersion ( ) ; if ( ! hasPreview ( fileVersion , previewType ) ) { if ( _log . isWarnEnabled ( ) ) { _log . warn ( ""No preview found for file entry "" + fileEntry . 
getFileEntryId ( ) ) ; } return ; } String binPathSegment = null ; if ( fileIndex < 0 ) { binPathSegment = previewType ; } else { binPathSegment = String . valueOf ( fileIndex + 1 ) ; } String binPath = getBinPath ( portletDataContext , fileEntry , binPathSegment ) ; StringBundler sb = new StringBundler ( 4 ) ; sb . append ( ""bin-path-preview-"" ) ; sb . append ( binPathSegment ) ; sb . append ( ""-"" ) ; sb . append ( binPathSuffix ) ; String binPathName = sb . toString ( ) ; fileEntryElement . addAttribute ( binPathName , binPath ) ; if ( fileIndex < 0 ) { fileIndex = 0 ; } else { fileIndex ++ ; } try ( InputStream inputStream = doGetPreviewAsStream ( fileVersion , fileIndex , previewType ) ) { exportBinary ( portletDataContext , fileEntryElement , fileVersion , inputStream , binPath , binPathName ) ; } } +" +940,"@ Test public void multipleNamesFirstProfileActive ( ) throws Exception { this . environment . setActiveProfiles ( ""production"" ) ; initialize ( ""multi-profile-names.xml"" ) ; this . out . expect ( containsString ( ""Hello"" ) ) ; } +","@ Test public void multipleNamesFirstProfileActive ( ) throws Exception { this . environment . setActiveProfiles ( ""production"" ) ; initialize ( ""multi-profile-names.xml"" ) ; this . logger . trace ( ""Hello"" ) ; this . out . expect ( containsString ( ""Hello"" ) ) ; } +" +941,"public void setupProperties ( ) { oauthClientId . setValue ( """" ) ; oauthClientSecret . setValue ( """" ) ; if ( _logger . isTraceEnabled ( ) ) { } } +","public void setupProperties ( ) { oauthClientId . setValue ( """" ) ; oauthClientSecret . setValue ( """" ) ; if ( _logger . isTraceEnabled ( ) ) { _logger . trace ( ""Properties set "" + System . identityHashCode ( this ) ) ; } } +" +942,"@ Transition ( to = ""INIT"" , from = ""RUNNING"" ) public void onBecomeInitFromRunning ( Message msg , NotificationContext context ) { String taskPartition = msg . getPartitionName ( ) ; if ( _taskRunner == null ) { throw new IllegalStateException ( String . format ( ""Invalid state transition. There is no running task for partition %s."" , taskPartition ) ) ; } _taskRunner . cancel ( ) ; TaskResult r = _taskRunner . waitTillDone ( ) ; _taskRunner = null ; } +","@ Transition ( to = ""INIT"" , from = ""RUNNING"" ) public void onBecomeInitFromRunning ( Message msg , NotificationContext context ) { String taskPartition = msg . getPartitionName ( ) ; if ( _taskRunner == null ) { throw new IllegalStateException ( String . format ( ""Invalid state transition. There is no running task for partition %s."" , taskPartition ) ) ; } _taskRunner . cancel ( ) ; TaskResult r = _taskRunner . waitTillDone ( ) ; LOG . info ( String . format ( ""Task partition %s returned result %s."" , msg . getPartitionName ( ) , r ) ) ; _taskRunner = null ; } +" +943,"public HttpResponse < String > getOperationalEnvById ( String id ) { Properties headers = createHeaders ( ) ; String url = String . format ( ""%s%s%s/%s"" , aaiConfig . getHttpRequestConfig ( ) . getServerRootUrl ( ) , aaiConfig . getHttpRequestConfig ( ) . getResourceNamespaces ( ) . get ( OPERATIONAL_ENV_RESOURCE_CONFIG_PARAM ) , OPERATIONAL_ENV_RESOURCE , id ) ; SupplierThrows < HttpResponse < String > , Exception > httpGet = ( ) -> HttpRequest . get ( url , headers , aaiConfig . getHttpClientConfig ( ) ) ; long maxRetries = aaiConfig . getHttpClientConfig ( ) . getNumOfRetries ( ) ; try { return FunctionalInterfaces . retryMethodOnException ( httpGet , this :: retryOnException , maxRetries ) ; } catch ( Exception e ) { return Responses . 
INTERNAL_SERVER_ERROR ; } } +","public HttpResponse < String > getOperationalEnvById ( String id ) { Properties headers = createHeaders ( ) ; String url = String . format ( ""%s%s%s/%s"" , aaiConfig . getHttpRequestConfig ( ) . getServerRootUrl ( ) , aaiConfig . getHttpRequestConfig ( ) . getResourceNamespaces ( ) . get ( OPERATIONAL_ENV_RESOURCE_CONFIG_PARAM ) , OPERATIONAL_ENV_RESOURCE , id ) ; SupplierThrows < HttpResponse < String > , Exception > httpGet = ( ) -> HttpRequest . get ( url , headers , aaiConfig . getHttpClientConfig ( ) ) ; long maxRetries = aaiConfig . getHttpClientConfig ( ) . getNumOfRetries ( ) ; try { return FunctionalInterfaces . retryMethodOnException ( httpGet , this :: retryOnException , maxRetries ) ; } catch ( Exception e ) { logger . debug ( ""Request failed with exception {}"" , getCause ( e ) . getMessage ( ) ) ; return Responses . INTERNAL_SERVER_ERROR ; } } +" +944,"public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +","public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . 
toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +" +945,"public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +","public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . 
HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +" +946,"public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . 
getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +","public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +" +947,"public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . 
getType ( ) , factory ) ; } } catch ( Throwable t ) { } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +","public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +" +948,"public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . 
toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +","public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +" +949,"public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . 
newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; } +","public void refresh ( ) { Map < String , UnderDatabaseFactory > map = new HashMap < > ( ) ; String libDir = PathUtils . concatPath ( ServerConfiguration . global ( ) . get ( PropertyKey . HOME ) , ""lib"" ) ; LOG . info ( ""Loading udb jars from {}"" , libDir ) ; List < File > files = new ArrayList < > ( ) ; try ( DirectoryStream < Path > stream = Files . newDirectoryStream ( Paths . get ( libDir ) , UDB_EXTENSION_PATTERN ) ) { for ( Path entry : stream ) { if ( entry . toFile ( ) . isFile ( ) ) { files . add ( entry . toFile ( ) ) ; } } } catch ( IOException e ) { LOG . warn ( ""Failed to load udb libs from {}. error: {}"" , libDir , e . toString ( ) ) ; } for ( File jar : files ) { try { URL extensionURL = jar . toURI ( ) . toURL ( ) ; ClassLoader extensionsClassLoader = new ExtensionsClassLoader ( new URL [ ] { extensionURL } , ClassLoader . getSystemClassLoader ( ) ) ; for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , extensionsClassLoader ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } } catch ( Throwable t ) { LOG . warn ( ""Failed to load udb jar {}"" , jar , t ) ; } } for ( UnderDatabaseFactory factory : ServiceLoader . load ( UnderDatabaseFactory . class , UnderDatabaseRegistry . class . getClassLoader ( ) ) ) { UnderDatabaseFactory existingFactory = map . get ( factory . getType ( ) ) ; if ( existingFactory != null ) { LOG . warn ( ""Ignoring duplicate under database type '{}' found in {}. Existing factory: {}"" , factory . getType ( ) , factory . getClass ( ) , existingFactory . getClass ( ) ) ; } map . put ( factory . getType ( ) , factory ) ; } mFactories = map ; LOG . 
info ( ""Registered UDBs: "" + String . join ( "","" , mFactories . keySet ( ) ) ) ; } +" +950,"private long getRowCountFromTable ( String tableName , Text signalColumn , Authorizations authorizations ) { try { Scanner scanner = createScanner ( tableName , null , authorizations ) ; try { scanner . fetchColumnFamily ( signalColumn ) ; IteratorSetting countingIterator = new IteratorSetting ( 100 , CountingIterator . class . getSimpleName ( ) , CountingIterator . class ) ; scanner . addScanIterator ( countingIterator ) ; GRAPH_LOGGER . logStartIterator ( tableName , scanner ) ; long count = 0 ; for ( Map . Entry < Key , Value > entry : scanner ) { Long countForKey = LongCombiner . FIXED_LEN_ENCODER . decode ( entry . getValue ( ) . get ( ) ) ; LOGGER . debug ( ""getRowCountFromTable(%s): %s: %d"" , tableName , entry . getKey ( ) . getRow ( ) , countForKey ) ; count += countForKey ; } LOGGER . debug ( ""getRowCountFromTable(%s): TOTAL: %d"" , tableName , count ) ; return count ; } finally { scanner . close ( ) ; } } catch ( TableNotFoundException ex ) { throw new VertexiumException ( ""Could not get count from table: "" + tableName , ex ) ; } } +","private long getRowCountFromTable ( String tableName , Text signalColumn , Authorizations authorizations ) { try { LOGGER . debug ( ""BEGIN getRowCountFromTable(%s)"" , tableName ) ; Scanner scanner = createScanner ( tableName , null , authorizations ) ; try { scanner . fetchColumnFamily ( signalColumn ) ; IteratorSetting countingIterator = new IteratorSetting ( 100 , CountingIterator . class . getSimpleName ( ) , CountingIterator . class ) ; scanner . addScanIterator ( countingIterator ) ; GRAPH_LOGGER . logStartIterator ( tableName , scanner ) ; long count = 0 ; for ( Map . Entry < Key , Value > entry : scanner ) { Long countForKey = LongCombiner . FIXED_LEN_ENCODER . decode ( entry . getValue ( ) . get ( ) ) ; LOGGER . debug ( ""getRowCountFromTable(%s): %s: %d"" , tableName , entry . getKey ( ) . getRow ( ) , countForKey ) ; count += countForKey ; } LOGGER . debug ( ""getRowCountFromTable(%s): TOTAL: %d"" , tableName , count ) ; return count ; } finally { scanner . close ( ) ; } } catch ( TableNotFoundException ex ) { throw new VertexiumException ( ""Could not get count from table: "" + tableName , ex ) ; } } +" +951,"private long getRowCountFromTable ( String tableName , Text signalColumn , Authorizations authorizations ) { try { LOGGER . debug ( ""BEGIN getRowCountFromTable(%s)"" , tableName ) ; Scanner scanner = createScanner ( tableName , null , authorizations ) ; try { scanner . fetchColumnFamily ( signalColumn ) ; IteratorSetting countingIterator = new IteratorSetting ( 100 , CountingIterator . class . getSimpleName ( ) , CountingIterator . class ) ; scanner . addScanIterator ( countingIterator ) ; GRAPH_LOGGER . logStartIterator ( tableName , scanner ) ; long count = 0 ; for ( Map . Entry < Key , Value > entry : scanner ) { Long countForKey = LongCombiner . FIXED_LEN_ENCODER . decode ( entry . getValue ( ) . get ( ) ) ; count += countForKey ; } LOGGER . debug ( ""getRowCountFromTable(%s): TOTAL: %d"" , tableName , count ) ; return count ; } finally { scanner . close ( ) ; } } catch ( TableNotFoundException ex ) { throw new VertexiumException ( ""Could not get count from table: "" + tableName , ex ) ; } } +","private long getRowCountFromTable ( String tableName , Text signalColumn , Authorizations authorizations ) { try { LOGGER . 
debug ( ""BEGIN getRowCountFromTable(%s)"" , tableName ) ; Scanner scanner = createScanner ( tableName , null , authorizations ) ; try { scanner . fetchColumnFamily ( signalColumn ) ; IteratorSetting countingIterator = new IteratorSetting ( 100 , CountingIterator . class . getSimpleName ( ) , CountingIterator . class ) ; scanner . addScanIterator ( countingIterator ) ; GRAPH_LOGGER . logStartIterator ( tableName , scanner ) ; long count = 0 ; for ( Map . Entry < Key , Value > entry : scanner ) { Long countForKey = LongCombiner . FIXED_LEN_ENCODER . decode ( entry . getValue ( ) . get ( ) ) ; LOGGER . debug ( ""getRowCountFromTable(%s): %s: %d"" , tableName , entry . getKey ( ) . getRow ( ) , countForKey ) ; count += countForKey ; } LOGGER . debug ( ""getRowCountFromTable(%s): TOTAL: %d"" , tableName , count ) ; return count ; } finally { scanner . close ( ) ; } } catch ( TableNotFoundException ex ) { throw new VertexiumException ( ""Could not get count from table: "" + tableName , ex ) ; } } +" +952,"private long getRowCountFromTable ( String tableName , Text signalColumn , Authorizations authorizations ) { try { LOGGER . debug ( ""BEGIN getRowCountFromTable(%s)"" , tableName ) ; Scanner scanner = createScanner ( tableName , null , authorizations ) ; try { scanner . fetchColumnFamily ( signalColumn ) ; IteratorSetting countingIterator = new IteratorSetting ( 100 , CountingIterator . class . getSimpleName ( ) , CountingIterator . class ) ; scanner . addScanIterator ( countingIterator ) ; GRAPH_LOGGER . logStartIterator ( tableName , scanner ) ; long count = 0 ; for ( Map . Entry < Key , Value > entry : scanner ) { Long countForKey = LongCombiner . FIXED_LEN_ENCODER . decode ( entry . getValue ( ) . get ( ) ) ; LOGGER . debug ( ""getRowCountFromTable(%s): %s: %d"" , tableName , entry . getKey ( ) . getRow ( ) , countForKey ) ; count += countForKey ; } return count ; } finally { scanner . close ( ) ; } } catch ( TableNotFoundException ex ) { throw new VertexiumException ( ""Could not get count from table: "" + tableName , ex ) ; } } +","private long getRowCountFromTable ( String tableName , Text signalColumn , Authorizations authorizations ) { try { LOGGER . debug ( ""BEGIN getRowCountFromTable(%s)"" , tableName ) ; Scanner scanner = createScanner ( tableName , null , authorizations ) ; try { scanner . fetchColumnFamily ( signalColumn ) ; IteratorSetting countingIterator = new IteratorSetting ( 100 , CountingIterator . class . getSimpleName ( ) , CountingIterator . class ) ; scanner . addScanIterator ( countingIterator ) ; GRAPH_LOGGER . logStartIterator ( tableName , scanner ) ; long count = 0 ; for ( Map . Entry < Key , Value > entry : scanner ) { Long countForKey = LongCombiner . FIXED_LEN_ENCODER . decode ( entry . getValue ( ) . get ( ) ) ; LOGGER . debug ( ""getRowCountFromTable(%s): %s: %d"" , tableName , entry . getKey ( ) . getRow ( ) , countForKey ) ; count += countForKey ; } LOGGER . debug ( ""getRowCountFromTable(%s): TOTAL: %d"" , tableName , count ) ; return count ; } finally { scanner . close ( ) ; } } catch ( TableNotFoundException ex ) { throw new VertexiumException ( ""Could not get count from table: "" + tableName , ex ) ; } } +" +953,"public void startSelfTest ( @ Identification final String deviceIdentification , @ Identification final String organisationIdentification , final String correlationUid , final String messageType , final int messagePriority ) throws FunctionalException { this . findOrganisation ( organisationIdentification ) ; final Device device = this . 
findActiveDevice ( deviceIdentification ) ; this . osgpCoreRequestMessageSender . send ( new RequestMessage ( correlationUid , organisationIdentification , deviceIdentification , null ) , messageType , messagePriority , device . getIpAddress ( ) ) ; } +","public void startSelfTest ( @ Identification final String deviceIdentification , @ Identification final String organisationIdentification , final String correlationUid , final String messageType , final int messagePriority ) throws FunctionalException { LOGGER . debug ( ""startSelfTest called with organisation {} and device {}"" , organisationIdentification , deviceIdentification ) ; this . findOrganisation ( organisationIdentification ) ; final Device device = this . findActiveDevice ( deviceIdentification ) ; this . osgpCoreRequestMessageSender . send ( new RequestMessage ( correlationUid , organisationIdentification , deviceIdentification , null ) , messageType , messagePriority , device . getIpAddress ( ) ) ; } +" +954,"public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER . debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +","public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER . debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +" +955,"public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +","public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER .
debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +" +956,"public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER . debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +","public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . 
addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER . debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +" +957,"public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER . debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +","public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . 
getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER . debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +" +958,"public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER . debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +","public Boolean createVbds ( Xen . Vm vm , VirtualMachineTO spec ) { if ( spec . getDisks ( ) == null ) { LOGGER . info ( ""No disks defined for "" + vm . getVmName ( ) ) ; return false ; } for ( DiskTO disk : spec . 
getDisks ( ) ) { try { if ( disk . getType ( ) == Volume . Type . ROOT ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addRootDisk ( diskFile ) ; vm . setPrimaryPoolUuid ( vol . getDataStore ( ) . getUuid ( ) ) ; LOGGER . debug ( ""Adding root disk: "" + diskFile ) ; } else if ( disk . getType ( ) == Volume . Type . ISO ) { DataTO isoTO = disk . getData ( ) ; if ( isoTO . getPath ( ) != null ) { TemplateObjectTO template = ( TemplateObjectTO ) isoTO ; DataStoreTO store = template . getDataStore ( ) ; if ( ! ( store instanceof NfsTO ) ) { throw new CloudRuntimeException ( ""unsupported protocol"" ) ; } NfsTO nfsStore = ( NfsTO ) store ; String secPoolUuid = pool . setupSecondaryStorage ( nfsStore . getUrl ( ) ) ; String isoPath = config . getAgentSecStoragePath ( ) + ""/"" + secPoolUuid + ""/"" + template . getPath ( ) ; vm . addIso ( isoPath ) ; LOGGER . debug ( ""Adding ISO: "" + isoPath ) ; } } else if ( disk . getType ( ) == Volume . Type . DATADISK ) { VolumeObjectTO vol = ( VolumeObjectTO ) disk . getData ( ) ; String diskFile = processor . getVirtualDiskPath ( vol . getUuid ( ) , vol . getDataStore ( ) . getUuid ( ) ) ; vm . addDataDisk ( diskFile ) ; LOGGER . debug ( ""Adding data disk: "" + diskFile ) ; } else { throw new CloudRuntimeException ( ""Unknown disk type: "" + disk . getType ( ) ) ; } } catch ( Exception e ) { LOGGER . debug ( ""CreateVbds failed"" , e ) ; throw new CloudRuntimeException ( ""Exception"" + e . getMessage ( ) , e ) ; } } return true ; } +" +959,"public Authentication authenticate ( Authentication authentication ) throws AuthenticationException { Authentication authed = null ; Cache userCache = cacheManager . getCache ( ""UserCache"" ) ; md . reset ( ) ; byte [ ] hashKey = md . digest ( ( authentication . getName ( ) + authentication . getCredentials ( ) ) . getBytes ( ) ) ; String userKey = Arrays . toString ( hashKey ) ; Element authedUser = userCache . get ( userKey ) ; if ( null != authedUser ) { authed = ( Authentication ) authedUser . getObjectValue ( ) ; SecurityContextHolder . getContext ( ) . setAuthentication ( authed ) ; } else { try { authed = super . authenticate ( authentication ) ; userCache . put ( new Element ( userKey , authed ) ) ; } catch ( AuthenticationException e ) { throw e ; } UserDetails user = new User ( authentication . getName ( ) , ""skippped-ldap"" , authed . getAuthorities ( ) ) ; if ( ! userService . userExists ( authentication . getName ( ) ) ) { userService . createUser ( user ) ; } else { userService . updateUser ( user ) ; } } return authed ; } +","public Authentication authenticate ( Authentication authentication ) throws AuthenticationException { Authentication authed = null ; Cache userCache = cacheManager . getCache ( ""UserCache"" ) ; md . reset ( ) ; byte [ ] hashKey = md . digest ( ( authentication . getName ( ) + authentication . getCredentials ( ) ) . getBytes ( ) ) ; String userKey = Arrays . toString ( hashKey ) ; Element authedUser = userCache . get ( userKey ) ; if ( null != authedUser ) { authed = ( Authentication ) authedUser . getObjectValue ( ) ; SecurityContextHolder . getContext ( ) . setAuthentication ( authed ) ; } else { try { authed = super . authenticate ( authentication ) ; userCache . put ( new Element ( userKey , authed ) ) ; } catch ( AuthenticationException e ) { logger . error ( ""Failed to auth user: "" + authentication . 
getName ( ) , e ) ; throw e ; } UserDetails user = new User ( authentication . getName ( ) , ""skippped-ldap"" , authed . getAuthorities ( ) ) ; if ( ! userService . userExists ( authentication . getName ( ) ) ) { userService . createUser ( user ) ; } else { userService . updateUser ( user ) ; } } return authed ; } +" +960,"public void upgrade ( ) { final FindIterable < Document > documentsWithMissingFields = collection . find ( or ( not ( exists ( ContentPack . FIELD_META_ID ) ) , not ( exists ( ContentPack . FIELD_META_REVISION ) ) ) ) ; for ( Document document : documentsWithMissingFields ) { final ObjectId objectId = document . getObjectId ( ""_id"" ) ; final String id = document . get ( ""id"" , objectId . toHexString ( ) ) ; final int rev = document . get ( ""rev"" , 0 ) ; document . put ( ""id"" , id ) ; document . put ( ""rev"" , rev ) ; final UpdateResult updateResult = collection . replaceOne ( eq ( ""_id"" , objectId ) , document ) ; if ( updateResult . wasAcknowledged ( ) ) { LOG . debug ( ""Successfully updated document with ID <{}>"" , objectId ) ; } else { LOG . error ( ""Failed to update document with ID <{}>"" , objectId ) ; } } } +","public void upgrade ( ) { final FindIterable < Document > documentsWithMissingFields = collection . find ( or ( not ( exists ( ContentPack . FIELD_META_ID ) ) , not ( exists ( ContentPack . FIELD_META_REVISION ) ) ) ) ; for ( Document document : documentsWithMissingFields ) { final ObjectId objectId = document . getObjectId ( ""_id"" ) ; LOG . debug ( ""Found document with missing \""id\"" or \""rev\"" field with ID <{}>"" , objectId ) ; final String id = document . get ( ""id"" , objectId . toHexString ( ) ) ; final int rev = document . get ( ""rev"" , 0 ) ; document . put ( ""id"" , id ) ; document . put ( ""rev"" , rev ) ; final UpdateResult updateResult = collection . replaceOne ( eq ( ""_id"" , objectId ) , document ) ; if ( updateResult . wasAcknowledged ( ) ) { LOG . debug ( ""Successfully updated document with ID <{}>"" , objectId ) ; } else { LOG . error ( ""Failed to update document with ID <{}>"" , objectId ) ; } } } +" +961,"public void upgrade ( ) { final FindIterable < Document > documentsWithMissingFields = collection . find ( or ( not ( exists ( ContentPack . FIELD_META_ID ) ) , not ( exists ( ContentPack . FIELD_META_REVISION ) ) ) ) ; for ( Document document : documentsWithMissingFields ) { final ObjectId objectId = document . getObjectId ( ""_id"" ) ; LOG . debug ( ""Found document with missing \""id\"" or \""rev\"" field with ID <{}>"" , objectId ) ; final String id = document . get ( ""id"" , objectId . toHexString ( ) ) ; final int rev = document . get ( ""rev"" , 0 ) ; document . put ( ""id"" , id ) ; document . put ( ""rev"" , rev ) ; final UpdateResult updateResult = collection . replaceOne ( eq ( ""_id"" , objectId ) , document ) ; if ( updateResult . wasAcknowledged ( ) ) { } else { LOG . error ( ""Failed to update document with ID <{}>"" , objectId ) ; } } } +","public void upgrade ( ) { final FindIterable < Document > documentsWithMissingFields = collection . find ( or ( not ( exists ( ContentPack . FIELD_META_ID ) ) , not ( exists ( ContentPack . FIELD_META_REVISION ) ) ) ) ; for ( Document document : documentsWithMissingFields ) { final ObjectId objectId = document . getObjectId ( ""_id"" ) ; LOG . debug ( ""Found document with missing \""id\"" or \""rev\"" field with ID <{}>"" , objectId ) ; final String id = document . get ( ""id"" , objectId . toHexString ( ) ) ; final int rev = document . 
get ( ""rev"" , 0 ) ; document . put ( ""id"" , id ) ; document . put ( ""rev"" , rev ) ; final UpdateResult updateResult = collection . replaceOne ( eq ( ""_id"" , objectId ) , document ) ; if ( updateResult . wasAcknowledged ( ) ) { LOG . debug ( ""Successfully updated document with ID <{}>"" , objectId ) ; } else { LOG . error ( ""Failed to update document with ID <{}>"" , objectId ) ; } } } +" +962,"public void upgrade ( ) { final FindIterable < Document > documentsWithMissingFields = collection . find ( or ( not ( exists ( ContentPack . FIELD_META_ID ) ) , not ( exists ( ContentPack . FIELD_META_REVISION ) ) ) ) ; for ( Document document : documentsWithMissingFields ) { final ObjectId objectId = document . getObjectId ( ""_id"" ) ; LOG . debug ( ""Found document with missing \""id\"" or \""rev\"" field with ID <{}>"" , objectId ) ; final String id = document . get ( ""id"" , objectId . toHexString ( ) ) ; final int rev = document . get ( ""rev"" , 0 ) ; document . put ( ""id"" , id ) ; document . put ( ""rev"" , rev ) ; final UpdateResult updateResult = collection . replaceOne ( eq ( ""_id"" , objectId ) , document ) ; if ( updateResult . wasAcknowledged ( ) ) { LOG . debug ( ""Successfully updated document with ID <{}>"" , objectId ) ; } else { } } } +","public void upgrade ( ) { final FindIterable < Document > documentsWithMissingFields = collection . find ( or ( not ( exists ( ContentPack . FIELD_META_ID ) ) , not ( exists ( ContentPack . FIELD_META_REVISION ) ) ) ) ; for ( Document document : documentsWithMissingFields ) { final ObjectId objectId = document . getObjectId ( ""_id"" ) ; LOG . debug ( ""Found document with missing \""id\"" or \""rev\"" field with ID <{}>"" , objectId ) ; final String id = document . get ( ""id"" , objectId . toHexString ( ) ) ; final int rev = document . get ( ""rev"" , 0 ) ; document . put ( ""id"" , id ) ; document . put ( ""rev"" , rev ) ; final UpdateResult updateResult = collection . replaceOne ( eq ( ""_id"" , objectId ) , document ) ; if ( updateResult . wasAcknowledged ( ) ) { LOG . debug ( ""Successfully updated document with ID <{}>"" , objectId ) ; } else { LOG . error ( ""Failed to update document with ID <{}>"" , objectId ) ; } } } +" +963,"public static NamedRelatedResourceRep toNamedRelatedResource ( NamedURI resource ) { ResourceTypeEnum resourceType = null ; try { resourceType = ResourceTypeMapping . getResourceType ( URIUtil . getModelClass ( resource . getURI ( ) ) ) ; } catch ( Exception e ) { } return new NamedRelatedResourceRep ( resource . getURI ( ) , toLink ( resourceType , resource . getURI ( ) ) , resource . getName ( ) ) ; } +","public static NamedRelatedResourceRep toNamedRelatedResource ( NamedURI resource ) { ResourceTypeEnum resourceType = null ; try { resourceType = ResourceTypeMapping . getResourceType ( URIUtil . getModelClass ( resource . getURI ( ) ) ) ; } catch ( Exception e ) { _log . error ( ""Resource Type not found for "" + resource . getURI ( ) , e ) ; } return new NamedRelatedResourceRep ( resource . getURI ( ) , toLink ( resourceType , resource . getURI ( ) ) , resource . getName ( ) ) ; } +" +964,"private Response putObjectToNetwork ( final String nwcId , final String path , final Object body ) { try { Response resp = sendRequest ( nwcId , Request . Method . PUT , path , LogMessage . getSavedTxid ( ) , body ) ; if ( resp . isError ( ""PUT"" ) ) { log . warn ( ""invalid PUT({}) to {}: '{}' {}"" , resp . statusCode , nwcId , path , resp . getBodyValue ( ) ) ; } return resp ; } catch ( Exception e ) { log . 
error ( ""Recieved Message Exception."" , e ) ; return new Response ( Response . INTERNAL_SERVER_ERROR , null ) ; } } +","private Response putObjectToNetwork ( final String nwcId , final String path , final Object body ) { log . debug ( "">> [networkId : '{}']"" , this . networkId ) ; try { Response resp = sendRequest ( nwcId , Request . Method . PUT , path , LogMessage . getSavedTxid ( ) , body ) ; if ( resp . isError ( ""PUT"" ) ) { log . warn ( ""invalid PUT({}) to {}: '{}' {}"" , resp . statusCode , nwcId , path , resp . getBodyValue ( ) ) ; } return resp ; } catch ( Exception e ) { log . error ( ""Recieved Message Exception."" , e ) ; return new Response ( Response . INTERNAL_SERVER_ERROR , null ) ; } } +" +965,"private Response putObjectToNetwork ( final String nwcId , final String path , final Object body ) { log . debug ( "">> [networkId : '{}']"" , this . networkId ) ; try { Response resp = sendRequest ( nwcId , Request . Method . PUT , path , LogMessage . getSavedTxid ( ) , body ) ; if ( resp . isError ( ""PUT"" ) ) { } return resp ; } catch ( Exception e ) { log . error ( ""Recieved Message Exception."" , e ) ; return new Response ( Response . INTERNAL_SERVER_ERROR , null ) ; } } +","private Response putObjectToNetwork ( final String nwcId , final String path , final Object body ) { log . debug ( "">> [networkId : '{}']"" , this . networkId ) ; try { Response resp = sendRequest ( nwcId , Request . Method . PUT , path , LogMessage . getSavedTxid ( ) , body ) ; if ( resp . isError ( ""PUT"" ) ) { log . warn ( ""invalid PUT({}) to {}: '{}' {}"" , resp . statusCode , nwcId , path , resp . getBodyValue ( ) ) ; } return resp ; } catch ( Exception e ) { log . error ( ""Recieved Message Exception."" , e ) ; return new Response ( Response . INTERNAL_SERVER_ERROR , null ) ; } } +" +966,"private Response putObjectToNetwork ( final String nwcId , final String path , final Object body ) { log . debug ( "">> [networkId : '{}']"" , this . networkId ) ; try { Response resp = sendRequest ( nwcId , Request . Method . PUT , path , LogMessage . getSavedTxid ( ) , body ) ; if ( resp . isError ( ""PUT"" ) ) { log . warn ( ""invalid PUT({}) to {}: '{}' {}"" , resp . statusCode , nwcId , path , resp . getBodyValue ( ) ) ; } return resp ; } catch ( Exception e ) { return new Response ( Response . INTERNAL_SERVER_ERROR , null ) ; } } +","private Response putObjectToNetwork ( final String nwcId , final String path , final Object body ) { log . debug ( "">> [networkId : '{}']"" , this . networkId ) ; try { Response resp = sendRequest ( nwcId , Request . Method . PUT , path , LogMessage . getSavedTxid ( ) , body ) ; if ( resp . isError ( ""PUT"" ) ) { log . warn ( ""invalid PUT({}) to {}: '{}' {}"" , resp . statusCode , nwcId , path , resp . getBodyValue ( ) ) ; } return resp ; } catch ( Exception e ) { log . error ( ""Recieved Message Exception."" , e ) ; return new Response ( Response . INTERNAL_SERVER_ERROR , null ) ; } } +" +967,"public String getEvaluatorDescriptorString ( ) { final String descriptorString = Utilities . getEvaluatorDescriptorString ( jallocatedEvaluator . getEvaluatorDescriptor ( ) ) ; return descriptorString ; } +","public String getEvaluatorDescriptorString ( ) { final String descriptorString = Utilities . getEvaluatorDescriptorString ( jallocatedEvaluator . getEvaluatorDescriptor ( ) ) ; LOG . log ( Level . 
INFO , ""allocated evaluator - serialized evaluator descriptor: "" + descriptorString ) ; return descriptorString ; } +" +968,"public PageResponse getPageDetailResponse ( ) { PageResponse pageResponse = null ; try { IPage draftPage = null ; IPage onlinePage = null ; String check = this . checkSelectedNode ( this . getPageCode ( ) ) ; if ( null == check ) { draftPage = this . getPage ( this . getPageCode ( ) ) ; onlinePage = this . getOnlinePage ( this . getPageCode ( ) ) ; } pageResponse = new PageResponse ( this , draftPage , onlinePage ) ; } catch ( Throwable t ) { this . getServletResponse ( ) . setStatus ( Status . INTERNAL_SERVER_ERROR . getStatusCode ( ) ) ; return null ; } return pageResponse ; } +","public PageResponse getPageDetailResponse ( ) { PageResponse pageResponse = null ; try { IPage draftPage = null ; IPage onlinePage = null ; String check = this . checkSelectedNode ( this . getPageCode ( ) ) ; if ( null == check ) { draftPage = this . getPage ( this . getPageCode ( ) ) ; onlinePage = this . getOnlinePage ( this . getPageCode ( ) ) ; } pageResponse = new PageResponse ( this , draftPage , onlinePage ) ; } catch ( Throwable t ) { logger . error ( ""error in getPageJsonResponse"" , t ) ; this . getServletResponse ( ) . setStatus ( Status . INTERNAL_SERVER_ERROR . getStatusCode ( ) ) ; return null ; } return pageResponse ; } +" +969,"public void close ( ) { liveClosed . set ( true ) ; SearchRunner . shutdownSearchPool ( ) ; super . close ( ) ; } +","public void close ( ) { liveClosed . set ( true ) ; log . info ( ""Shutting down live query server. Disabled finding this job from mqm."" ) ; SearchRunner . shutdownSearchPool ( ) ; super . close ( ) ; } +" +970,"private boolean validateDataStore ( DataStoreInfo resource , boolean isNew ) throws Exception { final WorkspaceInfo ws = this . getCatalog ( ) . getWorkspaceByName ( resource . getWorkspace ( ) . getName ( ) ) ; if ( ws == null ) { return false ; } ValidationResult result = null ; try { result = this . getCatalog ( ) . validate ( resource , isNew ) ; if ( ! result . isValid ( ) ) { logValidationResult ( result , resource ) ; } } catch ( Exception e ) { LOGGER . warning ( ""Could not validate the resource "" + resource + "" due to the following issue: "" + e . getLocalizedMessage ( ) ) ; logValidationExceptions ( result , e ) ; return false ; } resource . setWorkspace ( ws ) ; return true ; } +","private boolean validateDataStore ( DataStoreInfo resource , boolean isNew ) throws Exception { final WorkspaceInfo ws = this . getCatalog ( ) . getWorkspaceByName ( resource . getWorkspace ( ) . getName ( ) ) ; if ( ws == null ) { return false ; } ValidationResult result = null ; try { result = this . getCatalog ( ) . validate ( resource , isNew ) ; if ( ! result . isValid ( ) ) { LOGGER . log ( Level . SEVERE , ""Store is not valid: {0}"" , resource ) ; logValidationResult ( result , resource ) ; } } catch ( Exception e ) { LOGGER . warning ( ""Could not validate the resource "" + resource + "" due to the following issue: "" + e . getLocalizedMessage ( ) ) ; logValidationExceptions ( result , e ) ; return false ; } resource . setWorkspace ( ws ) ; return true ; } +" +971,"private boolean validateDataStore ( DataStoreInfo resource , boolean isNew ) throws Exception { final WorkspaceInfo ws = this . getCatalog ( ) . getWorkspaceByName ( resource . getWorkspace ( ) . getName ( ) ) ; if ( ws == null ) { return false ; } ValidationResult result = null ; try { result = this . getCatalog ( ) . validate ( resource , isNew ) ; if ( ! 
result . isValid ( ) ) { LOGGER . log ( Level . SEVERE , ""Store is not valid: {0}"" , resource ) ; logValidationResult ( result , resource ) ; } } catch ( Exception e ) { logValidationExceptions ( result , e ) ; return false ; } resource . setWorkspace ( ws ) ; return true ; } +","private boolean validateDataStore ( DataStoreInfo resource , boolean isNew ) throws Exception { final WorkspaceInfo ws = this . getCatalog ( ) . getWorkspaceByName ( resource . getWorkspace ( ) . getName ( ) ) ; if ( ws == null ) { return false ; } ValidationResult result = null ; try { result = this . getCatalog ( ) . validate ( resource , isNew ) ; if ( ! result . isValid ( ) ) { LOGGER . log ( Level . SEVERE , ""Store is not valid: {0}"" , resource ) ; logValidationResult ( result , resource ) ; } } catch ( Exception e ) { LOGGER . warning ( ""Could not validate the resource "" + resource + "" due to the following issue: "" + e . getLocalizedMessage ( ) ) ; logValidationExceptions ( result , e ) ; return false ; } resource . setWorkspace ( ws ) ; return true ; } +" +972,"@ Test void profileExpressionMatchFirst ( ) throws Exception { this . environment . setActiveProfiles ( ""production"" ) ; initialize ( ""profile-expression.xml"" ) ; assertThat ( this . output ) . contains ( ""Hello"" ) ; } +","@ Test void profileExpressionMatchFirst ( ) throws Exception { this . environment . setActiveProfiles ( ""production"" ) ; initialize ( ""profile-expression.xml"" ) ; this . logger . trace ( ""Hello"" ) ; assertThat ( this . output ) . contains ( ""Hello"" ) ; } +" +973,"private void checkForAttributesKey ( String keyValue ) { if ( ArcConstants . ATTRIBUTES_KEY . equals ( keyValue ) ) { if ( LOGGER . isTraceEnabled ( ) ) { } if ( myCurrentHasAttributes || myCurrentHasGeometry ) { if ( myCurrentHasAttributes && myCurrentHasGeometry ) { createGeometryFromCurrent ( ) ; } else { LOGGER . error ( ""Incomplete feature from document, abandoning"" ) ; clearFeatureState ( ) ; } } myState = ArcSaxState . COLLECT_ATTRIBUTES ; } } +","private void checkForAttributesKey ( String keyValue ) { if ( ArcConstants . ATTRIBUTES_KEY . equals ( keyValue ) ) { if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""->Found ATTRIBUTES_KEY"" ) ; } if ( myCurrentHasAttributes || myCurrentHasGeometry ) { if ( myCurrentHasAttributes && myCurrentHasGeometry ) { createGeometryFromCurrent ( ) ; } else { LOGGER . error ( ""Incomplete feature from document, abandoning"" ) ; clearFeatureState ( ) ; } } myState = ArcSaxState . COLLECT_ATTRIBUTES ; } } +" +974,"private void checkForAttributesKey ( String keyValue ) { if ( ArcConstants . ATTRIBUTES_KEY . equals ( keyValue ) ) { if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""->Found ATTRIBUTES_KEY"" ) ; } if ( myCurrentHasAttributes || myCurrentHasGeometry ) { if ( myCurrentHasAttributes && myCurrentHasGeometry ) { createGeometryFromCurrent ( ) ; } else { clearFeatureState ( ) ; } } myState = ArcSaxState . COLLECT_ATTRIBUTES ; } } +","private void checkForAttributesKey ( String keyValue ) { if ( ArcConstants . ATTRIBUTES_KEY . equals ( keyValue ) ) { if ( LOGGER . isTraceEnabled ( ) ) { LOGGER . trace ( ""->Found ATTRIBUTES_KEY"" ) ; } if ( myCurrentHasAttributes || myCurrentHasGeometry ) { if ( myCurrentHasAttributes && myCurrentHasGeometry ) { createGeometryFromCurrent ( ) ; } else { LOGGER . error ( ""Incomplete feature from document, abandoning"" ) ; clearFeatureState ( ) ; } } myState = ArcSaxState . 
COLLECT_ATTRIBUTES ; } } +" +975,"public final void handleRequest ( final Exchange exchange ) { adapterContext . runOnContext ( s -> resource . handleRequest ( exchange ) ) ; } +","public final void handleRequest ( final Exchange exchange ) { LOG . debug ( ""running handler for resource [/{}] on vert.x context"" , resource . getName ( ) ) ; adapterContext . runOnContext ( s -> resource . handleRequest ( exchange ) ) ; } +" +976,"public void afterFireAllRules ( KieSession kieSession , TestGenKieSessionJournal journal , TestGenKieSessionFireAllRules fire ) { KieSession uncorruptedSession = scoreDirector . createKieSession ( ) ; for ( TestGenKieSessionInsert insert : journal . getInitialInserts ( ) ) { Object object = insert . getFact ( ) . getInstance ( ) ; uncorruptedSession . insert ( object ) ; } uncorruptedSession . fireAllRules ( ) ; uncorruptedSession . dispose ( ) ; Score < ? > uncorruptedScore = extractScore ( uncorruptedSession ) ; Score < ? > workingScore = extractScore ( kieSession ) ; if ( ! workingScore . equals ( uncorruptedScore ) ) { throw new TestGenCorruptedScoreException ( workingScore , uncorruptedScore ) ; } } +","public void afterFireAllRules ( KieSession kieSession , TestGenKieSessionJournal journal , TestGenKieSessionFireAllRules fire ) { KieSession uncorruptedSession = scoreDirector . createKieSession ( ) ; for ( TestGenKieSessionInsert insert : journal . getInitialInserts ( ) ) { Object object = insert . getFact ( ) . getInstance ( ) ; uncorruptedSession . insert ( object ) ; } uncorruptedSession . fireAllRules ( ) ; uncorruptedSession . dispose ( ) ; Score < ? > uncorruptedScore = extractScore ( uncorruptedSession ) ; Score < ? > workingScore = extractScore ( kieSession ) ; if ( ! workingScore . equals ( uncorruptedScore ) ) { LOGGER . debug ( "" Score: working[{}], uncorrupted[{}]"" , workingScore , uncorruptedScore ) ; throw new TestGenCorruptedScoreException ( workingScore , uncorruptedScore ) ; } } +" +977,"public static com . liferay . commerce . model . CPDAvailabilityEstimateSoap updateCPDAvailabilityEstimate ( long cpdAvailabilityEstimateId , long cpDefinitionId , long commerceAvailabilityEstimateId , com . liferay . portal . kernel . service . ServiceContext serviceContext ) throws RemoteException { try { com . liferay . commerce . model . CPDAvailabilityEstimate returnValue = CPDAvailabilityEstimateServiceUtil . updateCPDAvailabilityEstimate ( cpdAvailabilityEstimateId , cpDefinitionId , commerceAvailabilityEstimateId , serviceContext ) ; return com . liferay . commerce . model . CPDAvailabilityEstimateSoap . toSoapModel ( returnValue ) ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } } +","public static com . liferay . commerce . model . CPDAvailabilityEstimateSoap updateCPDAvailabilityEstimate ( long cpdAvailabilityEstimateId , long cpDefinitionId , long commerceAvailabilityEstimateId , com . liferay . portal . kernel . service . ServiceContext serviceContext ) throws RemoteException { try { com . liferay . commerce . model . CPDAvailabilityEstimate returnValue = CPDAvailabilityEstimateServiceUtil . updateCPDAvailabilityEstimate ( cpdAvailabilityEstimateId , cpDefinitionId , commerceAvailabilityEstimateId , serviceContext ) ; return com . liferay . commerce . model . CPDAvailabilityEstimateSoap . toSoapModel ( returnValue ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } } +" +978,"public CompletableFuture < ? 
extends AbstractWatcher > start ( ) { initInternals ( ) ; this . selector = LabelsHelper . forKind ( entityName , prefix ) ; boolean ok = checkIntegrity ( ) ; if ( ! ok ) { return CompletableFuture . completedFuture ( null ) ; } log . info ( ""Starting {} for namespace {}"" , operatorName , namespace ) ; if ( isCrd ) { this . crd = crdDeployer . initCrds ( client , prefix , entityName , shortNames , pluralName , additionalPrinterColumnNames , additionalPrinterColumnPaths , additionalPrinterColumnTypes , infoClass , isOpenshift ) ; } onInit ( ) ; CompletableFuture < ? extends AbstractWatcher < T > > future = initializeWatcher ( ) ; future . thenApply ( res -> { this . watch = res ; log . info ( ""{}{} running{} for namespace {}"" , AnsiColors . gr ( ) , operatorName , AnsiColors . xx ( ) , Optional . ofNullable ( namespace ) . orElse ( ""'all'"" ) ) ; return res ; } ) . exceptionally ( e -> { log . error ( ""{} startup failed for namespace {}"" , operatorName , namespace , e . getCause ( ) ) ; return null ; } ) ; return future ; } +","public CompletableFuture < ? extends AbstractWatcher > start ( ) { initInternals ( ) ; this . selector = LabelsHelper . forKind ( entityName , prefix ) ; boolean ok = checkIntegrity ( ) ; if ( ! ok ) { log . warn ( ""Unable to initialize the operator correctly, some compulsory fields are missing."" ) ; return CompletableFuture . completedFuture ( null ) ; } log . info ( ""Starting {} for namespace {}"" , operatorName , namespace ) ; if ( isCrd ) { this . crd = crdDeployer . initCrds ( client , prefix , entityName , shortNames , pluralName , additionalPrinterColumnNames , additionalPrinterColumnPaths , additionalPrinterColumnTypes , infoClass , isOpenshift ) ; } onInit ( ) ; CompletableFuture < ? extends AbstractWatcher < T > > future = initializeWatcher ( ) ; future . thenApply ( res -> { this . watch = res ; log . info ( ""{}{} running{} for namespace {}"" , AnsiColors . gr ( ) , operatorName , AnsiColors . xx ( ) , Optional . ofNullable ( namespace ) . orElse ( ""'all'"" ) ) ; return res ; } ) . exceptionally ( e -> { log . error ( ""{} startup failed for namespace {}"" , operatorName , namespace , e . getCause ( ) ) ; return null ; } ) ; return future ; } +" +979,"public CompletableFuture < ? extends AbstractWatcher > start ( ) { initInternals ( ) ; this . selector = LabelsHelper . forKind ( entityName , prefix ) ; boolean ok = checkIntegrity ( ) ; if ( ! ok ) { log . warn ( ""Unable to initialize the operator correctly, some compulsory fields are missing."" ) ; return CompletableFuture . completedFuture ( null ) ; } if ( isCrd ) { this . crd = crdDeployer . initCrds ( client , prefix , entityName , shortNames , pluralName , additionalPrinterColumnNames , additionalPrinterColumnPaths , additionalPrinterColumnTypes , infoClass , isOpenshift ) ; } onInit ( ) ; CompletableFuture < ? extends AbstractWatcher < T > > future = initializeWatcher ( ) ; future . thenApply ( res -> { this . watch = res ; log . info ( ""{}{} running{} for namespace {}"" , AnsiColors . gr ( ) , operatorName , AnsiColors . xx ( ) , Optional . ofNullable ( namespace ) . orElse ( ""'all'"" ) ) ; return res ; } ) . exceptionally ( e -> { log . error ( ""{} startup failed for namespace {}"" , operatorName , namespace , e . getCause ( ) ) ; return null ; } ) ; return future ; } +","public CompletableFuture < ? extends AbstractWatcher > start ( ) { initInternals ( ) ; this . selector = LabelsHelper . forKind ( entityName , prefix ) ; boolean ok = checkIntegrity ( ) ; if ( ! ok ) { log . 
warn ( ""Unable to initialize the operator correctly, some compulsory fields are missing."" ) ; return CompletableFuture . completedFuture ( null ) ; } log . info ( ""Starting {} for namespace {}"" , operatorName , namespace ) ; if ( isCrd ) { this . crd = crdDeployer . initCrds ( client , prefix , entityName , shortNames , pluralName , additionalPrinterColumnNames , additionalPrinterColumnPaths , additionalPrinterColumnTypes , infoClass , isOpenshift ) ; } onInit ( ) ; CompletableFuture < ? extends AbstractWatcher < T > > future = initializeWatcher ( ) ; future . thenApply ( res -> { this . watch = res ; log . info ( ""{}{} running{} for namespace {}"" , AnsiColors . gr ( ) , operatorName , AnsiColors . xx ( ) , Optional . ofNullable ( namespace ) . orElse ( ""'all'"" ) ) ; return res ; } ) . exceptionally ( e -> { log . error ( ""{} startup failed for namespace {}"" , operatorName , namespace , e . getCause ( ) ) ; return null ; } ) ; return future ; } +" +980,"public CompletableFuture < ? extends AbstractWatcher > start ( ) { initInternals ( ) ; this . selector = LabelsHelper . forKind ( entityName , prefix ) ; boolean ok = checkIntegrity ( ) ; if ( ! ok ) { log . warn ( ""Unable to initialize the operator correctly, some compulsory fields are missing."" ) ; return CompletableFuture . completedFuture ( null ) ; } log . info ( ""Starting {} for namespace {}"" , operatorName , namespace ) ; if ( isCrd ) { this . crd = crdDeployer . initCrds ( client , prefix , entityName , shortNames , pluralName , additionalPrinterColumnNames , additionalPrinterColumnPaths , additionalPrinterColumnTypes , infoClass , isOpenshift ) ; } onInit ( ) ; CompletableFuture < ? extends AbstractWatcher < T > > future = initializeWatcher ( ) ; future . thenApply ( res -> { this . watch = res ; return res ; } ) . exceptionally ( e -> { log . error ( ""{} startup failed for namespace {}"" , operatorName , namespace , e . getCause ( ) ) ; return null ; } ) ; return future ; } +","public CompletableFuture < ? extends AbstractWatcher > start ( ) { initInternals ( ) ; this . selector = LabelsHelper . forKind ( entityName , prefix ) ; boolean ok = checkIntegrity ( ) ; if ( ! ok ) { log . warn ( ""Unable to initialize the operator correctly, some compulsory fields are missing."" ) ; return CompletableFuture . completedFuture ( null ) ; } log . info ( ""Starting {} for namespace {}"" , operatorName , namespace ) ; if ( isCrd ) { this . crd = crdDeployer . initCrds ( client , prefix , entityName , shortNames , pluralName , additionalPrinterColumnNames , additionalPrinterColumnPaths , additionalPrinterColumnTypes , infoClass , isOpenshift ) ; } onInit ( ) ; CompletableFuture < ? extends AbstractWatcher < T > > future = initializeWatcher ( ) ; future . thenApply ( res -> { this . watch = res ; log . info ( ""{}{} running{} for namespace {}"" , AnsiColors . gr ( ) , operatorName , AnsiColors . xx ( ) , Optional . ofNullable ( namespace ) . orElse ( ""'all'"" ) ) ; return res ; } ) . exceptionally ( e -> { log . error ( ""{} startup failed for namespace {}"" , operatorName , namespace , e . getCause ( ) ) ; return null ; } ) ; return future ; } +" +981,"public CompletableFuture < ? extends AbstractWatcher > start ( ) { initInternals ( ) ; this . selector = LabelsHelper . forKind ( entityName , prefix ) ; boolean ok = checkIntegrity ( ) ; if ( ! ok ) { log . warn ( ""Unable to initialize the operator correctly, some compulsory fields are missing."" ) ; return CompletableFuture . completedFuture ( null ) ; } log . 
info ( ""Starting {} for namespace {}"" , operatorName , namespace ) ; if ( isCrd ) { this . crd = crdDeployer . initCrds ( client , prefix , entityName , shortNames , pluralName , additionalPrinterColumnNames , additionalPrinterColumnPaths , additionalPrinterColumnTypes , infoClass , isOpenshift ) ; } onInit ( ) ; CompletableFuture < ? extends AbstractWatcher < T > > future = initializeWatcher ( ) ; future . thenApply ( res -> { this . watch = res ; log . info ( ""{}{} running{} for namespace {}"" , AnsiColors . gr ( ) , operatorName , AnsiColors . xx ( ) , Optional . ofNullable ( namespace ) . orElse ( ""'all'"" ) ) ; return res ; } ) . exceptionally ( e -> { return null ; } ) ; return future ; } +","public CompletableFuture < ? extends AbstractWatcher > start ( ) { initInternals ( ) ; this . selector = LabelsHelper . forKind ( entityName , prefix ) ; boolean ok = checkIntegrity ( ) ; if ( ! ok ) { log . warn ( ""Unable to initialize the operator correctly, some compulsory fields are missing."" ) ; return CompletableFuture . completedFuture ( null ) ; } log . info ( ""Starting {} for namespace {}"" , operatorName , namespace ) ; if ( isCrd ) { this . crd = crdDeployer . initCrds ( client , prefix , entityName , shortNames , pluralName , additionalPrinterColumnNames , additionalPrinterColumnPaths , additionalPrinterColumnTypes , infoClass , isOpenshift ) ; } onInit ( ) ; CompletableFuture < ? extends AbstractWatcher < T > > future = initializeWatcher ( ) ; future . thenApply ( res -> { this . watch = res ; log . info ( ""{}{} running{} for namespace {}"" , AnsiColors . gr ( ) , operatorName , AnsiColors . xx ( ) , Optional . ofNullable ( namespace ) . orElse ( ""'all'"" ) ) ; return res ; } ) . exceptionally ( e -> { log . error ( ""{} startup failed for namespace {}"" , operatorName , namespace , e . getCause ( ) ) ; return null ; } ) ; return future ; } +" +982,"@ Disabled @ Test public void testCancelRelease ( ) throws Exception { final com . braintreegateway . Result result = requestBody ( ""direct://CANCELRELEASE"" , null ) ; assertNotNull ( result , ""cancelRelease result"" ) ; } +","@ Disabled @ Test public void testCancelRelease ( ) throws Exception { final com . braintreegateway . Result result = requestBody ( ""direct://CANCELRELEASE"" , null ) ; assertNotNull ( result , ""cancelRelease result"" ) ; LOG . debug ( ""cancelRelease: "" + result ) ; } +" +983,"public void release ( ) throws IOException { try { if ( this . client != null ) { this . client . release ( ) ; } } catch ( Exception e ) { throw new IOException ( e . getMessage ( ) ) ; } finally { parser = null ; buffer . clear ( ) ; remAllConnectionListener ( ) ; } } +","public void release ( ) throws IOException { logger . debug ( ""In release for [{}]"" , this . getKey ( ) ) ; try { if ( this . client != null ) { this . client . release ( ) ; } } catch ( Exception e ) { throw new IOException ( e . getMessage ( ) ) ; } finally { parser = null ; buffer . clear ( ) ; remAllConnectionListener ( ) ; } } +" +984,"public TqlElement visitLiteralComparison ( TqlParser . LiteralComparisonContext ctx ) { TqlElement fieldTqlElement = ctx . getChild ( 0 ) . accept ( this ) ; TqlParser . ComparisonOperatorContext comparisonOperator = ctx . getChild ( TqlParser . ComparisonOperatorContext . class , 0 ) ; ComparisonOperator comparisonOperatorTqlElement = ( ComparisonOperator ) comparisonOperator . accept ( this ) ; TqlParser . LiteralValueContext literalValue = ctx . getChild ( TqlParser . LiteralValueContext . 
class , 0 ) ; LiteralValue literalValueTqlElement = ( LiteralValue ) literalValue . accept ( this ) ; ComparisonExpression comparisonExpression = new ComparisonExpression ( comparisonOperatorTqlElement , fieldTqlElement , literalValueTqlElement ) ; LOG . debug ( ""End visit literal comparison: "" + ctx . getText ( ) ) ; return comparisonExpression ; } +","public TqlElement visitLiteralComparison ( TqlParser . LiteralComparisonContext ctx ) { LOG . debug ( ""Visit literal comparison: "" + ctx . getText ( ) ) ; TqlElement fieldTqlElement = ctx . getChild ( 0 ) . accept ( this ) ; TqlParser . ComparisonOperatorContext comparisonOperator = ctx . getChild ( TqlParser . ComparisonOperatorContext . class , 0 ) ; ComparisonOperator comparisonOperatorTqlElement = ( ComparisonOperator ) comparisonOperator . accept ( this ) ; TqlParser . LiteralValueContext literalValue = ctx . getChild ( TqlParser . LiteralValueContext . class , 0 ) ; LiteralValue literalValueTqlElement = ( LiteralValue ) literalValue . accept ( this ) ; ComparisonExpression comparisonExpression = new ComparisonExpression ( comparisonOperatorTqlElement , fieldTqlElement , literalValueTqlElement ) ; LOG . debug ( ""End visit literal comparison: "" + ctx . getText ( ) ) ; return comparisonExpression ; } +" +985,"public TqlElement visitLiteralComparison ( TqlParser . LiteralComparisonContext ctx ) { LOG . debug ( ""Visit literal comparison: "" + ctx . getText ( ) ) ; TqlElement fieldTqlElement = ctx . getChild ( 0 ) . accept ( this ) ; TqlParser . ComparisonOperatorContext comparisonOperator = ctx . getChild ( TqlParser . ComparisonOperatorContext . class , 0 ) ; ComparisonOperator comparisonOperatorTqlElement = ( ComparisonOperator ) comparisonOperator . accept ( this ) ; TqlParser . LiteralValueContext literalValue = ctx . getChild ( TqlParser . LiteralValueContext . class , 0 ) ; LiteralValue literalValueTqlElement = ( LiteralValue ) literalValue . accept ( this ) ; ComparisonExpression comparisonExpression = new ComparisonExpression ( comparisonOperatorTqlElement , fieldTqlElement , literalValueTqlElement ) ; return comparisonExpression ; } +","public TqlElement visitLiteralComparison ( TqlParser . LiteralComparisonContext ctx ) { LOG . debug ( ""Visit literal comparison: "" + ctx . getText ( ) ) ; TqlElement fieldTqlElement = ctx . getChild ( 0 ) . accept ( this ) ; TqlParser . ComparisonOperatorContext comparisonOperator = ctx . getChild ( TqlParser . ComparisonOperatorContext . class , 0 ) ; ComparisonOperator comparisonOperatorTqlElement = ( ComparisonOperator ) comparisonOperator . accept ( this ) ; TqlParser . LiteralValueContext literalValue = ctx . getChild ( TqlParser . LiteralValueContext . class , 0 ) ; LiteralValue literalValueTqlElement = ( LiteralValue ) literalValue . accept ( this ) ; ComparisonExpression comparisonExpression = new ComparisonExpression ( comparisonOperatorTqlElement , fieldTqlElement , literalValueTqlElement ) ; LOG . debug ( ""End visit literal comparison: "" + ctx . getText ( ) ) ; return comparisonExpression ; } +" +986,"private void addReportingTaskFingerprint ( final StringBuilder builder , final ReportingTaskDTO dto ) { builder . append ( dto . getId ( ) ) ; builder . append ( dto . getType ( ) ) ; builder . append ( dto . getName ( ) ) ; addBundleFingerprint ( builder , dto . getBundle ( ) ) ; builder . append ( dto . getComments ( ) ) ; builder . append ( dto . getSchedulingPeriod ( ) ) ; builder . append ( dto . getSchedulingStrategy ( ) ) ; builder . append ( dto . 
getAnnotationData ( ) ) ; final BundleCoordinate coordinate = getCoordinate ( dto . getType ( ) , dto . getBundle ( ) ) ; final ConfigurableComponent configurableComponent = extensionManager . getTempComponent ( dto . getType ( ) , coordinate ) ; if ( configurableComponent == null ) { } addPropertiesFingerprint ( builder , configurableComponent , dto . getProperties ( ) ) ; } +","private void addReportingTaskFingerprint ( final StringBuilder builder , final ReportingTaskDTO dto ) { builder . append ( dto . getId ( ) ) ; builder . append ( dto . getType ( ) ) ; builder . append ( dto . getName ( ) ) ; addBundleFingerprint ( builder , dto . getBundle ( ) ) ; builder . append ( dto . getComments ( ) ) ; builder . append ( dto . getSchedulingPeriod ( ) ) ; builder . append ( dto . getSchedulingStrategy ( ) ) ; builder . append ( dto . getAnnotationData ( ) ) ; final BundleCoordinate coordinate = getCoordinate ( dto . getType ( ) , dto . getBundle ( ) ) ; final ConfigurableComponent configurableComponent = extensionManager . getTempComponent ( dto . getType ( ) , coordinate ) ; if ( configurableComponent == null ) { logger . warn ( ""Unable to get ReportingTask of type {}; its default properties will be fingerprinted instead of being ignored."" , dto . getType ( ) ) ; } addPropertiesFingerprint ( builder , configurableComponent , dto . getProperties ( ) ) ; } +" +987,"public static com . liferay . portal . kernel . model . UserGroupSoap [ ] getUserGroups ( long companyId , String name , int start , int end ) throws RemoteException { try { java . util . List < com . liferay . portal . kernel . model . UserGroup > returnValue = UserGroupServiceUtil . getUserGroups ( companyId , name , start , end ) ; return com . liferay . portal . kernel . model . UserGroupSoap . toSoapModels ( returnValue ) ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } } +","public static com . liferay . portal . kernel . model . UserGroupSoap [ ] getUserGroups ( long companyId , String name , int start , int end ) throws RemoteException { try { java . util . List < com . liferay . portal . kernel . model . UserGroup > returnValue = UserGroupServiceUtil . getUserGroups ( companyId , name , start , end ) ; return com . liferay . portal . kernel . model . UserGroupSoap . toSoapModels ( returnValue ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } } +" +988,"void restoreTransientState ( ProxyPreparer preparer ) throws RemoteException { if ( operationsLogger . isDebugEnabled ( ) ) { LogUtils . entering ( operationsLogger , TxnManagerTransaction . class , ""restoreTransientState"" ) ; } ParticipantHandle [ ] phs = parthandles ( ) ; if ( phs == null ) return ; int size = phs . length ; ParticipantHandle [ ] handles = new ParticipantHandle [ size ] ; int j = 0 ; for ( ParticipantHandle ph : phs ) { handles [ ++ j ] = ph ; } for ( int i = 0 ; i < handles . length ; i ++ ) { handles [ i ] . restoreTransientState ( preparer ) ; if ( transactionsLogger . isTraceEnabled ( ) ) { } } if ( operationsLogger . isDebugEnabled ( ) ) { LogUtils . exiting ( operationsLogger , TxnManagerTransaction . class , ""restoreTransientState"" ) ; } } +","void restoreTransientState ( ProxyPreparer preparer ) throws RemoteException { if ( operationsLogger . isDebugEnabled ( ) ) { LogUtils . entering ( operationsLogger , TxnManagerTransaction . 
class , ""restoreTransientState"" ) ; } ParticipantHandle [ ] phs = parthandles ( ) ; if ( phs == null ) return ; int size = phs . length ; ParticipantHandle [ ] handles = new ParticipantHandle [ size ] ; int j = 0 ; for ( ParticipantHandle ph : phs ) { handles [ ++ j ] = ph ; } for ( int i = 0 ; i < handles . length ; i ++ ) { handles [ i ] . restoreTransientState ( preparer ) ; if ( transactionsLogger . isTraceEnabled ( ) ) { transactionsLogger . trace ( ""Restored transient state for {}"" , handles [ i ] ) ; } } if ( operationsLogger . isDebugEnabled ( ) ) { LogUtils . exiting ( operationsLogger , TxnManagerTransaction . class , ""restoreTransientState"" ) ; } } +" +989,"FileUtils . FileCopyResult getSegmentFiles ( String region , String container , String path , File outDir ) throws SegmentLoadingException { CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy ( cloudFilesApi , region , container ) ; final CloudFilesByteSource byteSource = new CloudFilesByteSource ( objectApi , path ) ; try { final FileUtils . FileCopyResult result = CompressionUtils . unzip ( byteSource , outDir , CloudFilesUtils . CLOUDFILESRETRY , false ) ; return result ; } catch ( Exception e ) { try { FileUtils . deleteDirectory ( outDir ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to remove output directory [%s] for segment pulled from [%s]"" , outDir . getAbsolutePath ( ) , path ) ; } throw new SegmentLoadingException ( e , e . getMessage ( ) ) ; } finally { try { byteSource . closeStream ( ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to close payload for segmente pulled from [%s]"" , path ) ; } } } +","FileUtils . FileCopyResult getSegmentFiles ( String region , String container , String path , File outDir ) throws SegmentLoadingException { CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy ( cloudFilesApi , region , container ) ; final CloudFilesByteSource byteSource = new CloudFilesByteSource ( objectApi , path ) ; try { final FileUtils . FileCopyResult result = CompressionUtils . unzip ( byteSource , outDir , CloudFilesUtils . CLOUDFILESRETRY , false ) ; log . info ( ""Loaded %d bytes from [%s] to [%s]"" , result . size ( ) , path , outDir . getAbsolutePath ( ) ) ; return result ; } catch ( Exception e ) { try { FileUtils . deleteDirectory ( outDir ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to remove output directory [%s] for segment pulled from [%s]"" , outDir . getAbsolutePath ( ) , path ) ; } throw new SegmentLoadingException ( e , e . getMessage ( ) ) ; } finally { try { byteSource . closeStream ( ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to close payload for segmente pulled from [%s]"" , path ) ; } } } +" +990,"FileUtils . FileCopyResult getSegmentFiles ( String region , String container , String path , File outDir ) throws SegmentLoadingException { CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy ( cloudFilesApi , region , container ) ; final CloudFilesByteSource byteSource = new CloudFilesByteSource ( objectApi , path ) ; try { final FileUtils . FileCopyResult result = CompressionUtils . unzip ( byteSource , outDir , CloudFilesUtils . CLOUDFILESRETRY , false ) ; log . info ( ""Loaded %d bytes from [%s] to [%s]"" , result . size ( ) , path , outDir . getAbsolutePath ( ) ) ; return result ; } catch ( Exception e ) { try { FileUtils . deleteDirectory ( outDir ) ; } catch ( IOException ioe ) { } throw new SegmentLoadingException ( e , e . getMessage ( ) ) ; } finally { try { byteSource . 
closeStream ( ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to close payload for segmente pulled from [%s]"" , path ) ; } } } +","FileUtils . FileCopyResult getSegmentFiles ( String region , String container , String path , File outDir ) throws SegmentLoadingException { CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy ( cloudFilesApi , region , container ) ; final CloudFilesByteSource byteSource = new CloudFilesByteSource ( objectApi , path ) ; try { final FileUtils . FileCopyResult result = CompressionUtils . unzip ( byteSource , outDir , CloudFilesUtils . CLOUDFILESRETRY , false ) ; log . info ( ""Loaded %d bytes from [%s] to [%s]"" , result . size ( ) , path , outDir . getAbsolutePath ( ) ) ; return result ; } catch ( Exception e ) { try { FileUtils . deleteDirectory ( outDir ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to remove output directory [%s] for segment pulled from [%s]"" , outDir . getAbsolutePath ( ) , path ) ; } throw new SegmentLoadingException ( e , e . getMessage ( ) ) ; } finally { try { byteSource . closeStream ( ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to close payload for segmente pulled from [%s]"" , path ) ; } } } +" +991,"FileUtils . FileCopyResult getSegmentFiles ( String region , String container , String path , File outDir ) throws SegmentLoadingException { CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy ( cloudFilesApi , region , container ) ; final CloudFilesByteSource byteSource = new CloudFilesByteSource ( objectApi , path ) ; try { final FileUtils . FileCopyResult result = CompressionUtils . unzip ( byteSource , outDir , CloudFilesUtils . CLOUDFILESRETRY , false ) ; log . info ( ""Loaded %d bytes from [%s] to [%s]"" , result . size ( ) , path , outDir . getAbsolutePath ( ) ) ; return result ; } catch ( Exception e ) { try { FileUtils . deleteDirectory ( outDir ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to remove output directory [%s] for segment pulled from [%s]"" , outDir . getAbsolutePath ( ) , path ) ; } throw new SegmentLoadingException ( e , e . getMessage ( ) ) ; } finally { try { byteSource . closeStream ( ) ; } catch ( IOException ioe ) { } } } +","FileUtils . FileCopyResult getSegmentFiles ( String region , String container , String path , File outDir ) throws SegmentLoadingException { CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy ( cloudFilesApi , region , container ) ; final CloudFilesByteSource byteSource = new CloudFilesByteSource ( objectApi , path ) ; try { final FileUtils . FileCopyResult result = CompressionUtils . unzip ( byteSource , outDir , CloudFilesUtils . CLOUDFILESRETRY , false ) ; log . info ( ""Loaded %d bytes from [%s] to [%s]"" , result . size ( ) , path , outDir . getAbsolutePath ( ) ) ; return result ; } catch ( Exception e ) { try { FileUtils . deleteDirectory ( outDir ) ; } catch ( IOException ioe ) { log . warn ( ioe , ""Failed to remove output directory [%s] for segment pulled from [%s]"" , outDir . getAbsolutePath ( ) , path ) ; } throw new SegmentLoadingException ( e , e . getMessage ( ) ) ; } finally { try { byteSource . closeStream ( ) ; } catch ( IOException ioe ) { log . 
warn ( ioe , ""Failed to close payload for segmente pulled from [%s]"" , path ) ; } } } +" +992,"@ Pollable ( message = ""Updating merged asset text units"" ) void updateLastSuccessfulAsseteExtraction ( Asset asset , MultiBranchState currentState , @ ParentTask PollableTask currentTask ) { AssetExtraction lastSuccessfulAssetExtraction = getOrCreateLastSuccessfulAssetExtraction ( asset ) ; MultiBranchState lastSuccessfulMultiBranchState = updateAssetExtractionWithState ( lastSuccessfulAssetExtraction . getId ( ) , currentState , AssetContentMd5s . of ( ) ) ; } +","@ Pollable ( message = ""Updating merged asset text units"" ) void updateLastSuccessfulAsseteExtraction ( Asset asset , MultiBranchState currentState , @ ParentTask PollableTask currentTask ) { logger . trace ( ""Make sure we have a last successful extraction in the Asset (legacy support edge case)"" ) ; AssetExtraction lastSuccessfulAssetExtraction = getOrCreateLastSuccessfulAssetExtraction ( asset ) ; MultiBranchState lastSuccessfulMultiBranchState = updateAssetExtractionWithState ( lastSuccessfulAssetExtraction . getId ( ) , currentState , AssetContentMd5s . of ( ) ) ; } +" +993,"protected Future < Message > processRemoveCmdHandlingAdapterInstance ( final Message request , final ResourceIdentifier targetAddress , final SpanContext spanContext ) { final String tenantId = targetAddress . getTenantId ( ) ; final String deviceId = MessageHelper . getDeviceId ( request ) ; final String adapterInstanceId = MessageHelper . getApplicationProperty ( request . getApplicationProperties ( ) , MessageHelper . APP_PROPERTY_ADAPTER_INSTANCE_ID , String . class ) ; final Span span = TracingHelper . buildServerChildSpan ( tracer , spanContext , SPAN_NAME_REMOVE_CMD_HANDLING_ADAPTER_INSTANCE , getClass ( ) . getSimpleName ( ) ) . start ( ) ; final Future < Message > resultFuture ; if ( tenantId == null || deviceId == null || adapterInstanceId == null ) { TracingHelper . logError ( span , ""missing tenant, device and/or adapter instance id"" ) ; resultFuture = Future . failedFuture ( new ClientErrorException ( HttpURLConnection . HTTP_BAD_REQUEST ) ) ; } else { TracingHelper . TAG_TENANT_ID . set ( span , tenantId ) ; TracingHelper . TAG_DEVICE_ID . set ( span , deviceId ) ; span . setTag ( MessageHelper . APP_PROPERTY_ADAPTER_INSTANCE_ID , adapterInstanceId ) ; resultFuture = getService ( ) . removeCommandHandlingAdapterInstance ( tenantId , deviceId , adapterInstanceId , span ) . map ( res -> DeviceConnectionConstants . getAmqpReply ( DeviceConnectionConstants . DEVICE_CONNECTION_ENDPOINT , tenantId , request , res ) ) ; } return finishSpanOnFutureCompletion ( span , resultFuture ) ; } +","protected Future < Message > processRemoveCmdHandlingAdapterInstance ( final Message request , final ResourceIdentifier targetAddress , final SpanContext spanContext ) { final String tenantId = targetAddress . getTenantId ( ) ; final String deviceId = MessageHelper . getDeviceId ( request ) ; final String adapterInstanceId = MessageHelper . getApplicationProperty ( request . getApplicationProperties ( ) , MessageHelper . APP_PROPERTY_ADAPTER_INSTANCE_ID , String . class ) ; final Span span = TracingHelper . buildServerChildSpan ( tracer , spanContext , SPAN_NAME_REMOVE_CMD_HANDLING_ADAPTER_INSTANCE , getClass ( ) . getSimpleName ( ) ) . start ( ) ; final Future < Message > resultFuture ; if ( tenantId == null || deviceId == null || adapterInstanceId == null ) { TracingHelper . 
logError ( span , ""missing tenant, device and/or adapter instance id"" ) ; resultFuture = Future . failedFuture ( new ClientErrorException ( HttpURLConnection . HTTP_BAD_REQUEST ) ) ; } else { TracingHelper . TAG_TENANT_ID . set ( span , tenantId ) ; TracingHelper . TAG_DEVICE_ID . set ( span , deviceId ) ; span . setTag ( MessageHelper . APP_PROPERTY_ADAPTER_INSTANCE_ID , adapterInstanceId ) ; logger . debug ( ""removing command handling adapter instance for tenant [{}], device [{}] with value {}"" , tenantId , deviceId , adapterInstanceId ) ; resultFuture = getService ( ) . removeCommandHandlingAdapterInstance ( tenantId , deviceId , adapterInstanceId , span ) . map ( res -> DeviceConnectionConstants . getAmqpReply ( DeviceConnectionConstants . DEVICE_CONNECTION_ENDPOINT , tenantId , request , res ) ) ; } return finishSpanOnFutureCompletion ( span , resultFuture ) ; } +" +994,"public void stateHasChanged ( SharedTriStateReader reader , SharedTriState . STATE value ) throws Exception { if ( log . isTraceEnabled ( ) ) { } } +","public void stateHasChanged ( SharedTriStateReader reader , SharedTriState . STATE value ) throws Exception { if ( log . isTraceEnabled ( ) ) { log . trace ( ""table:"" + tableName + "" stateHasChanged("" + reader + "", "" + value + ""). This listener does nothing"" ) ; } } +" +995,"public void done ( boolean doneSync ) { try { if ( exception == null ) { exchange . removeProperty ( ExchangePropertyKey . FAILURE_ENDPOINT ) ; } else { exchange . setException ( exception ) ; exchange . setProperty ( ExchangePropertyKey . EXCEPTION_CAUGHT , exception ) ; } if ( ! doneSync ) { ExchangeHelper . prepareOutToIn ( exchange ) ; if ( LOG . isTraceEnabled ( ) ) { } } } finally { callback . done ( doneSync ) ; } } +","public void done ( boolean doneSync ) { try { if ( exception == null ) { exchange . removeProperty ( ExchangePropertyKey . FAILURE_ENDPOINT ) ; } else { exchange . setException ( exception ) ; exchange . setProperty ( ExchangePropertyKey . EXCEPTION_CAUGHT , exception ) ; } if ( ! doneSync ) { ExchangeHelper . prepareOutToIn ( exchange ) ; if ( LOG . isTraceEnabled ( ) ) { LOG . trace ( ""Processing complete for exchangeId: {} >>> {}"" , exchange . getExchangeId ( ) , exchange ) ; } } } finally { callback . done ( doneSync ) ; } } +" +996,"private static void logWeighedThrottlingSetting ( ) { if ( connectionWeightEnabled ) { LOG . info ( ""The weights for different session types are: global {} renew {} local {}"" , DEFAULT_GLOBAL_SESSION_WEIGHT , DEFAULT_RENEW_SESSION_WEIGHT , DEFAULT_LOCAL_SESSION_WEIGHT ) ; } else { LOG . info ( ""Weighed connection throttling is disabled"" ) ; } } +","private static void logWeighedThrottlingSetting ( ) { if ( connectionWeightEnabled ) { LOG . info ( ""Weighed connection throttling is enabled. "" + ""But it will only be effective if connection throttling is enabled"" ) ; LOG . info ( ""The weights for different session types are: global {} renew {} local {}"" , DEFAULT_GLOBAL_SESSION_WEIGHT , DEFAULT_RENEW_SESSION_WEIGHT , DEFAULT_LOCAL_SESSION_WEIGHT ) ; } else { LOG . info ( ""Weighed connection throttling is disabled"" ) ; } } +" +997,"private static void logWeighedThrottlingSetting ( ) { if ( connectionWeightEnabled ) { LOG . info ( ""Weighed connection throttling is enabled. "" + ""But it will only be effective if connection throttling is enabled"" ) ; } else { LOG . info ( ""Weighed connection throttling is disabled"" ) ; } } +","private static void logWeighedThrottlingSetting ( ) { if ( connectionWeightEnabled ) { LOG . 
info ( ""Weighed connection throttling is enabled. "" + ""But it will only be effective if connection throttling is enabled"" ) ; LOG . info ( ""The weights for different session types are: global {} renew {} local {}"" , DEFAULT_GLOBAL_SESSION_WEIGHT , DEFAULT_RENEW_SESSION_WEIGHT , DEFAULT_LOCAL_SESSION_WEIGHT ) ; } else { LOG . info ( ""Weighed connection throttling is disabled"" ) ; } } +" +998,"private static void logWeighedThrottlingSetting ( ) { if ( connectionWeightEnabled ) { LOG . info ( ""Weighed connection throttling is enabled. "" + ""But it will only be effective if connection throttling is enabled"" ) ; LOG . info ( ""The weights for different session types are: global {} renew {} local {}"" , DEFAULT_GLOBAL_SESSION_WEIGHT , DEFAULT_RENEW_SESSION_WEIGHT , DEFAULT_LOCAL_SESSION_WEIGHT ) ; } else { } } +","private static void logWeighedThrottlingSetting ( ) { if ( connectionWeightEnabled ) { LOG . info ( ""Weighed connection throttling is enabled. "" + ""But it will only be effective if connection throttling is enabled"" ) ; LOG . info ( ""The weights for different session types are: global {} renew {} local {}"" , DEFAULT_GLOBAL_SESSION_WEIGHT , DEFAULT_RENEW_SESSION_WEIGHT , DEFAULT_LOCAL_SESSION_WEIGHT ) ; } else { LOG . info ( ""Weighed connection throttling is disabled"" ) ; } } +" +999,"public void doExceptionAction ( final HttpServletResponse httpResponse , final RuntimeException e ) throws IOException { httpResponse . sendError ( HttpServletResponse . SC_FORBIDDEN , ""Error in Authentication"" ) ; } +","public void doExceptionAction ( final HttpServletResponse httpResponse , final RuntimeException e ) throws IOException { LOGGER . error ( ""Error in Authentication"" , e ) ; httpResponse . sendError ( HttpServletResponse . SC_FORBIDDEN , ""Error in Authentication"" ) ; } +"