input: string, lengths 28 to 18.7k
output: string, lengths 39 to 1.69k
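Each row below holds, in the input column, a whitespace-tokenized JUnit test whose original assertion is masked as "<AssertPlaceHolder>" followed by the focal method it exercises, and, in the output column, the gold assertion that fills the mask; spaces inside string literals are encoded as <sp>. As a reading aid only, here is a minimal de-tokenized sketch of the first row, assuming JUnit 4 with Mockito static imports and that nativeResultSet and nativeConnection are mock fields configured elsewhere in the original Pentaho test fixture (that setup is not part of the row):

    import static org.mockito.Mockito.anyInt;
    import static org.mockito.Mockito.when;

    import org.junit.Assert;
    import org.junit.Test;
    import org.pentaho.platform.plugin.services.connections.sql.SQLResultSet;

    // Fragment of the test class: nativeResultSet and nativeConnection are assumed
    // to be mocks created by the surrounding fixture, which is not shown in this row.
    @Test
    public void testNext() throws Exception {
        Object value = "testObject";
        when(nativeResultSet.getObject(anyInt())).thenReturn(value);
        when(nativeResultSet.next()).thenReturn(true);
        SQLResultSet resultSet = new SQLResultSet(nativeResultSet, nativeConnection);
        Object[] objects = resultSet.next();
        for (int i = 0; i < objects.length; i++) {
            // gold assertion taken from the "output" column of the same row
            Assert.assertEquals(value, objects[i]);
        }
    }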
testNext ( ) { java . lang . Object value = "testObject" ; when ( nativeResultSet . getObject ( anyInt ( ) ) ) . thenReturn ( value ) ; when ( nativeResultSet . next ( ) ) . thenReturn ( true ) ; org . pentaho . platform . plugin . services . connections . sql . SQLResultSet resultSet = new org . pentaho . platform . plugin . services . connections . sql . SQLResultSet ( nativeResultSet , nativeConnection ) ; java . lang . Object [ ] objects = resultSet . next ( ) ; for ( int i = 0 ; i < ( objects . length ) ; i ++ ) { "<AssertPlaceHolder>" ; } } next ( ) { if ( ( peekRow ) != null ) { java . lang . Object [ ] row = peekRow ; peekRow = null ; return row ; } try { int columns = nativeResultSet . getMetaData ( ) . getColumnCount ( ) ; if ( nativeResultSet . next ( ) ) { java . lang . Object [ ] currentRow = new java . lang . Object [ columns ] ; for ( int column = 0 ; column < columns ; column ++ ) { currentRow [ column ] = nativeResultSet . getObject ( ( column + 1 ) ) ; } return currentRow ; } } catch ( java . sql . SQLException e ) { org . pentaho . platform . plugin . services . connections . sql . SQLResultSet . log . error ( org . pentaho . platform . plugin . services . messages . Messages . getInstance ( ) . getErrorString ( "SQLResultSet.ERROR_0005_NEXT" ) , e ) ; throw new org . pentaho . platform . plugin . services . connections . sql . SQLResultSetException ( org . pentaho . platform . plugin . services . messages . Messages . getInstance ( ) . getErrorString ( "SQLResultSet.ERROR_0005_NEXT" ) , e ) ; } return null ; }
org . junit . Assert . assertEquals ( value , objects [ i ] )
testGetNotExist ( ) { jp . primecloud . auto . zabbix . model . host . HostGetParam param = new jp . primecloud . auto . zabbix . model . host . HostGetParam ( ) ; param . setHostids ( java . util . Arrays . asList ( "999999" ) ) ; param . setOutput ( "extend" ) ; param . setSelectGroups ( "extend" ) ; param . setSelectParentTemplates ( "extend" ) ; java . util . List < jp . primecloud . auto . zabbix . model . host . Host > hosts = client . host ( ) . get ( param ) ; "<AssertPlaceHolder>" ; } get ( java . lang . String ) { if ( ( jp . primecloud . auto . ui . util . ViewProperties . userBundle ) != null ) { try { return jp . primecloud . auto . ui . util . ViewProperties . userBundle . getString ( key ) ; } catch ( java . util . MissingResourceException ignore ) { } } try { return jp . primecloud . auto . ui . util . ViewProperties . defaultBundle . getString ( key ) ; } catch ( java . util . MissingResourceException ignore ) { } return "" ; }
org . junit . Assert . assertEquals ( 0 , hosts . size ( ) )
testEquals ( ) { gov . uspto . patent . model . classification . CpcClassification cpc1 = new gov . uspto . patent . model . classification . CpcClassification ( ) ; cpc1 . parseText ( "D07B2201" ) ; gov . uspto . patent . model . classification . CpcClassification cpc2 = new gov . uspto . patent . model . classification . CpcClassification ( ) ; cpc2 . parseText ( "D07B2201" ) ; "<AssertPlaceHolder>" ; } parseText ( java . lang . String ) { super . setTextOriginal ( classificationStr ) ; java . util . regex . Matcher matcher = gov . uspto . patent . model . classification . CpcClassification . REGEX . matcher ( classificationStr ) ; if ( matcher . matches ( ) ) { java . lang . String section = matcher . group ( 1 ) ; java . lang . String mainClass = matcher . group ( 2 ) ; java . lang . String subClass = matcher . group ( 3 ) ; java . lang . String mainGroup = matcher . group ( 4 ) ; java . lang . String subGroup = matcher . group ( 5 ) ; setSection ( section ) ; setMainClass ( mainClass ) ; setSubClass ( subClass ) ; setMainGroup ( mainGroup ) ; setSubGroup ( subGroup ) ; } else if ( ( classificationStr . length ( ) ) == 3 ) { java . util . regex . Matcher matchL3 = gov . uspto . patent . model . classification . CpcClassification . REGEX_LEN3 . matcher ( classificationStr ) ; if ( matchL3 . matches ( ) ) { java . lang . String section = matchL3 . group ( 1 ) ; java . lang . String mainClass = matchL3 . group ( 2 ) ; setSection ( section ) ; setMainClass ( mainClass ) ; } } else if ( ( classificationStr . length ( ) ) == 4 ) { java . util . regex . Matcher matchL4 = gov . uspto . patent . model . classification . CpcClassification . REGEX_LEN4 . matcher ( classificationStr ) ; if ( matchL4 . matches ( ) ) { java . lang . String section = matchL4 . group ( 1 ) ; java . lang . String mainClass = matchL4 . group ( 2 ) ; java . lang . String subClass = matchL4 . group ( 3 ) ; setSection ( section ) ; setMainClass ( mainClass ) ; setSubClass ( subClass ) ; } } else { throw new java . text . ParseException ( ( "Failed<sp>to<sp>regex<sp>parse<sp>USPC<sp>Classification:<sp>" + classificationStr ) , 0 ) ; } }
org . junit . Assert . assertEquals ( cpc1 , cpc2 )
testCalculatePreviosStartIndex_FirstPageStartIndexLessThanCount ( ) { int nextStartIndex = org . deegree . services . wfs . util . ResponsePagingUtils . calculatePreviousStartIndex ( 5 , 10 ) ; "<AssertPlaceHolder>" ; } calculatePreviousStartIndex ( int , int ) { int previousStartIndex = startIndex - count ; if ( ( previousStartIndex < 0 ) && ( startIndex > 0 ) ) return 0 ; if ( previousStartIndex >= 0 ) { return previousStartIndex ; } return - 1 ; }
org . junit . Assert . assertThat ( nextStartIndex , org . hamcrest . CoreMatchers . is ( 0 ) )
test_CaptureEnPassantRightFalse ( ) { classUnderTest = addBlackPawn ( GenericPosition . e4 ) ; pl . add ( new eubos . board . pieces . Knight ( eubos . board . pieces . Piece . Colour . white , GenericPosition . d2 ) ) ; pm = new eubos . position . PositionManager ( new eubos . board . Board ( pl ) , eubos . board . pieces . Piece . Colour . black ) ; pm . performMove ( new eubos . board . pieces . GenericMove ( GenericPosition . d2 , GenericPosition . d4 ) ) ; ml = classUnderTest . generateMoves ( pm . getTheBoard ( ) ) ; expectedMove = new eubos . board . pieces . GenericMove ( GenericPosition . e4 , GenericPosition . d3 ) ; "<AssertPlaceHolder>" ; } getTheBoard ( ) { return theBoard ; }
org . junit . Assert . assertFalse ( ml . contains ( expectedMove ) )
testGetTables ( ) { com . ebay . pulsar . analytics . datasource . DataSourceProvider provider = org . mockito . Mockito . mock ( com . ebay . pulsar . analytics . datasource . DataSourceProvider . class ) ; java . util . List < com . ebay . pulsar . analytics . datasource . Table > tables = com . google . common . collect . Lists . newArrayList ( ) ; com . ebay . pulsar . analytics . datasource . Table t = new com . ebay . pulsar . analytics . datasource . Table ( ) ; t . setDateColumn ( "time" ) ; t . setTableName ( "T1" ) ; tables . add ( t ) ; when ( provider . getTables ( ) ) . thenReturn ( tables ) ; com . ebay . pulsar . analytics . datasource . DataSourceMetaRepo repo = org . mockito . Mockito . mock ( com . ebay . pulsar . analytics . datasource . DataSourceMetaRepo . class ) ; java . util . Map < java . lang . String , com . ebay . pulsar . analytics . datasource . DataSourceConfiguration > repoMap = com . google . common . collect . Maps . newHashMap ( ) ; com . ebay . pulsar . analytics . datasource . DataSourceConfiguration conf = new com . ebay . pulsar . analytics . datasource . DataSourceConfiguration ( com . ebay . pulsar . analytics . datasource . DataSourceTypeEnum . PULSAR , "pulsar" ) ; conf . setEndPoint ( com . google . common . collect . Lists . newArrayList ( "aaaa,bbbb" ) ) ; conf . setRealOnly ( true ) ; conf . setRefreshTime ( new java . util . Date ( ) . getTime ( ) ) ; repoMap . put ( "pulsar" , conf ) ; when ( repo . getDBMetaFromCache ( org . mockito . Matchers . anyString ( ) ) ) . thenReturn ( provider ) ; com . ebay . pulsar . analytics . service . ReflectFieldUtil . setStaticField ( com . ebay . pulsar . analytics . datasource . DataSourceMetaRepo . class , "instance" , repo ) ; com . ebay . pulsar . analytics . dao . RDBMS db = org . mockito . Mockito . mock ( com . ebay . pulsar . analytics . dao . RDBMS . class ) ; com . ebay . pulsar . analytics . service . DataSourceService datasourceService = new com . ebay . pulsar . analytics . service . DataSourceService ( ) ; com . ebay . pulsar . analytics . dao . service . BaseDBService < ? > ds = ( ( com . ebay . pulsar . analytics . dao . service . BaseDBService < ? > ) ( com . ebay . pulsar . analytics . service . ReflectFieldUtil . getField ( datasourceService , "datasourceService" ) ) ) ; com . ebay . pulsar . analytics . dao . service . BaseDBService < ? > rs = ( ( com . ebay . pulsar . analytics . dao . service . BaseDBService < ? > ) ( com . ebay . pulsar . analytics . service . ReflectFieldUtil . getField ( datasourceService , "rightGroupService" ) ) ) ; com . ebay . pulsar . analytics . service . ReflectFieldUtil . setField ( com . ebay . pulsar . analytics . dao . service . BaseDBService . class , ds , "db" , db ) ; com . ebay . pulsar . analytics . service . ReflectFieldUtil . setField ( com . ebay . pulsar . analytics . dao . service . BaseDBService . class , rs , "db" , db ) ; com . ebay . pulsar . analytics . dao . model . DBDataSource datasource = new com . ebay . pulsar . analytics . dao . model . DBDataSource ( ) ; datasource . setName ( com . ebay . pulsar . analytics . service . DatasourceServiceTest . uttestdatasource1 ) ; datasource . setId ( 1L ) ; datasource . setOwner ( com . ebay . pulsar . analytics . service . DatasourceServiceTest . uttestuser ) ; datasource . setType ( DataSourceTypeEnum . DRUID . getType ( ) ) ; java . util . List < java . lang . String > ts = datasourceService . getDataSourceTables ( datasource ) ; "<AssertPlaceHolder>" ; } getDataSourceTables ( com . ebay . pulsar . analytics . dao . model . DBDataSource ) { java . lang . StringBuilder keyBuilder = new java . lang . StringBuilder ( ) ; com . google . common . base . Joiner . on ( '.' ) . appendTo ( keyBuilder , datasource . getType ( ) , datasource . getName ( ) ) ; java . lang . String dbNameSpace = keyBuilder . toString ( ) ; com . ebay . pulsar . analytics . datasource . DataSourceProvider dataSourceProvider = com . ebay . pulsar . analytics . datasource . DataSourceMetaRepo . getInstance ( ) . getDBMetaFromCache ( dbNameSpace ) ; if ( dataSourceProvider != null ) { return com . google . common . collect . FluentIterable . from ( dataSourceProvider . getTables ( ) ) . transform ( new com . google . common . base . Function < com . ebay . pulsar . analytics . datasource . Table , java . lang . String > ( ) { public java . lang . String apply ( com . ebay . pulsar . analytics . datasource . Table input ) { return input . getTableName ( ) ; } } ) . toList ( ) ; } return null ; }
org . junit . Assert . assertEquals ( com . google . common . collect . Lists . newArrayList ( "T1" ) , ts )
testDisconnectedClient ( ) { it . polimi . deib . provaFinale2014 . andrea . celli_stefano1 . cereda . server . DisconnectedClient dc = new it . polimi . deib . provaFinale2014 . andrea . celli_stefano1 . cereda . server . DisconnectedClient ( 1 , null , null ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertNotNull ( dc )
construct_withNullArgName ( ) { final net . sf . qualitycheck . exception . IllegalNullElementsException e = new net . sf . qualitycheck . exception . IllegalNullElementsException ( ( ( java . lang . String ) ( null ) ) ) ; final java . lang . String expected = IllegalNullElementsException . DEFAULT_MESSAGE ; "<AssertPlaceHolder>" ; } getMessage ( ) { final java . lang . String message = super . getMessage ( ) ; if ( ( session ) != null ) { final java . lang . String context = session . getContext ( ) ; if ( ! ( context . isEmpty ( ) ) ) { return ( message + "<sp>" ) + context ; } else { return message ; } } else { return message ; } }
org . junit . Assert . assertEquals ( expected , e . getMessage ( ) )
testWriteIntMatrixCols ( ) { int [ ] [ ] matrix = new int [ ] [ ] { new int [ ] { 1 , 2 , 3 } , new int [ ] { 4 , 5 , 6 } , new int [ ] { 7 , 8 , 9 } } ; int rows = matrix . length ; int columns = matrix [ 0 ] . length ; java . io . File tmp = java . io . File . createTempFile ( "LDAUtilsTest" , ".bin" ) ; int iteration = 1 ; int [ ] colIndeces = new int [ ] { 0 , 2 } ; cc . mallet . util . LDAUtils . writeBinaryIntMatrixCols ( matrix , iteration , rows , columns , tmp . getAbsolutePath ( ) , colIndeces ) ; java . lang . String fn = java . lang . String . format ( ( ( ( ( ( ( tmp . getAbsolutePath ( ) ) + "_" ) + rows ) + "_" ) + columns ) + "_%05d.BINARY" ) , iteration ) ; java . io . RandomAccessFile inputPhiFile = new java . io . RandomAccessFile ( fn , "rw" ) ; java . nio . channels . FileChannel phiChannel = inputPhiFile . getChannel ( ) ; final int bufferSize = ( 8 * columns ) * rows ; java . nio . ByteBuffer buf = phiChannel . map ( FileChannel . MapMode . READ_ONLY , 0 , bufferSize ) ; for ( int to = 0 ; to < rows ; to ++ ) { for ( int ty = 0 ; ty < ( colIndeces . length ) ; ty ++ ) { int readInt = buf . getInt ( ) ; "<AssertPlaceHolder>" ; } } inputPhiFile . close ( ) ; } writeBinaryIntMatrixCols ( int [ ] [ ] , int , int , int , java . lang . String , int [ ] ) { java . lang . String fn = java . lang . String . format ( ( ( ( ( ( filename + "_" ) + rows ) + "_" ) + columns ) + "_%05d.BINARY" ) , iteration ) ; try ( java . io . RandomAccessFile outputPhiFile = new java . io . RandomAccessFile ( fn , "rw" ) ) { java . nio . channels . FileChannel phiChannel = outputPhiFile . getChannel ( ) ; final int bufferSize = ( 8 * columns ) * rows ; java . nio . ByteBuffer buf = phiChannel . map ( FileChannel . MapMode . READ_WRITE , 0 , bufferSize ) ; for ( int to = 0 ; to < rows ; to ++ ) { for ( int ty = 0 ; ty < ( colIndices . length ) ; ty ++ ) { buf . putInt ( matrix [ to ] [ colIndices [ ty ] ] ) ; } } } }
org . junit . Assert . assertEquals ( matrix [ to ] [ colIndeces [ ty ] ] , readInt )
test52createXGroupUserFromMap ( ) { setup ( ) ; org . apache . ranger . view . VXGroup vxGroup = vxGroup ( ) ; org . apache . ranger . view . VXUser vxUser = vxUser ( ) ; java . util . List < org . apache . ranger . view . VXUser > vXUserList = new java . util . ArrayList < org . apache . ranger . view . VXUser > ( ) ; vXUserList . add ( vxUser ) ; org . apache . ranger . view . VXGroupUserInfo vxGUInfo = new org . apache . ranger . view . VXGroupUserInfo ( ) ; vxGUInfo . setXgroupInfo ( vxGroup ) ; vxGUInfo . setXuserInfo ( vXUserList ) ; org . apache . ranger . db . XXUserDao xxUserDao = org . mockito . Mockito . mock ( org . apache . ranger . db . XXUserDao . class ) ; org . apache . ranger . db . XXPortalUserDao userDao = org . mockito . Mockito . mock ( org . apache . ranger . db . XXPortalUserDao . class ) ; org . apache . ranger . db . XXPortalUserRoleDao userRoleDao = org . mockito . Mockito . mock ( org . apache . ranger . db . XXPortalUserRoleDao . class ) ; org . apache . ranger . db . XXModuleDefDao xXModuleDefDao = org . mockito . Mockito . mock ( org . apache . ranger . db . XXModuleDefDao . class ) ; org . apache . ranger . entity . XXUser xXUser = xxUser ( vxUser ) ; org . apache . ranger . view . VXPortalUser userProfile = userProfile ( ) ; org . apache . ranger . entity . XXPortalUser xXPortalUser = xxPortalUser ( userProfile ) ; xXPortalUser . setUserSource ( RangerCommonEnums . USER_EXTERNAL ) ; java . util . List < java . lang . String > lstRole = new java . util . ArrayList < java . lang . String > ( ) ; lstRole . add ( RangerConstants . ROLE_SYS_ADMIN ) ; java . util . List < org . apache . ranger . entity . XXModuleDef > xXModuleDefs = new java . util . ArrayList < org . apache . ranger . entity . XXModuleDef > ( ) ; org . mockito . Mockito . when ( daoManager . getXXUser ( ) ) . thenReturn ( xxUserDao ) ; org . mockito . Mockito . when ( xxUserDao . findByUserName ( vxUser . getName ( ) ) ) . thenReturn ( xXUser ) ; org . mockito . Mockito . when ( daoManager . getXXPortalUser ( ) ) . thenReturn ( userDao ) ; org . mockito . Mockito . when ( userDao . findByLoginId ( vxUser . getName ( ) ) ) . thenReturn ( xXPortalUser ) ; org . mockito . Mockito . when ( xGroupService . createXGroupWithOutLogin ( vxGroup ) ) . thenReturn ( vxGroup ) ; org . mockito . Mockito . when ( daoManager . getXXPortalUserRole ( ) ) . thenReturn ( userRoleDao ) ; org . mockito . Mockito . when ( userMgr . mapXXPortalUserToVXPortalUserForDefaultAccount ( xXPortalUser ) ) . thenReturn ( userProfile ) ; org . mockito . Mockito . when ( daoManager . getXXModuleDef ( ) ) . thenReturn ( xXModuleDefDao ) ; org . mockito . Mockito . when ( xXModuleDefDao . getAll ( ) ) . thenReturn ( xXModuleDefs ) ; org . apache . ranger . view . VXGroupUserInfo vxGUInfoObj = xUserMgr . createXGroupUserFromMap ( vxGUInfo ) ; "<AssertPlaceHolder>" ; } createXGroupUserFromMap ( org . apache . ranger . rest . VXGroupUserInfo ) { return xUserMgr . createXGroupUserFromMap ( vXGroupUserInfo ) ; }
org . junit . Assert . assertNotNull ( vxGUInfoObj )
testAllowInputModification ( ) { org . deeplearning4j . nn . graph . ComputationGraphConfiguration conf = new org . deeplearning4j . nn . graph . NeuralNetConfiguration . Builder ( ) . graphBuilder ( ) . addInputs ( "in1" , "in2" ) . layer ( "0" , new org . deeplearning4j . nn . graph . DenseLayer . Builder ( ) . nOut ( 10 ) . build ( ) , "in1" ) . layer ( "1" , new org . deeplearning4j . nn . graph . DenseLayer . Builder ( ) . nOut ( 10 ) . build ( ) , "in2" ) . layer ( "2" , new org . deeplearning4j . nn . graph . DenseLayer . Builder ( ) . nOut ( 10 ) . build ( ) , "0" ) . layer ( "3" , new org . deeplearning4j . nn . graph . DenseLayer . Builder ( ) . nOut ( 10 ) . build ( ) , "1" ) . layer ( "4" , new org . deeplearning4j . nn . graph . DenseLayer . Builder ( ) . nOut ( 10 ) . build ( ) , "1" ) . layer ( "5" , new org . deeplearning4j . nn . graph . DenseLayer . Builder ( ) . nOut ( 10 ) . build ( ) , "1" ) . layer ( "6" , new org . deeplearning4j . nn . graph . DenseLayer . Builder ( ) . nOut ( 10 ) . build ( ) , "2" , "3" , "4" , "5" ) . setOutputs ( "6" ) . setInputTypes ( org . deeplearning4j . nn . conf . inputs . InputType . feedForward ( 10 ) , org . deeplearning4j . nn . conf . inputs . InputType . feedForward ( 10 ) ) . build ( ) ; org . deeplearning4j . nn . graph . ComputationGraph cg = new org . deeplearning4j . nn . graph . ComputationGraph ( conf ) ; cg . init ( ) ; org . deeplearning4j . nn . graph . Map < java . lang . String , java . lang . Boolean > exp = new org . deeplearning4j . nn . graph . HashMap ( ) ; exp . put ( "0" , false ) ; exp . put ( "1" , false ) ; exp . put ( "2" , true ) ; exp . put ( "3" , false ) ; exp . put ( "4" , false ) ; exp . put ( "5" , true ) ; exp . put ( "6" , true ) ; for ( java . lang . String s : exp . keySet ( ) ) { boolean allowed = ( ( org . deeplearning4j . nn . layers . feedforward . dense . DenseLayer ) ( cg . getLayer ( s ) ) ) . isInputModificationAllowed ( ) ; "<AssertPlaceHolder>" ; } } get ( int ) { return objects . get ( i ) ; }
org . junit . Assert . assertEquals ( s , exp . get ( s ) , allowed )
testDynamicQueryByPrimaryKeyMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . portal . kernel . model . OrgLabor . class , _dynamicQueryClassLoader ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . eq ( "orgLaborId" , com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ) ) ; java . util . List < com . liferay . portal . kernel . model . OrgLabor > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; }
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
testBasicPathMatch7 ( ) { org . apache . ogt . http . impl . cookie . BasicClientCookie cookie = new org . apache . ogt . http . impl . cookie . BasicClientCookie ( "name" , "value" ) ; org . apache . ogt . http . cookie . CookieOrigin origin = new org . apache . ogt . http . cookie . CookieOrigin ( "somehost" , 80 , "/stuff" , false ) ; org . apache . ogt . http . cookie . CookieAttributeHandler h = new org . apache . ogt . http . impl . cookie . BasicPathHandler ( ) ; "<AssertPlaceHolder>" ; } match ( org . apache . ogt . http . cookie . Cookie , org . apache . ogt . http . cookie . CookieOrigin ) { if ( cookie == null ) { throw new java . lang . IllegalArgumentException ( "Cookie<sp>may<sp>not<sp>be<sp>null" ) ; } if ( origin == null ) { throw new java . lang . IllegalArgumentException ( "Cookie<sp>origin<sp>may<sp>not<sp>be<sp>null" ) ; } java . lang . String host = origin . getHost ( ) ; java . lang . String domain = cookie . getDomain ( ) ; if ( domain == null ) { return false ; } return ( host . equals ( domain ) ) || ( ( domain . startsWith ( "." ) ) && ( host . endsWith ( domain ) ) ) ; }
org . junit . Assert . assertTrue ( h . match ( cookie , origin ) )
shouldReturnTrueWhenSelfLooped ( ) { final org . openkilda . model . SwitchId switchId = new org . openkilda . model . SwitchId ( "00:00:00:00:00:00:00:01" ) ; org . openkilda . messaging . info . event . PathNode source = new org . openkilda . messaging . info . event . PathNode ( switchId , 1 , 0 ) ; org . openkilda . messaging . info . event . PathNode destination = new org . openkilda . messaging . info . event . PathNode ( switchId , 2 , 1 ) ; org . openkilda . messaging . info . event . IslInfoData isl = new org . openkilda . messaging . info . event . IslInfoData ( source , destination , IslChangeType . DISCOVERED , false ) ; "<AssertPlaceHolder>" ; } isSelfLooped ( ) { return java . util . Objects . equals ( source . getSwitchId ( ) , destination . getSwitchId ( ) ) ; }
org . junit . Assert . assertTrue ( isl . isSelfLooped ( ) )
testAddObjectIdFilter ( ) { connection = createConnection ( TestConfig . serverEndpoint , TestConfig . port ) ; connection . connect ( ) ; connection . login ( "userc" , "1" , "voxeo" ) ; com . voxeo . rayo . client . xmpp . stanza . IQ iq = new com . voxeo . rayo . client . xmpp . stanza . IQ ( IQ . Type . set ) . setChild ( new com . voxeo . rayo . client . xmpp . stanza . Bind ( ) . setResource ( "clienttest" ) ) ; com . voxeo . rayo . client . filter . XmppObjectIdFilter filter = new com . voxeo . rayo . client . filter . XmppObjectIdFilter ( iq . getId ( ) ) ; connection . addFilter ( filter ) ; connection . send ( iq ) ; "<AssertPlaceHolder>" ; } poll ( ) { final com . voxeo . moho . Call c = _queue . poll ( ) ; if ( c != null ) { if ( _shared ) { c . unjoin ( _mixer ) ; } dispatch ( new com . voxeo . moho . queue . DequeueEventImpl ( this , c ) ) ; } return c ; }
org . junit . Assert . assertNotNull ( filter . poll ( ) )
testWithExceptionLocal ( ) { java . lang . String message = "Test<sp>failure" ; this . service . setFailException ( new java . lang . RuntimeException ( message ) ) ; try { this . doSimpleTest ( false ) ; org . junit . Assert . fail ( "Expected<sp>to<sp>fail" ) ; } catch ( java . lang . RuntimeException e ) { "<AssertPlaceHolder>" ; } finally { this . service . setFailException ( null ) ; } } fail ( java . lang . Throwable ) { return this . completableFuture . completeExceptionally ( ex ) ; }
org . junit . Assert . assertEquals ( message , e . getMessage ( ) )
saveEncounter_shouldSaveEncounterWithComplexObs ( ) { executeDataSet ( org . openmrs . api . EncounterServiceTest . ENC_OBS_HIERARCHY_DATA_XML ) ; org . openmrs . api . EncounterService es = org . openmrs . api . context . Context . getEncounterService ( ) ; org . openmrs . Encounter encounter = es . getEncounter ( 101 ) ; org . openmrs . Obs observation = buildObs ( ) ; observation . setLocation ( encounter . getLocation ( ) ) ; observation . setPerson ( encounter . getPatient ( ) ) ; encounter . addObs ( observation ) ; es . saveEncounter ( encounter ) ; org . openmrs . api . context . Context . flushSession ( ) ; org . openmrs . api . context . Context . clearSession ( ) ; encounter = es . getEncounter ( 101 ) ; "<AssertPlaceHolder>" ; } getObsAtTopLevel ( boolean ) { return getAllObs ( includeVoided ) . stream ( ) . filter ( ( o ) -> ( o . getObsGroup ( ) ) == null ) . collect ( java . util . stream . Collectors . toCollection ( LinkedHashSet :: new ) ) ; }
org . junit . Assert . assertEquals ( 2 , encounter . getObsAtTopLevel ( true ) . size ( ) )
generateSecureServerNameWithAccountIdAndLoadBalancerIdShouldCreateExpectedName ( ) { java . lang . String expectedName = ( ( ( loadBalancer . getAccountId ( ) ) + "_" ) + ( loadBalancer . getId ( ) ) ) + "_S" ; java . lang . String generatedName = org . openstack . atlas . adapter . helpers . ZxtmNameBuilder . genSslVSName ( loadBalancer . getId ( ) , loadBalancer . getAccountId ( ) ) ; "<AssertPlaceHolder>" ; } getAccountId ( ) { return accountId ; }
org . junit . Assert . assertEquals ( expectedName , generatedName )
order_04_shouldGenerateDailyDigestOneMessage ( ) { log . debug ( "In<sp>shouldGenerateDailyDigestOneMessage" ) ; mockAuthenticationOnUserService ( fr . ippon . tatami . service . MailDigestServiceTest . DAILY_DIGEST_USER ) ; userService . updateDailyDigestRegistration ( true ) ; mockAuthenticationOnFriendshipService ( fr . ippon . tatami . service . MailDigestServiceTest . DAILY_DIGEST_USER ) ; friendshipService . followUser ( "userWhoPostForDigests" ) ; mockAuthenticationOnTimelineServiceWithACurrentUser ( "userWhoPostForDigests@ippon.fr" ) ; mockAuthenticationOnStatusUpdateServiceWithACurrentUser ( "userWhoPostForDigests@ippon.fr" ) ; java . lang . String content = "voil<sp>un<sp>message<sp>qui<sp>devrait<sp>se<sp>retrouver<sp>dans<sp>le<sp>digest<sp>!<sp>" ; statusUpdateService . postStatus ( content , false , new java . util . ArrayList < java . lang . String > ( ) ) ; org . mockito . ArgumentCaptor < java . util . List > statuses = org . mockito . ArgumentCaptor . forClass ( java . util . List . class ) ; mailDigestService . dailyDigest ( ) ; verify ( mailServiceMock ) . sendDailyDigestEmail ( any ( fr . ippon . tatami . domain . User . class ) , statuses . capture ( ) , anyInt ( ) , anyCollection ( ) ) ; "<AssertPlaceHolder>" ; } sendDailyDigestEmail ( fr . ippon . tatami . domain . User , fr . ippon . tatami . service . List , int , fr . ippon . tatami . service . Collection ) { fr . ippon . tatami . service . MailService . log . debug ( "Sending<sp>daily<sp>digest<sp>e-mail<sp>to<sp>User<sp>'{}'" , user . getLogin ( ) ) ; fr . ippon . tatami . service . Map < java . lang . String , java . lang . Object > model = new fr . ippon . tatami . service . HashMap < java . lang . String , java . lang . Object > ( ) ; model . put ( "user" , user ) ; model . put ( "tatamiUrl" , tatamiUrl ) ; model . put ( "statuses" , statuses ) ; model . put ( "nbStatus" , nbStatus ) ; model . put ( "suggestedUsers" , suggestedUsers ) ; sendTextFromTemplate ( user . getLogin ( ) , model , "dailyDigest" , this . locale ) ; }
org . junit . Assert . assertThat ( ( ( statuses . getValue ( ) . size ( ) ) == 1 ) , org . hamcrest . CoreMatchers . is ( true ) )
testDecryptWithNoEncryptedProperties ( ) { java . util . Dictionary dsProps = new java . util . Hashtable ( ) ; dsProps . put ( "dataSourceName" , "testDS" ) ; dsProps . put ( "timeout" , 2000 ) ; org . ops4j . pax . jdbc . config . impl . Decryptor decryptor = new org . ops4j . pax . jdbc . config . impl . Decryptor ( getEncryptor ( ) ) ; java . util . Dictionary decryptedConfig = decryptor . decrypt ( dsProps ) ; for ( java . util . Enumeration e = decryptedConfig . keys ( ) ; e . hasMoreElements ( ) ; ) { java . lang . String key = ( ( java . lang . String ) ( e . nextElement ( ) ) ) ; java . lang . String expectedValue = java . lang . String . valueOf ( dsProps . get ( key ) ) ; java . lang . String actualValue = java . lang . String . valueOf ( decryptedConfig . get ( key ) ) ; "<AssertPlaceHolder>" ; } } decrypt ( java . util . Dictionary ) { if ( ( decryptor ) == null ) { return config ; } java . util . Dictionary < java . lang . String , java . lang . Object > decryptedConfig = new java . util . Hashtable ( ) ; for ( java . util . Enumeration < java . lang . String > e = config . keys ( ) ; e . hasMoreElements ( ) ; ) { final java . lang . String key = e . nextElement ( ) ; java . lang . String value = java . lang . String . valueOf ( config . get ( key ) ) ; if ( ( ( config . get ( key ) ) instanceof java . lang . String ) && ( org . ops4j . pax . jdbc . config . impl . Decryptor . isEncrypted ( value ) ) ) { final java . lang . String plainText = decryptValue ( value ) ; if ( plainText != null ) { decryptedConfig . put ( key , plainText ) ; } } else { decryptedConfig . put ( key , value ) ; } } return decryptedConfig ; }
org . junit . Assert . assertEquals ( expectedValue , actualValue )
testEqualsAndHashCode ( ) { org . jboss . forge . furnace . proxy . test . Bean bean1 = new org . jboss . forge . furnace . proxy . test . Bean ( ) ; java . lang . String attributeValue = "String" ; bean1 . setAtt ( attributeValue ) ; org . jboss . forge . furnace . proxy . test . Bean enhancedObj = org . jboss . forge . furnace . proxy . Proxies . enhance ( org . jboss . forge . furnace . proxy . test . Bean . class , new org . jboss . forge . furnace . proxy . ClassLoaderInterceptor ( org . jboss . forge . furnace . proxy . test . Bean . class . getClassLoader ( ) , bean1 ) ) ; enhancedObj . setAtt ( attributeValue ) ; org . jboss . forge . furnace . proxy . test . Bean bean2 = new org . jboss . forge . furnace . proxy . test . Bean ( ) ; bean2 . setAtt ( attributeValue ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == obj ) { return true ; } else if ( ( null == obj ) || ( ! ( getClass ( ) . equals ( obj . getClass ( ) ) ) ) ) { return false ; } org . jboss . forge . furnace . manager . maven . addon . AddonDependencySelector that = ( ( org . jboss . forge . furnace . manager . maven . addon . AddonDependencySelector ) ( obj ) ) ; return ( depth ) == ( that . depth ) ; }
org . junit . Assert . assertTrue ( enhancedObj . equals ( bean2 ) )
testDuplicateLocalVariableMVELConsequence ( ) { final org . kie . internal . builder . KnowledgeBuilder kbuilder = org . kie . internal . builder . KnowledgeBuilderFactory . newKnowledgeBuilder ( ) ; kbuilder . add ( org . kie . internal . io . ResourceFactory . newInputStreamResource ( getClass ( ) . getResourceAsStream ( "test_DuplicateLocalVariableMVELConsequence.drl" ) ) , ResourceType . DRL ) ; "<AssertPlaceHolder>" ; } hasErrors ( ) { return messages . stream ( ) . anyMatch ( ( m ) -> DMNMessage . Severity . ERROR . equals ( m . getSeverity ( ) ) ) ; }
org . junit . Assert . assertTrue ( kbuilder . hasErrors ( ) )
testToJson ( ) { io . vertx . ext . mongo . UpdateOptions options = new io . vertx . ext . mongo . UpdateOptions ( ) ; io . vertx . ext . mongo . WriteOption . WriteOption writeOption = MAJORITY ; boolean multi = io . vertx . test . core . TestUtils . randomBoolean ( ) ; boolean upsert = io . vertx . test . core . TestUtils . randomBoolean ( ) ; options . setWriteOption ( writeOption ) ; options . setMulti ( multi ) ; options . setUpsert ( upsert ) ; "<AssertPlaceHolder>" ; } toJson ( ) { io . vertx . core . json . JsonObject mongoClientBulkWriteResultJson = new io . vertx . core . json . JsonObject ( ) ; if ( ( insertedCount ) != ( io . vertx . ext . mongo . MongoClientBulkWriteResult . DEFAULT_INSERTED_COUNT ) ) { mongoClientBulkWriteResultJson . put ( io . vertx . ext . mongo . MongoClientBulkWriteResult . INSERTED_COUNT , insertedCount ) ; } if ( ( matchedCount ) != ( io . vertx . ext . mongo . MongoClientBulkWriteResult . DEFAULT_MATCHED_COUNT ) ) { mongoClientBulkWriteResultJson . put ( io . vertx . ext . mongo . MongoClientBulkWriteResult . MATCHED_COUNT , matchedCount ) ; } if ( ( deletedCount ) != ( io . vertx . ext . mongo . MongoClientBulkWriteResult . DEFAULT_DELETED_COUNT ) ) { mongoClientBulkWriteResultJson . put ( io . vertx . ext . mongo . MongoClientBulkWriteResult . DELETED_COUNT , deletedCount ) ; } if ( ( modifiedCount ) != ( io . vertx . ext . mongo . MongoClientBulkWriteResult . DEFAULT_MODIFIED_COUNT ) ) { mongoClientBulkWriteResultJson . put ( io . vertx . ext . mongo . MongoClientBulkWriteResult . MODIFIED_COUNT , modifiedCount ) ; } if ( ( upserts ) != null ) { mongoClientBulkWriteResultJson . put ( io . vertx . ext . mongo . MongoClientBulkWriteResult . UPSERTS , new io . vertx . core . json . JsonArray ( upserts ) ) ; } return mongoClientBulkWriteResultJson ; }
org . junit . Assert . assertEquals ( options , new io . vertx . ext . mongo . UpdateOptions ( options . toJson ( ) ) )
testOnlyHigh ( ) { tillerino . tillerinobot . RateLimiter limiter = new tillerino . tillerinobot . RateLimiter ( ) ; java . util . concurrent . atomic . AtomicInteger high = new java . util . concurrent . atomic . AtomicInteger ( ) ; java . util . concurrent . atomic . AtomicInteger low = new java . util . concurrent . atomic . AtomicInteger ( ) ; tillerino . tillerinobot . RateLimiterTest . exec . submit ( ( ) -> { limiter . setThreadPriority ( RateLimiter . REQUEST ) ; for ( ; ; ) { limiter . limitRate ( ) ; high . incrementAndGet ( ) ; } } ) ; tillerino . tillerinobot . RateLimiterTest . exec . submit ( ( ) -> { limiter . setThreadPriority ( RateLimiter . MAINTENANCE ) ; for ( ; ; ) { limiter . limitRate ( ) ; low . incrementAndGet ( ) ; } } ) ; java . util . stream . IntStream . range ( 0 , 100 ) . forEach ( ( x ) -> limiter . addPermit ( ) ) ; while ( ( ( high . get ( ) ) + ( low . get ( ) ) ) < 100 ) { java . lang . Thread . sleep ( 10 ) ; } "<AssertPlaceHolder>" ; } get ( ) { throw new java . lang . UnsupportedOperationException ( ) ; }
org . junit . Assert . assertEquals ( 100 , high . get ( ) )
whenLockNotHeldCheckReturnsFalse ( ) { com . mongodb . client . MongoDatabase db = new com . github . fakemongo . Fongo ( com . github . mongobee . dao . LockDaoTest . TEST_SERVER ) . getDatabase ( com . github . mongobee . dao . LockDaoTest . DB_NAME ) ; com . github . mongobee . dao . LockDao dao = new com . github . mongobee . dao . LockDao ( com . github . mongobee . dao . LockDaoTest . LOCK_COLLECTION_NAME ) ; dao . intitializeLock ( db ) ; "<AssertPlaceHolder>" ; } isLockHeld ( com . mongodb . client . MongoDatabase ) { return ( db . getCollection ( lockCollectionName ) . count ( ) ) == 1 ; }
org . junit . Assert . assertFalse ( dao . isLockHeld ( db ) )
empty ( ) { "<AssertPlaceHolder>" ; } parse ( java . lang . String ) { if ( input != null ) { java . util . Map < java . lang . String , java . lang . String > matches = new java . util . HashMap ( ) ; java . util . regex . Matcher matcher = org . jboss . gwt . elemento . processor . ExpressionParser . PATTERN . matcher ( input ) ; while ( matcher . find ( ) ) { java . lang . String match = matcher . group ( ) ; validate ( match ) ; matches . put ( match , stripExpression ( match ) ) ; } return matches ; } return java . util . Collections . emptyMap ( ) ; }
org . junit . Assert . assertTrue ( parser . parse ( "" ) . isEmpty ( ) )
testFromEbXMLNull ( ) { org . openehealth . ipf . commons . ihe . xds . core . requests . query . FindDocumentsQuery result = new org . openehealth . ipf . commons . ihe . xds . core . requests . query . FindDocumentsQuery ( ) ; transformer . fromEbXML ( result , null ) ; "<AssertPlaceHolder>" ; } fromEbXML ( org . openehealth . ipf . commons . ihe . xds . core . requests . query . GetAllQuery , org . openehealth . ipf . commons . ihe . xds . core . ebxml . EbXMLAdhocQueryRequest ) { if ( ( query == null ) || ( ebXML == null ) ) { return ; } org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . QuerySlotHelper slots = new org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . QuerySlotHelper ( ebXML ) ; java . lang . String patientId = slots . toString ( org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . PATIENT_ID ) ; query . setPatientId ( org . openehealth . ipf . commons . ihe . xds . core . metadata . Hl7v2Based . parse ( patientId , org . openehealth . ipf . commons . ihe . xds . core . metadata . Identifiable . class ) ) ; query . setStatusDocuments ( slots . toStatus ( org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . DOC_ENTRY_STATUS ) ) ; query . setStatusFolders ( slots . toStatus ( org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . FOLDER_STATUS ) ) ; query . setStatusSubmissionSets ( slots . toStatus ( org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . SUBMISSION_SET_STATUS ) ) ; query . setConfidentialityCodes ( slots . toCodeQueryList ( org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . DOC_ENTRY_CONFIDENTIALITY_CODE , org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . DOC_ENTRY_CONFIDENTIALITY_CODE_SCHEME ) ) ; query . setFormatCodes ( slots . toCodeList ( org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . DOC_ENTRY_FORMAT_CODE ) ) ; query . setHomeCommunityId ( ebXML . getHome ( ) ) ; query . setDocumentEntryTypes ( slots . toDocumentEntryType ( org . openehealth . ipf . commons . ihe . xds . core . transform . requests . query . DOC_ENTRY_TYPE ) ) ; }
org . junit . Assert . assertEquals ( new org . openehealth . ipf . commons . ihe . xds . core . requests . query . FindDocumentsQuery ( ) , result )
contentsEqual ( ) { parser . Terminal t = new parser . Terminal ( "test" , null ) ; parser . Terminal [ ] t1 = new parser . Terminal [ ] { t } ; parser . Terminal [ ] t2 = new parser . Terminal [ ] { t } ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ! ( o instanceof parser . Item ) ) return false ; parser . Item other = ( ( parser . Item ) ( o ) ) ; if ( ( production . equals ( other . production ) ) && ( ( dot ) == ( other . dot ) ) ) { return ( itemLookahead ) == null ? ( other . itemLookahead ) == null : itemLookahead . equals ( other . itemLookahead ) ; } else { return false ; } }
org . junit . Assert . assertFalse ( t1 . equals ( t2 ) )
testGetLocalIPString ( ) { "<AssertPlaceHolder>" ; } getLocalIPString ( ) { final java . net . InetAddress addr = com . cloud . utils . net . NetUtils . getLocalInetAddress ( ) ; if ( addr != null ) { return addr . getHostAddress ( ) ; } return "127.0.0.1" ; }
org . junit . Assert . assertNotNull ( com . cloud . utils . net . NetUtils . getLocalIPString ( ) )
forEachWithIndexWithChainedValues ( ) { org . eclipse . collections . impl . map . mutable . UnifiedMap < org . eclipse . collections . impl . CollidingInt , java . lang . String > map = org . eclipse . collections . impl . map . mutable . UnifiedMap . newMap ( ) ; int size = 100000 ; for ( int i = 0 ; i < size ; i ++ ) { map . put ( new org . eclipse . collections . impl . CollidingInt ( i , 3 ) , org . eclipse . collections . impl . map . mutable . UnifiedMapAcceptanceTest . createVal ( i ) ) ; } int [ ] intArray = new int [ 1 ] ; intArray [ 0 ] = - 1 ; map . forEachWithIndex ( ( value , index ) -> { "<AssertPlaceHolder>" ; intArray [ 0 ] = index ; } ) ; } forEachWithIndex ( int , int , org . eclipse . collections . api . block . procedure . primitive . ObjectIntProcedure ) { throw new java . lang . UnsupportedOperationException ( ( ( this . getClass ( ) . getSimpleName ( ) ) + ".forEachWithIndex()<sp>not<sp>implemented<sp>yet" ) ) ; }
org . junit . Assert . assertEquals ( index , ( ( intArray [ 0 ] ) + 1 ) )
testDisjointClassInconsistency ( ) { manager . addAxiom ( ontology , DisjointClasses ( c1 , c2 ) ) ; startReasoner ( ) ; boolean consistent = reasoner . isConsistent ( ) ; "<AssertPlaceHolder>" ; manager . removeAxiom ( ontology , DisjointClasses ( c1 , c2 ) ) ; } isConsistent ( ) { return true ; }
org . junit . Assert . assertFalse ( consistent )
testCompareWorkflowRunFiles_Smaller ( ) { net . sourceforge . seqware . pipeline . deciders . BasicDeciderTest . TestingDecider decider = ( ( net . sourceforge . seqware . pipeline . deciders . BasicDeciderTest . TestingDecider ) ( instance ) ) ; decider . setMetaws ( ( ( net . sourceforge . seqware . common . metadata . MetadataWS ) ( metadata ) ) ) ; decider . setMetaType ( fastq_gz ) ; java . util . List < java . lang . String > filesToRun = new java . util . ArrayList ( ) ; filesToRun . add ( "s3://abcco.uploads/s_G1_L001_R1_001_index8.fastq.gz" ) ; int workflowRunAcc = 6654 ; java . util . Set < java . lang . Integer > inputFiles = metadata . getWorkflowRun ( workflowRunAcc ) . getInputFileAccessions ( ) ; "<AssertPlaceHolder>" ; } compareWorkflowRunFiles ( java . util . Set , java . util . Collection ) { java . util . Set < java . lang . String > filesHasRun = determineFilePaths ( filesSWIDsHasRun ) ; net . sourceforge . seqware . common . util . Log . info ( ( "Files<sp>to<sp>run:<sp>" + ( org . apache . commons . lang3 . StringUtils . join ( filesToRun , ',' ) ) ) ) ; net . sourceforge . seqware . common . util . Log . info ( ( "Files<sp>has<sp>run:<sp>" + ( org . apache . commons . lang3 . StringUtils . join ( filesHasRun , ',' ) ) ) ) ; java . util . Set < java . lang . String > setToRun = new java . util . HashSet ( filesToRun ) ; java . util . Set < java . lang . String > setHasRun = new java . util . HashSet ( filesHasRun ) ; if ( setToRun . equals ( setHasRun ) ) { return net . sourceforge . seqware . pipeline . deciders . BasicDecider . FILE_STATUS . SAME_FILES ; } if ( net . sourceforge . seqware . pipeline . tools . SetOperations . isSubset ( setHasRun , setToRun ) ) { return net . sourceforge . seqware . pipeline . deciders . BasicDecider . FILE_STATUS . PAST_SUBSET_OR_INTERSECTION ; } if ( net . sourceforge . seqware . pipeline . tools . SetOperations . isSuperset ( setHasRun , setToRun ) ) { return net . sourceforge . seqware . pipeline . deciders . BasicDecider . FILE_STATUS . PAST_SUPERSET ; } if ( ( net . sourceforge . seqware . pipeline . tools . SetOperations . intersection ( setToRun , setHasRun ) . size ( ) ) > 0 ) { return net . sourceforge . seqware . pipeline . deciders . BasicDecider . FILE_STATUS . PAST_SUBSET_OR_INTERSECTION ; } return net . sourceforge . seqware . pipeline . deciders . BasicDecider . FILE_STATUS . DISJOINT_SETS ; }
org . junit . Assert . assertTrue ( ( ( ( ( net . sourceforge . seqware . pipeline . deciders . BasicDecider ) ( instance ) ) . compareWorkflowRunFiles ( inputFiles , filesToRun ) ) == ( BasicDecider . FILE_STATUS . PAST_SUPERSET ) ) )
testToString ( ) { java . lang . String result = fixture . toString ( ) ; java . lang . String left = "[declaration]<sp>sequence[" ; "<AssertPlaceHolder>" ; } toString ( ) { return fName ; }
org . junit . Assert . assertEquals ( left , result . substring ( 0 , left . length ( ) ) )
testViewHomeTemplate ( ) { com . ewcms . publication . freemarker . preview . PreviewService service = newPreviewService ( ) ; java . io . ByteArrayOutputStream out = new java . io . ByteArrayOutputStream ( ) ; boolean mock = false ; service . viewHomeTemplate ( out , new com . ewcms . core . site . model . Site ( ) , new com . ewcms . core . site . model . Channel ( ) , initTemplate ( "index.html" ) , mock ) ; "<AssertPlaceHolder>" ; out . close ( ) ; } size ( ) { return this . size ; }
org . junit . Assert . assertTrue ( ( ( out . size ( ) ) > 0 ) )
getItemsProperties ( ) { java . util . Map < java . lang . String , java . util . Map < java . lang . String , java . lang . String > > itemsProperties = elementPresenter . getSimpleItemsProperties ( ) ; "<AssertPlaceHolder>" ; } getSimpleItemsProperties ( ) { return elementViewList . stream ( ) . collect ( java . util . stream . Collectors . toMap ( ElementView :: getItemId , ( itemElementView ) -> org . drools . workbench . screens . scenariosimulation . client . collectioneditor . propertyPresenter . getSimpleProperties ( itemElementView . getItemId ( ) ) ) ) ; }
org . junit . Assert . assertNotNull ( itemsProperties )
findElementsOneByEmpty ( ) { final org . openqa . selenium . support . pagefactory . ByAllTest . AllDriver driver = mock ( org . openqa . selenium . support . pagefactory . ByAllTest . AllDriver . class ) ; final java . util . List < org . openqa . selenium . WebElement > elems = new java . util . ArrayList < org . openqa . selenium . WebElement > ( ) ; when ( driver . findElementsByName ( "cheese" ) ) . thenReturn ( elems ) ; org . openqa . selenium . support . pagefactory . ByAll by = new org . openqa . selenium . support . pagefactory . ByAll ( org . openqa . selenium . By . name ( "cheese" ) ) ; "<AssertPlaceHolder>" ; } findElements ( org . openqa . selenium . By ) { return by . findElements ( this ) ; }
org . junit . Assert . assertThat ( by . findElements ( driver ) , org . hamcrest . Matchers . equalTo ( elems ) )
testMultipleColumnPrefixFilterWithManyFamilies ( ) { java . lang . String family1 = "Family1" ; java . lang . String family2 = "Family2" ; org . apache . hadoop . hbase . filter . HTableDescriptor htd = new org . apache . hadoop . hbase . filter . HTableDescriptor ( "TestMultipleColumnPrefixFilter" ) ; htd . addFamily ( new org . apache . hadoop . hbase . filter . HColumnDescriptor ( family1 ) ) ; htd . addFamily ( new org . apache . hadoop . hbase . filter . HColumnDescriptor ( family2 ) ) ; org . apache . hadoop . hbase . filter . HRegionInfo info = new org . apache . hadoop . hbase . filter . HRegionInfo ( htd . getName ( ) , null , null , false ) ; org . apache . hadoop . hbase . regionserver . HRegion region = org . apache . hadoop . hbase . regionserver . HRegion . createHRegion ( info , org . apache . hadoop . hbase . filter . TestMultipleColumnPrefixFilter . TEST_UTIL . getDataTestDir ( ) , org . apache . hadoop . hbase . filter . TestMultipleColumnPrefixFilter . TEST_UTIL . getConfiguration ( ) , htd ) ; java . util . List < java . lang . String > rows = generateRandomWords ( 100 , "row" ) ; java . util . List < java . lang . String > columns = generateRandomWords ( 10000 , "column" ) ; long maxTimestamp = 3 ; java . util . List < org . apache . hadoop . hbase . filter . KeyValue > kvList = new java . util . ArrayList < org . apache . hadoop . hbase . filter . KeyValue > ( ) ; java . util . Map < java . lang . String , java . util . List < org . apache . hadoop . hbase . filter . KeyValue > > prefixMap = new java . util . HashMap < java . lang . String , java . util . List < org . apache . hadoop . hbase . filter . KeyValue > > ( ) ; prefixMap . put ( "p" , new java . util . ArrayList < org . apache . hadoop . hbase . filter . KeyValue > ( ) ) ; prefixMap . put ( "q" , new java . util . ArrayList < org . apache . hadoop . hbase . filter . KeyValue > ( ) ) ; prefixMap . put ( "s" , new java . util . ArrayList < org . apache . hadoop . hbase . filter . KeyValue > ( ) ) ; java . lang . String valueString = "ValueString" ; for ( java . lang . String row : rows ) { org . apache . hadoop . hbase . client . Put p = new org . apache . hadoop . hbase . client . Put ( org . apache . hadoop . hbase . util . Bytes . toBytes ( row ) ) ; p . setWriteToWAL ( false ) ; for ( java . lang . String column : columns ) { for ( long timestamp = 1 ; timestamp <= maxTimestamp ; timestamp ++ ) { double rand = java . lang . Math . random ( ) ; org . apache . hadoop . hbase . filter . KeyValue kv ; if ( rand < 0.5 ) kv = org . apache . hadoop . hbase . filter . KeyValueTestUtil . create ( row , family1 , column , timestamp , valueString ) ; else kv = org . apache . hadoop . hbase . filter . KeyValueTestUtil . create ( row , family2 , column , timestamp , valueString ) ; p . add ( kv ) ; kvList . add ( kv ) ; for ( java . lang . String s : prefixMap . keySet ( ) ) { if ( column . startsWith ( s ) ) { prefixMap . get ( s ) . add ( kv ) ; } } } } region . put ( p ) ; } org . apache . hadoop . hbase . filter . MultipleColumnPrefixFilter filter ; org . apache . hadoop . hbase . client . Scan scan = new org . apache . hadoop . hbase . client . Scan ( ) ; scan . setMaxVersions ( ) ; byte [ ] [ ] filter_prefix = new byte [ 2 ] [ ] ; filter_prefix [ 0 ] = new byte [ ] { 'p' } ; filter_prefix [ 1 ] = new byte [ ] { 'q' } ; filter = new org . apache . hadoop . hbase . filter . MultipleColumnPrefixFilter ( filter_prefix ) ; scan . setFilter ( filter ) ; java . util . List < org . apache . hadoop . hbase . filter . KeyValue > results = new java . util . ArrayList < org . apache . hadoop . hbase . filter . KeyValue > ( ) ; org . apache . hadoop . hbase . regionserver . InternalScanner scanner = region . getScanner ( scan ) ; while ( scanner . next ( results ) ) ; "<AssertPlaceHolder>" ; region . close ( ) ; region . getLog ( ) . closeAndDelete ( ) ; } get ( java . util . List ) { byte [ ] [ ] rows = new byte [ gets . size ( ) ] [ ] ; int maxVersions = 1 ; int count = 0 ; for ( org . apache . hadoop . hbase . client . Get g : gets ) { if ( count == 0 ) { maxVersions = g . getMaxVersions ( ) ; } else if ( ( g . getMaxVersions ( ) ) != maxVersions ) { org . apache . hadoop . hbase . rest . client . RemoteHTable . LOG .
org . junit . Assert . assertEquals ( ( ( prefixMap . get ( "p" ) . size ( ) ) + ( prefixMap . get ( "q" ) . size ( ) ) ) , results . size ( ) )
testReadMismatchVersion ( ) { java . lang . String payload = "test" ; org . apache . flink . core . io . VersionedIOWriteableTest . TestWriteable testWriteable = new org . apache . flink . core . io . VersionedIOWriteableTest . TestWriteable ( 1 , payload ) ; byte [ ] serialized ; try ( org . apache . flink . core . memory . ByteArrayOutputStreamWithPos out = new org . apache . flink . core . memory . ByteArrayOutputStreamWithPos ( ) ) { testWriteable . write ( new org . apache . flink . core . memory . DataOutputViewStreamWrapper ( out ) ) ; serialized = out . toByteArray ( ) ; } testWriteable = new org . apache . flink . core . io . VersionedIOWriteableTest . TestWriteable ( 2 ) ; try ( org . apache . flink . core . memory . ByteArrayInputStreamWithPos in = new org . apache . flink . core . memory . ByteArrayInputStreamWithPos ( serialized ) ) { testWriteable . read ( new org . apache . flink . core . memory . DataInputViewStreamWrapper ( in ) ) ; org . junit . Assert . fail ( "Version<sp>mismatch<sp>expected." ) ; } catch ( org . apache . flink . core . io . VersionMismatchException ignored ) { } "<AssertPlaceHolder>" ; } getData ( ) { return data ; }
org . junit . Assert . assertEquals ( null , testWriteable . getData ( ) )
testSetFont ( ) { org . eclipse . swt . graphics . Font font = createFont ( ) ; gc . setFont ( font ) ; org . eclipse . swt . internal . graphics . GCOperation [ ] gcOperations = org . eclipse . swt . graphics . ControlGC_Test . getGCOperations ( gc ) ; org . eclipse . swt . internal . graphics . GCOperation . SetProperty operation = ( ( org . eclipse . swt . internal . graphics . GCOperation . SetProperty ) ( gcOperations [ 0 ] ) ) ; "<AssertPlaceHolder>" ; } getFontData ( ) { return fontData ; }
org . junit . Assert . assertEquals ( font . getFontData ( ) [ 0 ] , operation . value )
testDeleteIdsEmpty ( ) { com . j256 . ormlite . dao . BaseDaoImpl < com . j256 . ormlite . dao . Foo , java . lang . Integer > dao = new com . j256 . ormlite . dao . BaseDaoImpl < com . j256 . ormlite . dao . Foo , java . lang . Integer > ( connectionSource , com . j256 . ormlite . dao . Foo . class ) { } ; dao . initialize ( ) ; java . util . List < java . lang . Integer > fooList = new java . util . ArrayList < java . lang . Integer > ( ) ; "<AssertPlaceHolder>" ; } deleteIds ( java . util . Collection ) { checkForInitialized ( ) ; if ( ( ids == null ) || ( ids . isEmpty ( ) ) ) { return 0 ; } else { com . j256 . ormlite . support . DatabaseConnection connection = connectionSource . getReadWriteConnection ( tableInfo . getTableName ( ) ) ; try { return statementExecutor . deleteIds ( connection , ids , objectCache ) ; } finally { connectionSource . releaseConnection ( connection ) ; } } }
org . junit . Assert . assertEquals ( 0 , dao . deleteIds ( fooList ) )
serialize ( ) { com . google . gson . Gson gson = com . github . seratch . jslack . common . json . GsonFactory . createSnakeCase ( ) ; com . github . seratch . jslack . api . model . event . TeamProfileReorderEvent event = new com . github . seratch . jslack . api . model . event . TeamProfileReorderEvent ( ) ; java . lang . String generatedJson = gson . toJson ( event ) ; java . lang . String expectedJson = "{\"type\":\"team_profile_reorder\"}" ; "<AssertPlaceHolder>" ; } createSnakeCase ( ) { return new com . google . gson . GsonBuilder ( ) . setFieldNamingPolicy ( FieldNamingPolicy . LOWER_CASE_WITH_UNDERSCORES ) . registerTypeAdapter ( com . github . seratch . jslack . api . model . block . LayoutBlock . class , new com . github . seratch . jslack . common . json . GsonLayoutBlockFactory ( ) ) . registerTypeAdapter ( com . github . seratch . jslack . api . model . block . composition . TextObject . class , new com . github . seratch . jslack . common . json . GsonTextObjectFactory ( ) ) . registerTypeAdapter ( com . github . seratch . jslack . api . model . block . ContextBlockElement . class , new com . github . seratch . jslack . common . json . GsonContextBlockElementFactory ( ) ) . registerTypeAdapter ( com . github . seratch . jslack . api . model . block . element . BlockElement . class , new com . github . seratch . jslack . common . json . GsonBlockElementFactory ( ) ) . create ( ) ; }
org . junit . Assert . assertThat ( generatedJson , org . hamcrest . CoreMatchers . is ( expectedJson ) )
testGetUserOperationLog_Success ( ) { container . login ( "1" , org . oscm . operatorservice . bean . ROLE_PLATFORM_OPERATOR ) ; operationList . add ( "Add" ) ; callerRolles . add ( OrganizationRoleType . PLATFORM_OPERATOR ) ; byte [ ] result = operatorService . getUserOperationLog ( operationList , 100000 , 200000 ) ; "<AssertPlaceHolder>" ; verify ( auditLogMock ) . loadAuditLogs ( operationList , 100000 , 200000 ) ; } getUserOperationLog ( org . oscm . test . stubs . List , long , long ) { throw new java . lang . UnsupportedOperationException ( ) ; }
org . junit . Assert . assertNotNull ( result )
testImgWithCacheControlPrivateReservedAndFetchNotTriggered ( ) { cache . addResponse ( new org . apache . shindig . gadgets . http . HttpRequest ( org . apache . shindig . common . uri . Uri . parse ( org . apache . shindig . gadgets . rewrite . CacheEnforcementVisitorTest . IMG_URL ) ) , new org . apache . shindig . gadgets . http . HttpResponseBuilder ( ) . setResponseString ( "test" ) . addHeader ( "Cache-Control" , "private" ) . create ( ) ) ; "<AssertPlaceHolder>" ; checkVisitBypassedAndFetchTriggered ( "img" , org . apache . shindig . gadgets . rewrite . CacheEnforcementVisitorTest . IMG_URL , false , false ) ; } getResponse ( org . apache . shindig . gadgets . http . HttpRequest ) { ( readCount ) ++ ; return data . get ( request . getUri ( ) ) ; }
org . junit . Assert . assertTrue ( ( ( cache . getResponse ( new org . apache . shindig . gadgets . http . HttpRequest ( org . apache . shindig . common . uri . Uri . parse ( org . apache . shindig . gadgets . rewrite . CacheEnforcementVisitorTest . IMG_URL ) ) ) ) != null ) )
testBondIsAromaticT ( ) { org . openscience . cdk . isomorphism . matchers . Expr expr = new org . openscience . cdk . isomorphism . matchers . Expr ( IS_AROMATIC ) ; org . openscience . cdk . interfaces . IBond bond = mock ( org . openscience . cdk . interfaces . IBond . class ) ; when ( bond . isAromatic ( ) ) . thenReturn ( true ) ; "<AssertPlaceHolder>" ; } matches ( org . openscience . cdk . interfaces . IAtomContainer ) { return matches ( atomContainer , true ) ; }
org . junit . Assert . assertTrue ( expr . matches ( bond ) )
testNOrFunction ( ) { items . add ( new org . eclipse . smarthome . core . library . types . ArithmeticGroupFunctionTest . TestItem ( "TestItem1" , OpenClosedType . CLOSED ) ) ; items . add ( new org . eclipse . smarthome . core . library . types . ArithmeticGroupFunctionTest . TestItem ( "TestItem2" , org . eclipse . smarthome . core . types . UnDefType . UNDEF ) ) ; items . add ( new org . eclipse . smarthome . core . library . types . ArithmeticGroupFunctionTest . TestItem ( "TestItem3" , OpenClosedType . OPEN ) ) ; items . add ( new org . eclipse . smarthome . core . library . types . ArithmeticGroupFunctionTest . TestItem ( "TestItem4" , OpenClosedType . CLOSED ) ) ; items . add ( new org . eclipse . smarthome . core . library . types . ArithmeticGroupFunctionTest . TestItem ( "TestItem5" , org . eclipse . smarthome . core . types . UnDefType . UNDEF ) ) ; function = new org . eclipse . smarthome . core . library . types . ArithmeticGroupFunction . NOr ( OpenClosedType . OPEN , OpenClosedType . CLOSED ) ; org . eclipse . smarthome . core . types . State state = function . calculate ( items ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertEquals ( OpenClosedType . CLOSED , state )
subInterfaceExcludeSelf ( ) { for ( java . lang . Class < ? > type : nl . jqno . equalsverifier . internal . reflection . SuperclassIterable . of ( nl . jqno . equalsverifier . internal . reflection . SuperclassIterableTest . SimpleSubInterface . class ) ) { actual . add ( type ) ; } "<AssertPlaceHolder>" ; } of ( java . lang . Class ) { return new nl . jqno . equalsverifier . internal . reflection . SuperclassIterable ( type , false ) ; }
org . junit . Assert . assertEquals ( emptyList ( ) , actual )
deleteAllNotes ( ) { openNotes ( ) ; java . util . List < org . openqa . selenium . WebElement > notes = com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . wd . findElements ( org . openqa . selenium . By . id ( "com.evernote:id/title" ) ) ; while ( ( notes . size ( ) ) != 0 ) { org . openqa . selenium . WebElement note = notes . get ( 0 ) ; java . util . HashMap < java . lang . String , java . lang . String > values = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; values . put ( "element" , ( ( org . openqa . selenium . remote . RemoteWebElement ) ( note ) ) . getId ( ) ) ; ( ( org . openqa . selenium . JavascriptExecutor ) ( com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . wd ) ) . executeScript ( "mobile:<sp>longClick" , values ) ; com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . pause ( 1000 ) ; org . openqa . selenium . WebElement footer = ( ( org . openqa . selenium . remote . RemoteWebDriver ) ( com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . wd ) ) . findElementById ( "com.evernote:id/efab_menu_footer" ) ; java . util . List < org . openqa . selenium . WebElement > footerItems = footer . findElements ( org . openqa . selenium . By . className ( "android.widget.ImageButton" ) ) ; org . openqa . selenium . WebElement moreOptions = footerItems . get ( ( ( footerItems . size ( ) ) - 1 ) ) ; moreOptions . click ( ) ; com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . pause ( 1000 ) ; org . openqa . selenium . WebElement deleteButton = com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . wd . findElements ( org . openqa . selenium . By . id ( "com.evernote:id/item_title" ) ) . get ( 5 ) ; deleteButton . click ( ) ; com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . pause ( 1000 ) ; org . openqa . selenium . WebElement deleteConfirmation = com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . wd . findElement ( org . openqa . selenium . By . id ( "android:id/button1" ) ) ; deleteConfirmation . click ( ) ; com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . pause ( 1000 ) ; notes = com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . wd . findElements ( org . openqa . selenium . By . id ( "com.evernote:id/title" ) ) ; } "<AssertPlaceHolder>" ; } pause ( long ) { try { com . sugarcrm . candybean . automation . AutomationInterface . logger . info ( ( ( "Pausing<sp>for<sp>" + ms ) + "ms<sp>via<sp>thread<sp>sleep." ) ) ; java . lang . Thread . sleep ( ms ) ; } catch ( java . lang . InterruptedException e ) { throw new com . sugarcrm . candybean . exceptions . CandybeanException ( e . getMessage ( ) ) ; } }
org . junit . Assert . assertEquals ( com . sugarcrm . candybean . examples . mobile . EvernoteAndroidTest . iface . wd . findElements ( org . openqa . selenium . By . id ( "com.evernote:id/title" ) ) . size ( ) , 0 )
testPageWhichDoesAdaptWithExtension ( ) { org . apache . sling . api . resource . Resource r = provider . getResource ( resolveContext , ( ( goodMntPath ) + ".json" ) , null , null ) ; "<AssertPlaceHolder>" ; } getResource ( org . apache . sling . spi . resource . provider . ResolveContext , java . lang . String , org . apache . sling . spi . resource . provider . ResourceContext , org . apache . sling . api . resource . Resource ) { org . apache . sling . api . resource . ResourceResolver resourceResolver = rc . getResourceResolver ( ) ; if ( path == null ) { return null ; } else if ( path . equals ( com . adobe . acs . commons . genericlists . impl . GenericListJsonResourceProvider . ROOT ) ) { return null ; } else { java . lang . String listPath ; if ( path . endsWith ( com . adobe . acs . commons . genericlists . impl . GenericListJsonResourceProvider . EXTENSION ) ) { listPath = path . substring ( com . adobe . acs . commons . genericlists . impl . GenericListJsonResourceProvider . ROOT . length ( ) , ( ( path . length ( ) ) - ( com . adobe . acs . commons . genericlists . impl . GenericListJsonResourceProvider . EXTENSION_LENGTH ) ) ) ; } else { listPath = path . substring ( com . adobe . acs . commons . genericlists . impl . GenericListJsonResourceProvider . ROOT . length ( ) ) ; } java . lang . String fullListPath = ( listRoot ) + listPath ; com . day . cq . wcm . api . Page listPage = resourceResolver . adaptTo ( com . day . cq . wcm . api . PageManager . class ) . getPage ( fullListPath ) ; if ( listPage == null ) { return null ; } else { com . adobe . acs . commons . genericlists . GenericList list = listPage . adaptTo ( com . adobe . acs . commons . genericlists . GenericList . class ) ; if ( list == null ) { return null ; } else { org . apache . sling . api . resource . ResourceMetadata rm = new org . apache . sling . api . resource . ResourceMetadata ( ) ; rm . setResolutionPath ( path ) ; return new com . adobe . acs . commons . genericlists . impl . GenericListJsonResourceProvider . JsonResource ( list , resourceResolver , rm ) ; } } } }
org . junit . Assert . assertNotNull ( r )
testGetTimestamps ( ) { final com . xiaomi . infra . chronos . ChronosServer chronosServer = createChronosServer ( new com . xiaomi . infra . chronos . zookeeper . HostPort ( "127.0.0.1" , 2188 ) ) ; java . lang . Thread thread = new java . lang . Thread ( ) { @ com . xiaomi . infra . chronos . client . Override public void run ( ) { chronosServer . run ( ) ; } } ; thread . start ( ) ; java . lang . Thread . sleep ( 500 ) ; com . xiaomi . infra . chronos . client . ChronosClient chronosClient = createChronosServer ( ) ; long timestamp1 = chronosClient . getTimestamps ( 100 ) ; long timestamp2 = chronosClient . getTimestamps ( 100 ) ; "<AssertPlaceHolder>" ; chronosServer . stopThriftServer ( ) ; chronosServer . getFailoverWatcher ( ) . close ( ) ; chronosClient . getChronosClientWatcher ( ) . close ( ) ; } getTimestamps ( int ) { long currentTime = ( java . lang . System . currentTimeMillis ( ) ) << 18 ; synchronized ( this ) { if ( currentTime > ( maxAssignedTimestamp ) ) { maxAssignedTimestamp = ( currentTime + range ) - 1 ; } else { maxAssignedTimestamp += range ; } if ( ( maxAssignedTimestamp ) >= ( chronosServerWatcher . getCachedPersistentTimestamp ( ) ) ) { sleepUntilAsyncSet ( ) ; if ( ( maxAssignedTimestamp ) >= ( chronosServerWatcher . getCachedPersistentTimestamp ( ) ) ) { long newPersistentTimestamp = ( maxAssignedTimestamp ) + ( zkAdvanceTimestamp ) ; if ( com . xiaomi . infra . chronos . ChronosImplement . LOG . isDebugEnabled ( ) ) { com . xiaomi . infra . chronos . ChronosImplement . LOG . debug ( ( "Try<sp>to<sp>sync<sp>set<sp>persistent<sp>timestamp<sp>" + newPersistentTimestamp ) ) ; } try { chronosServerWatcher . setPersistentTimestamp ( newPersistentTimestamp ) ; } catch ( com . xiaomi . infra . chronos . exception . ChronosException e ) { com . xiaomi . infra . chronos . ChronosImplement . LOG . fatal ( "Error<sp>to<sp>set<sp>persistent<sp>timestamp,<sp>exit<sp>immediately" ) ; java . lang . System . exit ( 0 ) ; } } } if ( ( ! ( isAsyncSetPersistentTimestamp ) ) && ( ( maxAssignedTimestamp ) >= ( ( chronosServerWatcher . getCachedPersistentTimestamp ( ) ) - ( ( zkAdvanceTimestamp ) * 0.5 ) ) ) ) { long newPersistentTimestamp = ( chronosServerWatcher . getCachedPersistentTimestamp ( ) ) + ( zkAdvanceTimestamp ) ; if ( com . xiaomi . infra . chronos . ChronosImplement . LOG . isDebugEnabled ( ) ) { com . xiaomi . infra . chronos . ChronosImplement . LOG . debug ( ( "Try<sp>to<sp>async<sp>set<sp>persistent<sp>timestamp<sp>" + newPersistentTimestamp ) ) ; } isAsyncSetPersistentTimestamp = true ; asyncSetPersistentTimestamp ( newPersistentTimestamp ) ; } return ( ( maxAssignedTimestamp ) - range ) + 1 ; } }
org . junit . Assert . assertTrue ( ( ( timestamp2 - timestamp1 ) >= 100 ) )
testIsEmpty_WithPayload ( ) { pubMessage = new net . xenqtt . message . PubMessage ( net . xenqtt . message . QoS . AT_LEAST_ONCE , false , "my<sp>topic" , 123 , new byte [ ] { 1 , 2 , 3 } ) ; message = new net . xenqtt . client . PublishMessage ( channelManager , channel , pubMessage ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return ( pubMessage . getPayload ( ) . length ) == 0 ; }
org . junit . Assert . assertFalse ( message . isEmpty ( ) )
runTest ( ) { boolean result = checkNoError ( "Social_Files_Add_and_Get_Community_File_Comments" ) ; "<AssertPlaceHolder>" ; } getNoErrorMsg ( ) { return noErrorMsg ; }
org . junit . Assert . assertTrue ( getNoErrorMsg ( ) , result )
testSyncWriteFiltered ( ) { final com . spotify . heroic . ingestion . CoreIngestionGroup group = setupIngestionGroup ( empty ( ) , empty ( ) , empty ( ) ) ; doReturn ( filter ) . when ( filterSupplier ) . get ( ) ; doReturn ( other ) . when ( async ) . failed ( any ( java . lang . Throwable . class ) ) ; doReturn ( expected ) . when ( async ) . resolved ( any ( com . spotify . heroic . ingestion . Ingestion . class ) ) ; doReturn ( false ) . when ( filter ) . apply ( series ) ; doNothing ( ) . when ( writePermits ) . acquire ( ) ; doNothing ( ) . when ( writePermits ) . release ( ) ; doReturn ( other ) . when ( expected ) . onFinished ( any ( eu . toolchain . async . FutureFinished . class ) ) ; doReturn ( other ) . when ( group ) . doWrite ( request ) ; "<AssertPlaceHolder>" ; verify ( async ) . resolved ( any ( com . spotify . heroic . ingestion . Ingestion . class ) ) ; verify ( async , never ( ) ) . failed ( any ( java . lang . Throwable . class ) ) ; verify ( writePermits , never ( ) ) . acquire ( ) ; verify ( writePermits , never ( ) ) . release ( ) ; verify ( reporter , never ( ) ) . incrementConcurrentWrites ( ) ; verify ( reporter , never ( ) ) . decrementConcurrentWrites ( ) ; verify ( reporter ) . reportDroppedByFilter ( ) ; verify ( group , never ( ) ) . doWrite ( request ) ; verify ( other , never ( ) ) . onFinished ( any ( eu . toolchain . async . FutureFinished . class ) ) ; } syncWrite ( com . spotify . heroic . ingestion . Ingestion$Request ) { final io . opencensus . trace . Span span = tracer . spanBuilder ( "CoreIngestionGroup.syncWrite" ) . startSpan ( ) ; if ( ! ( filter . get ( ) . apply ( request . getSeries ( ) ) ) ) { reporter . reportDroppedByFilter ( ) ; span . setStatus ( Status . FAILED_PRECONDITION . withDescription ( "Dropped<sp>by<sp>filter" ) ) ; span . end ( ) ; return async . resolved ( com . spotify . heroic . ingestion . Ingestion . of ( com . google . common . collect . ImmutableList . of ( ) ) ) ; } try { span . addAnnotation ( "Acquiring<sp>write<sp>lock" ) ; writePermits . acquire ( ) ; } catch ( final java . lang . InterruptedException e ) { java . lang . String error = "Failed<sp>to<sp>acquire<sp>semaphore<sp>for<sp>bounded<sp>request" ; span . setStatus ( Status . INTERNAL . withDescription ( error ) ) ; span . end ( ) ; return async . failed ( new java . lang . Exception ( error , e ) ) ; } span . addAnnotation ( "Acquired<sp>write<sp>lock" ) ; reporter . incrementConcurrentWrites ( ) ; try ( io . opencensus . common . Scope ws = tracer . withSpan ( span ) ) { return doWrite ( request ) . onFinished ( ( ) -> { writePermits . release ( ) ; reporter . decrementConcurrentWrites ( ) ; span . end ( ) ; } ) ; } }
org . junit . Assert . assertEquals ( expected , group . syncWrite ( request ) )
testCreateFacadesWithDelegate ( ) { org . locationtech . geowave . core . cli . prefix . JCommanderPrefixTranslator translator = new org . locationtech . geowave . core . cli . prefix . JCommanderPrefixTranslator ( ) ; translator . addObject ( new org . locationtech . geowave . core . cli . prefix . JCommanderTranslationMapTest . Arguments ( ) ) ; org . locationtech . geowave . core . cli . prefix . JCommanderTranslationMap map = translator . translate ( ) ; map . createFacadeObjects ( ) ; "<AssertPlaceHolder>" ; } getObjects ( ) { return java . util . Collections . unmodifiableCollection ( translatedObjects ) ; }
org . junit . Assert . assertEquals ( 2 , map . getObjects ( ) . size ( ) )
isBillingDataButtonDisabled_Null ( ) { bean . setFromDate ( null ) ; bean . setToDate ( null ) ; boolean result = bean . isBillingDataButtonDisabled ( ) ; "<AssertPlaceHolder>" ; } isBillingDataButtonDisabled ( ) { java . util . Date date = new java . util . Date ( java . lang . System . currentTimeMillis ( ) ) ; bean . setFromDate ( date ) ; bean . setToDate ( date ) ; boolean result = bean . isBillingDataButtonDisabled ( ) ; org . junit . Assert . assertFalse ( result ) ; }
org . junit . Assert . assertTrue ( result )
testConstructFromSorted ( ) { java . util . TreeMap map = ch . usi . da . dmap . TreeMapTest . map5 ( ) ; java . util . TreeMap map2 = new java . util . TreeMap ( map ) ; "<AssertPlaceHolder>" ; } map5 ( ) { java . util . TreeMap map = new java . util . TreeMap ( ) ; org . junit . Assert . assertTrue ( map . isEmpty ( ) ) ; map . put ( 1 , "A" ) ; map . put ( 5 , "E" ) ; map . put ( 3 , "C" ) ; map . put ( 2 , "B" ) ; map . put ( 4 , "D" ) ; org . junit . Assert . assertFalse ( map . isEmpty ( ) ) ; org . junit . Assert . assertEquals ( 5 , map . size ( ) ) ; return map ; }
org . junit . Assert . assertEquals ( map , map2 )
clusterNewConfigurationErrorTest ( ) { initData ( ) ; when ( mockScalingRuleService . getRule ( 1 ) ) . thenReturn ( rule ) ; when ( mockClusterService . getCluster ( 1 ) ) . thenReturn ( cluster ) ; when ( mockHealthMonitoringService . getClusterLoad ( machine , PeriodicAutoscalerItemProcessor . METRIC_NAMES , PeriodicAutoscalerItemProcessor . METRIC_TYPE_LOAD , PeriodicAutoscalerItemProcessor . METRIC_PERIOD ) ) . thenReturn ( ( ( float ) ( 1 ) ) ) ; when ( mockScalingRuleService . applyScalingRule ( 1 , 1 , rule ) ) . thenReturn ( ScalingStatus . ERROR_MACHINE_CONFIGURATION_FAILURE ) ; org . openinfinity . cloud . autoscaler . periodicautoscaler . ClusterProcessingState clusterProcessingState = new org . openinfinity . cloud . autoscaler . periodicautoscaler . ClusterProcessingState ( 0 , false , false , false , false , false ) ; itemProcessor . getProcessingStatusMap ( ) . put ( 1 , clusterProcessingState ) ; org . openinfinity . cloud . autoscaler . util . ScalingData sd = new org . openinfinity . cloud . autoscaler . util . ScalingData ( 1 , cluster , rule ) ; org . openinfinity . cloud . autoscaler . test . Job job = itemProcessor . process ( machine ) ; "<AssertPlaceHolder>" ; verify ( notifier , times ( 1 ) ) . notify ( sd , Notifier . NotificationType . MACHINE_CONFIGURATION_ERROR ) ; }
org . junit . Assert . assertNull ( job )
shouldMarkTransitionFromNullToRegularAdminAsValid ( ) { stubSecurityContextWithAuthentication ( ) ; stubRoleTransition ( null , com . qcadoo . security . internal . validators . ROLE_ADMIN ) ; final boolean isValid = userRoleValidationService . checkUserCreatingSuperadmin ( userDataDefMock , userEntityMock ) ; "<AssertPlaceHolder>" ; } checkUserCreatingSuperadmin ( com . qcadoo . model . api . DataDefinition , com . qcadoo . model . api . Entity ) { java . lang . Boolean isRoleSuperadminInNewGroup = securityService . hasRole ( entity , QcadooSecurityConstants . ROLE_SUPERADMIN ) ; java . lang . Boolean isRoleSuperadminInOldGroup = ( ( entity . getId ( ) ) == null ) ? false : securityService . hasRole ( dataDefinition . get ( entity . getId ( ) ) , QcadooSecurityConstants . ROLE_SUPERADMIN ) ; if ( ( com . google . common . base . Objects . equal ( isRoleSuperadminInOldGroup , isRoleSuperadminInNewGroup ) ) || ( isCurrentUserShopOrSuperAdmin ( dataDefinition ) ) ) { return true ; } entity . addError ( dataDefinition . getField ( UserFields . GROUP ) , "qcadooUsers.validate.global.error.forbiddenRole" ) ; return false ; }
org . junit . Assert . assertTrue ( isValid )
shouldCreateStringRepresentationOfKey ( ) { java . lang . String key1 = uniqueString ( 3 ) ; java . lang . String key2 = uniqueString ( 3 ) ; java . lang . String key3 = uniqueString ( 3 ) ; java . sql . Array array = createJdbcArray ( key1 , key2 , key3 ) ; given ( resultSet . getArray ( annis . sqlgen . PostgreSqlArraySolutionKeyTest . keyColumnName ) ) . willReturn ( array ) ; key . retrieveKey ( resultSet ) ; java . lang . String actual = key . getCurrentKeyAsString ( ) ; java . lang . String expected = ( ( ( key1 + "," ) + key2 ) + "," ) + key3 ; "<AssertPlaceHolder>" ; } getCurrentKeyAsString ( ) { return org . apache . commons . lang3 . StringUtils . join ( getCurrentKey ( ) , "," ) ; }
org . junit . Assert . assertThat ( actual , org . hamcrest . CoreMatchers . is ( expected ) )
testAvailableMessageCountInitialized ( ) { java . lang . String topic = "some:topic" ; java . util . Optional < java . lang . Long > result = channelManager . getAvailableMessageCount ( topic ) ; "<AssertPlaceHolder>" ; } getAvailableMessageCount ( java . lang . String ) { return java . util . Optional . ofNullable ( consumersByTopic . get ( topic ) ) . flatMap ( Consumer :: messageCount ) ; }
org . junit . Assert . assertEquals ( java . util . Optional . empty ( ) , result )
randomElesTest ( ) { java . util . List < java . lang . Integer > result = cn . hutool . core . util . RandomUtil . randomEles ( cn . hutool . core . collection . CollectionUtil . newArrayList ( 1 , 2 , 3 , 4 , 5 , 6 ) , 2 ) ; "<AssertPlaceHolder>" ; } size ( ) { return this . size ; }
org . junit . Assert . assertEquals ( result . size ( ) , 2 )
shouldReturnFalseForCloseBracket ( ) { boolean start = edu . stanford . bmir . protege . web . shared . entity . EntityNameUtils . isWordStart ( ")" , 0 ) ; "<AssertPlaceHolder>" ; } isWordStart ( java . lang . String , int ) { checkNotNull ( entityName ) ; int length = entityName . length ( ) ; checkElementIndex ( index , length ) ; edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType indexCharType = edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . getType ( entityName , index ) ; if ( ! ( indexCharType . isWordLetter ( ) ) ) { return false ; } if ( index == 0 ) { return true ; } edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType prevCharType = edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . getType ( entityName , ( index - 1 ) ) ; if ( prevCharType != indexCharType ) { return ! ( ( indexCharType == ( EntityNameCharType . LETTER ) ) && ( prevCharType == ( EntityNameCharType . UPPER_CASE_LETTER ) ) ) ; } if ( indexCharType == ( EntityNameCharType . UPPER_CASE_LETTER ) ) { boolean hasFollowingCharacter = index < ( length - 1 ) ; return hasFollowingCharacter && ( ( edu . stanford . bmir . protege . web . shared . entity . EntityNameCharType . getType ( entityName , ( index + 1 ) ) ) == ( EntityNameCharType . LETTER ) ) ; } return false ; }
org . junit . Assert . assertEquals ( false , start )
testMethod ( ) { final boolean [ ] result = new boolean [ ] { false } ; result [ 0 ] = new edu . rice . pcdp . await . TestAwait3 ( ) . run ( ) ; "<AssertPlaceHolder>" ; } run ( ) { edu . rice . pcdp . PCDP . finish ( ( ) -> { final int acc0 = 1 ; async ( ( ) -> { int acc1 = 2 ; async ( ( ) -> { int acc2 = 3 ; async ( ( ) -> { int acc3 = ( acc0 + acc1 ) + acc2 ; res = acc3 == 6 ; } ) ; } ) ; } ) ; } ) ; return res ; }
org . junit . Assert . assertEquals ( true , result [ 0 ] )
testAxpyOpRows ( ) { org . nd4j . linalg . api . ndarray . INDArray arr = org . nd4j . linalg . factory . Nd4j . create ( 1 , 4 ) . assign ( 2.0F ) ; org . nd4j . linalg . api . ndarray . INDArray ones = org . nd4j . linalg . factory . Nd4j . ones ( 1 , 4 ) . assign ( 3.0F ) ; org . nd4j . linalg . factory . Nd4j . exec ( new org . nd4j . linalg . api . ops . impl . transforms . pairwise . arithmetic . Axpy ( arr , ones , arr , 10.0 , 4 ) ) ; org . nd4j . linalg . api . ndarray . INDArray exp = org . nd4j . linalg . factory . Nd4j . valueArrayOf ( new long [ ] { 1 , 4 } , 23.0 ) ; "<AssertPlaceHolder>" ; } valueArrayOf ( long [ ] , double , org . nd4j . linalg . api . buffer . DataType ) { if ( ( shape . length ) == 0 ) return org . nd4j . linalg . factory . Nd4j . scalar ( type , value ) ; org . nd4j . linalg . factory . Nd4j . checkShapeValues ( shape ) ; org . nd4j . linalg . factory . INDArray ret = org . nd4j . linalg . factory . Nd4j . createUninitialized ( type , shape ) ; org . nd4j . linalg . factory . Nd4j . logCreationIfNecessary ( ret ) ; ret . assign ( value ) ; return ret ; }
org . junit . Assert . assertEquals ( exp , arr )
executeParameterizedTest ( ) { try { net . sourceforge . seqware . pipeline . runner . Runner . main ( testArgs . split ( "\\s" ) ) ; } catch ( net . sourceforge . seqware . pipeline . runner . RunnerTest . ExitException e ) { "<AssertPlaceHolder>" ; } } main ( java . lang . String [ ] ) { new net . sourceforge . seqware . pipeline . runner . Runner ( ) . run ( args ) ; }
org . junit . Assert . assertEquals ( e . status , expected )
testResolveUserVolumePath ( ) { java . lang . String volumeString = java . lang . String . format ( BIND_STRING_FMT , io . fabric8 . maven . docker . util . VolumeBindingUtilTest . USER_PATH , io . fabric8 . maven . docker . util . VolumeBindingUtilTest . CONTAINER_PATH ) ; java . lang . String relativizedVolumeString = io . fabric8 . maven . docker . util . VolumeBindingUtil . resolveRelativeVolumeBinding ( new java . io . File ( "ignored" ) , volumeString ) ; java . lang . String expectedBindingString = java . lang . String . format ( BIND_STRING_FMT , new java . io . File ( java . lang . System . getProperty ( "user.home" ) , io . fabric8 . maven . docker . util . PathTestUtil . stripLeadingTilde ( io . fabric8 . maven . docker . util . VolumeBindingUtilTest . USER_PATH ) ) , io . fabric8 . maven . docker . util . VolumeBindingUtilTest . CONTAINER_PATH ) ; "<AssertPlaceHolder>" ; } stripLeadingTilde ( java . lang . String ) { if ( path . startsWith ( io . fabric8 . maven . docker . util . PathTestUtil . TILDE ) ) { return path . substring ( 1 ) ; } return path ; }
org . junit . Assert . assertEquals ( expectedBindingString , relativizedVolumeString )
resolvesGradebookEntryRefDidInStudentGradebookEntryCorrectly ( ) { org . slc . sli . ingestion . NeutralRecordEntity entity = loadEntity ( "schoolId" 1 ) ; resolveInternalId ( entity ) ; java . util . Map < java . lang . String , java . lang . String > schoolNaturalKeys = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; schoolNaturalKeys . put ( "stateOrganizationId" , "schoolId" 2 ) ; java . lang . String schoolId = generateExpectedDid ( schoolNaturalKeys , org . slc . sli . ingestion . transformation . normalization . did . DidReferenceResolutionTest . TENANT_ID , "educationOrganization" , null ) ; java . util . Map < java . lang . String , java . lang . String > sectionNaturalKeys = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; sectionNaturalKeys . put ( "schoolId" , schoolId ) ; sectionNaturalKeys . put ( "uniqueSectionCode" , "this<sp>section" ) ; java . lang . String sectionDid = generateExpectedDid ( sectionNaturalKeys , org . slc . sli . ingestion . transformation . normalization . did . DidReferenceResolutionTest . TENANT_ID , "section" , null ) ; java . util . Map < java . lang . String , java . lang . String > naturalKeys = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; naturalKeys . put ( "gradebookEntryId" , "Unit<sp>test" ) ; naturalKeys . put ( "schoolId" 4 , "2011-09-15" ) ; naturalKeys . put ( "sectionId" , sectionDid ) ; java . lang . String refId = generateExpectedDid ( naturalKeys , org . slc . sli . ingestion . transformation . normalization . did . DidReferenceResolutionTest . TENANT_ID , "schoolId" 0 , sectionDid ) ; java . util . Map < java . lang . String , java . lang . Object > body = entity . getBody ( ) ; java . lang . Object resolvedRef = body . get ( "schoolId" 3 ) ; "<AssertPlaceHolder>" ; } get ( java . lang . Iterable ) { org . slc . sli . api . service . BasicService . LOG . debug ( ">>>BasicService.get(Iterable<sp>id)" ) ; org . slc . sli . api . service . NeutralQuery neutralQuery = new org . slc . sli . api . service . NeutralQuery ( ) ; neutralQuery . setOffset ( 0 ) ; neutralQuery . setLimit ( org . slc . sli . api . service . BasicService . MAX_RESULT_SIZE ) ; return get ( ids , neutralQuery ) ; }
org . junit . Assert . assertEquals ( refId , resolvedRef )
testExceptionOnUnknowwTemplate ( ) { de . neuland . jade4j . JadeConfiguration config = new de . neuland . jade4j . JadeConfiguration ( ) ; de . neuland . jade4j . template . JadeTemplate template = null ; try { template = config . getTemplate ( "UNKNOWN_PATH" ) ; org . junit . Assert . fail ( "Did<sp>expect<sp>TemplatException!" ) ; } catch ( java . io . IOException ignore ) { } "<AssertPlaceHolder>" ; } getTemplate ( java . lang . String ) { if ( caching ) { long lastModified = templateLoader . getLastModified ( name ) ; de . neuland . jade4j . template . JadeTemplate template = cache . get ( getKeyValue ( name , lastModified ) ) ; if ( template != null ) { return template ; } java . lang . String key = getCachedKey ( name , lastModified ) ; synchronized ( key ) { de . neuland . jade4j . template . JadeTemplate newTemplate = createTemplate ( name ) ; cache . put ( key , newTemplate ) ; return newTemplate ; } } return createTemplate ( name ) ; }
org . junit . Assert . assertNull ( template )
clear_ModelReload ( ) { lcc . getModel ( ) . getSettings ( ) . add ( new org . oscm . ui . dialog . common . ldapsettings . LdapSetting ( "key" , "value" , false ) ) ; model . setOrganizationIdentifier ( null ) ; model . setOrganizationSelectionSupported ( true ) ; lcc . clear ( ) ; verify ( lcc , times ( 1 ) ) . initPlatformSettings ( ) ; "<AssertPlaceHolder>" ; } getModel ( ) { if ( ( model ) == null ) { model = new org . oscm . ui . model . Marketplace ( ) ; model . setRevenueSharesReadOnly ( ( ! ( isLoggedInAndPlatformOperator ( ) ) ) ) ; } return model ; }
org . junit . Assert . assertTrue ( lcc . getModel ( ) . getSettings ( ) . isEmpty ( ) )
resolveNotFound ( ) { when ( servers . hasMore ( ) ) . thenReturn ( false ) ; com . hazelcast . kubernetes . DnsEndpointResolver dnsEndpointResolver = new com . hazelcast . kubernetes . DnsEndpointResolver ( com . hazelcast . kubernetes . DnsEndpointResolverTest . LOGGER , com . hazelcast . kubernetes . DnsEndpointResolverTest . SERVICE_DNS , com . hazelcast . kubernetes . DnsEndpointResolverTest . UNSET_PORT , dirContext ) ; java . util . List < com . hazelcast . spi . discovery . DiscoveryNode > result = dnsEndpointResolver . resolve ( ) ; "<AssertPlaceHolder>" ; } resolve ( ) { if ( ( ( serviceName ) != null ) && ( ! ( serviceName . isEmpty ( ) ) ) ) { return getSimpleDiscoveryNodes ( client . endpointsByName ( serviceName ) ) ; } else if ( ( ( serviceLabel ) != null ) && ( ! ( serviceLabel . isEmpty ( ) ) ) ) { return getSimpleDiscoveryNodes ( client . endpointsByLabel ( serviceLabel , serviceLabelValue ) ) ; } return getSimpleDiscoveryNodes ( client . endpoints ( ) ) ; }
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
whenResourceIsCreated_thenUriOfTheNewlyCreatedResourceIsDiscoverable ( ) { final com . baeldung . persistence . model . Foo newResource = new com . baeldung . persistence . model . Foo ( org . apache . commons . lang3 . RandomStringUtils . randomAlphabetic ( 6 ) ) ; final io . restassured . response . Response createResp = io . restassured . RestAssured . given ( ) . contentType ( MediaType . APPLICATION_JSON_VALUE ) . body ( newResource ) . post ( com . baeldung . common . web . AbstractDiscoverabilityLiveTest . getURL ( ) ) ; final java . lang . String uriOfNewResource = createResp . getHeader ( HttpHeaders . LOCATION ) ; final io . restassured . response . Response response = io . restassured . RestAssured . given ( ) . header ( HttpHeaders . ACCEPT , MediaType . APPLICATION_JSON_VALUE ) . get ( uriOfNewResource ) ; final com . baeldung . persistence . model . Foo resourceFromServer = response . body ( ) . as ( com . baeldung . persistence . model . Foo . class ) ; "<AssertPlaceHolder>" ; } get ( int ) { return ( ( E ) ( internal [ index ] ) ) ; }
org . junit . Assert . assertThat ( newResource , org . hamcrest . Matchers . equalTo ( resourceFromServer ) )
normalGetComponentIdsByType ( ) { final long serviceId = 7117 ; final java . lang . String [ ] cidarray = new java . lang . String [ ] { ".component-1" } ; final java . util . Collection cMetadatas = new java . util . ArrayList ( ) ; cMetadatas . add ( mockServiceMetadata ) ; mockery . checking ( new org . jmock . Expectations ( ) { { oneOf ( mockContext ) . getServiceReferences ( with ( any ( java . lang . String . class ) ) , with ( any ( java . lang . String . class ) ) ) ; will ( returnValue ( mockServiceReferences ) ) ; oneOf ( mockContext ) . getService ( mockServiceReferences [ 0 ] ) ; will ( returnValue ( mockContainer ) ) ; } } ) ; mockery . checking ( new org . jmock . Expectations ( ) { { oneOf ( mockContainer ) . getMetadata ( org . osgi . service . blueprint . reflect . ServiceMetadata . class ) ; will ( returnValue ( cMetadatas ) ) ; } } ) ; mockery . checking ( new org . jmock . Expectations ( ) { { oneOf ( mockServiceMetadata ) . getId ( ) ; will ( returnValue ( cidarray [ 0 ] ) ) ; } } ) ; "<AssertPlaceHolder>" ; } getComponentIdsByType ( long , java . lang . String ) { org . osgi . service . blueprint . container . BlueprintContainer container = getBlueprintContainer ( containerServiceId ) ; java . util . Collection < ? extends org . osgi . service . blueprint . reflect . ComponentMetadata > components ; if ( type . equals ( BlueprintMetadataMBean . SERVICE_METADATA ) ) { components = container . getMetadata ( org . osgi . service . blueprint . reflect . ServiceMetadata . class ) ; } else if ( type . equals ( BlueprintMetadataMBean . BEAN_METADATA ) ) { components = container . getMetadata ( org . osgi . service . blueprint . reflect . BeanMetadata . class ) ; } else if ( type . equals ( BlueprintMetadataMBean . SERVICE_REFERENCE_METADATA ) ) { components = container . getMetadata ( org . osgi . service . blueprint . reflect . ServiceReferenceMetadata . class ) ; } else { throw new java . lang . IllegalArgumentException ( ( "Unrecognized<sp>component<sp>type:<sp>" + type ) ) ; } java . lang . String [ ] ids = new java . lang . String [ components . size ( ) ] ; int i = 0 ; for ( org . osgi . service . blueprint . reflect . ComponentMetadata component : components ) { if ( null == component ) continue ; ids [ ( i ++ ) ] = component . getId ( ) ; } return ids ; }
org . junit . Assert . assertArrayEquals ( cidarray , metadata . getComponentIdsByType ( serviceId , BlueprintMetadataMBean . SERVICE_METADATA ) )
testNonAsciiReturnValues ( ) { java . lang . String nonAsciiTableName = "nonAsciiTable" ; java . lang . String nonAsciiString = "'<sp>into<sp>table<sp>" 1 ; org . apache . hadoop . fs . Path nonAsciiFilePath = new org . apache . hadoop . fs . Path ( org . apache . hive . jdbc . cbo_rp_TestJdbcDriver2 . dataFileDir , "non_ascii_tbl.txt" ) ; java . sql . Statement stmt = con . createStatement ( ) ; stmt . execute ( "set<sp>hive.support.concurrency<sp>=<sp>false" ) ; stmt . execute ( ( ( ( "create<sp>table<sp>" + nonAsciiTableName ) + "<sp>(key<sp>int,<sp>value<sp>string)<sp>" ) + "row<sp>format<sp>delimited<sp>fields<sp>terminated<sp>by<sp>'|'" ) ) ; stmt . execute ( ( ( ( "load<sp>data<sp>local<sp>inpath<sp>'" + ( nonAsciiFilePath . toString ( ) ) ) + "'<sp>into<sp>table<sp>" ) + nonAsciiTableName ) ) ; java . sql . ResultSet rs = stmt . executeQuery ( ( ( "select<sp>value<sp>from<sp>" + nonAsciiTableName ) + "<sp>limit<sp>1" ) ) ; while ( rs . next ( ) ) { java . lang . String resultValue = rs . getString ( 1 ) ; "<AssertPlaceHolder>" ; } try { stmt . execute ( ( "'<sp>into<sp>table<sp>" 0 + nonAsciiTableName ) ) ; } catch ( java . lang . Exception ex ) { } stmt . close ( ) ; } getString ( java . lang . String ) { return get ( key ) ; }
org . junit . Assert . assertTrue ( resultValue . equalsIgnoreCase ( nonAsciiString ) )
resetShouldClearArbitraryMutableFields ( ) { net . openhft . chronicle . wire . WiresTest . StringBuilderContainer container1 = new net . openhft . chronicle . wire . WiresTest . StringBuilderContainer ( ) ; container1 . stringBuilder . setLength ( 0 ) ; container1 . stringBuilder . append ( "value1" ) ; net . openhft . chronicle . wire . WiresTest . StringBuilderContainer container2 = new net . openhft . chronicle . wire . WiresTest . StringBuilderContainer ( ) ; container2 . stringBuilder . setLength ( 0 ) ; container2 . stringBuilder . append ( "value2" ) ; net . openhft . chronicle . wire . Wires . reset ( container1 ) ; net . openhft . chronicle . wire . Wires . reset ( container2 ) ; container1 . stringBuilder . append ( "value1" ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( "ScalarStrategy<" + ( type . getName ( ) ) ) + ">" ; }
org . junit . Assert . assertEquals ( "" , container2 . stringBuilder . toString ( ) )
roomInvitationsShouldDontFireOnChatCreated ( ) { final com . calclab . emite . xtesting . handlers . PairChatChangedTestHandler handler = addChatCreatedHandler ( ) ; session . receives ( ( ( ( ( ( ( "<message<sp>to='" + ( MYSELF ) ) + "'<sp>from='someroom@domain'><x<sp>xmlns='http://jabber.org/protocol/muc#user'>" ) + "<invite<sp>from='" ) + ( OTHER ) ) + "'><reason>Join<sp>to<sp>our<sp>conversation</reason></invite>" ) + "</x><x<sp>jid='someroom@domain'<sp>xmlns='jabber:x:conference'<sp>/></message>" ) ) ; "<AssertPlaceHolder>" ; } isNotCalled ( ) { return ( getCalledTimes ( ) ) == 0 ; }
org . junit . Assert . assertTrue ( handler . isNotCalled ( ) )
testConvert ( ) { java . lang . String name = "first<sp>blood" ; java . lang . String abbrName = "fb" ; java . lang . Long id = 1L ; java . lang . Long timePeriodId = 2L ; org . lnu . is . domain . timeperiod . TimePeriod timePeriod = new org . lnu . is . domain . timeperiod . TimePeriod ( ) ; timePeriod . setId ( timePeriodId ) ; org . lnu . is . domain . specoffer . SpecOfferType expected = new org . lnu . is . domain . specoffer . SpecOfferType ( ) ; expected . setName ( name ) ; expected . setAbbrName ( abbrName ) ; expected . setId ( id ) ; expected . setTimePeriod ( timePeriod ) ; org . lnu . is . resource . specoffer . type . SpecOfferTypeResource source = new org . lnu . is . resource . specoffer . type . SpecOfferTypeResource ( ) ; source . setName ( name ) ; source . setAbbrName ( abbrName ) ; source . setTimePeriodId ( timePeriodId ) ; source . setId ( id ) ; org . lnu . is . domain . specoffer . SpecOfferType actual = unit . convert ( source ) ; "<AssertPlaceHolder>" ; } convert ( org . lnu . is . domain . admin . unit . AdminUnit ) { return convert ( source , new org . lnu . is . resource . adminunit . AdminUnitResource ( ) ) ; }
org . junit . Assert . assertEquals ( expected , actual )
getMessageFromReceiverLinkDoesNothing ( ) { java . lang . String linkName = "linkName" ; final tests . unit . com . microsoft . azure . sdk . iot . device . transport . amqps . AmqpsSessionManager amqpsSessionManager = new tests . unit . com . microsoft . azure . sdk . iot . device . transport . amqps . AmqpsSessionManager ( mockDeviceClientConfig , mockScheduledExecutorService ) ; tests . unit . com . microsoft . azure . sdk . iot . device . transport . amqps . Deencapsulation . setField ( amqpsSessionManager , "session" , null ) ; tests . unit . com . microsoft . azure . sdk . iot . device . transport . amqps . AmqpsMessage amqpsMessage = tests . unit . com . microsoft . azure . sdk . iot . device . transport . amqps . Deencapsulation . invoke ( amqpsSessionManager , "getMessageFromReceiverLink" , linkName ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertNull ( amqpsMessage )
testGetClassLoader ( ) { org . apache . ambari . server . orm . entities . ViewEntity viewDefinition = org . apache . ambari . server . orm . entities . ViewEntityTest . getViewEntity ( ) ; "<AssertPlaceHolder>" ; } getClassLoader ( ) { return classLoader ; }
org . junit . Assert . assertEquals ( org . apache . ambari . server . orm . entities . ViewEntityTest . class . getClassLoader ( ) , viewDefinition . getClassLoader ( ) )
testApp ( ) { br . com . casadocodigo . java8 . List lista = br . com . casadocodigo . java8 . Arrays . asList ( 1 , 2 , 3 ) ; lista . forEach ( new br . com . casadocodigo . java8 . Consumer ( ) { public void accept ( java . lang . Object a ) { System . out . println ( a ) ; } } ) ; lista . forEach ( ( v ) -> { System . out . println ( v ) ; } ) ; "<AssertPlaceHolder>" ; } accept ( java . lang . Object ) { System . out . println ( a ) ; }
org . junit . Assert . assertTrue ( true )
test ( ) { org . apache . hadoop . hbase . client . HBaseAdmin hBaseAdmin = mock ( org . apache . hadoop . hbase . client . HBaseAdmin . class ) ; org . apache . hadoop . hbase . HTableDescriptor [ ] hds = new org . apache . hadoop . hbase . HTableDescriptor [ 2 ] ; org . apache . hadoop . hbase . HTableDescriptor d1 = mock ( org . apache . hadoop . hbase . HTableDescriptor . class ) ; org . apache . hadoop . hbase . HTableDescriptor d2 = mock ( org . apache . hadoop . hbase . HTableDescriptor . class ) ; hds [ 0 ] = d1 ; hds [ 1 ] = d2 ; when ( d1 . getValue ( "KYLIN_HOST" ) ) . thenReturn ( "../examples/test_metadata/" ) ; when ( d2 . getValue ( "KYLIN_HOST" ) ) . thenReturn ( "../examples/test_metadata/" ) ; when ( d1 . getTableName ( ) ) . thenReturn ( org . apache . hadoop . hbase . TableName . valueOf ( "KYLIN_J9TE08D9IA" ) ) ; java . lang . String toBeDel = "to-be-del" ; when ( d2 . getTableName ( ) ) . thenReturn ( org . apache . hadoop . hbase . TableName . valueOf ( toBeDel ) ) ; when ( hBaseAdmin . listTables ( "KYLIN_.*" ) ) . thenReturn ( hds ) ; when ( hBaseAdmin . tableExists ( toBeDel ) ) . thenReturn ( true ) ; when ( hBaseAdmin . isTableEnabled ( toBeDel ) ) . thenReturn ( false ) ; org . apache . kylin . rest . job . StorageCleanJobHbaseUtil . cleanUnusedHBaseTables ( hBaseAdmin , true , 100000 ) ; org . mockito . ArgumentCaptor < java . lang . String > captor = org . mockito . ArgumentCaptor . forClass ( java . lang . String . class ) ; verify ( hBaseAdmin ) . deleteTable ( captor . capture ( ) ) ; "<AssertPlaceHolder>" ; } getAllValues ( ) { throw new java . lang . UnsupportedOperationException ( ) ; }
org . junit . Assert . assertEquals ( com . google . common . collect . Lists . newArrayList ( toBeDel ) , captor . getAllValues ( ) )
unregister ( ) { org . talend . esb . servicelocator . client . Endpoint endpoint = org . talend . esb . servicelocator . client . internal . EndpointStubFactory . create ( org . talend . esb . servicelocator . TestValues . SERVICE_QNAME_1 , org . talend . esb . servicelocator . TestValues . ENDPOINT_1 , BindingType . JAXRS , TransportType . HTTP ) ; org . easymock . Capture < java . lang . Long > lastTimeStoppedCapture = new org . easymock . Capture < java . lang . Long > ( ) ; endpointExists ( org . talend . esb . servicelocator . client . internal . PathValues . ENDPOINT_PATH_11 ) ; getData ( org . talend . esb . servicelocator . client . internal . PathValues . ENDPOINT_PATH_11 , org . talend . esb . servicelocator . client . internal . RegisterEndpointProviderTest . OLD_DATA ) ; data2Ep ( org . talend . esb . servicelocator . TestValues . SERVICE_QNAME_1 , org . talend . esb . servicelocator . client . internal . RegisterEndpointProviderTest . OLD_DATA ) ; deleteEndpointStatus ( org . talend . esb . servicelocator . client . internal . PathValues . ENDPOINT_PATH_11 ) ; ep2Data ( endpoint , org . talend . esb . servicelocator . TestValues . LAST_TIME_STARTED , lastTimeStoppedCapture , org . talend . esb . servicelocator . client . internal . RegisterEndpointProviderTest . NEW_DATA ) ; setData ( org . talend . esb . servicelocator . client . internal . PathValues . ENDPOINT_PATH_11 , org . talend . esb . servicelocator . client . internal . RegisterEndpointProviderTest . NEW_DATA ) ; replayAll ( ) ; org . talend . esb . servicelocator . client . internal . ServiceLocatorImpl slc = createServiceLocatorSuccess ( ) ; slc . setEndpointTransformer ( trans ) ; long beforeUnregister = java . lang . System . currentTimeMillis ( ) ; slc . unregister ( endpoint ) ; long afterUnregister = java . lang . System . currentTimeMillis ( ) ; verifyAll ( ) ; long lastTimeStopped = lastTimeStoppedCapture . getValue ( ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; }
org . junit . Assert . assertTrue ( ( ( beforeUnregister <= lastTimeStopped ) && ( lastTimeStopped <= afterUnregister ) ) )
callFunctionPointerVrV ( ) { final boolean [ ] called = new boolean [ ] { false } ; jnr . ffi . DelegateTest . TestLib . CallableVrV javaClosure = new jnr . ffi . DelegateTest . TestLib . CallableVrV ( ) { @ jnr . ffi . Override public void call ( ) { called [ 0 ] = true ; } } ; jnr . ffi . DelegateTest . TestLib . CallableVrV callable = jnr . ffi . DelegateTest . lib . ret_pointer ( javaClosure ) ; callable . call ( ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertTrue ( called [ 0 ] )
customDefaultJvmRoute ( ) { java . lang . String expected = "expected" ; java . lang . System . clearProperty ( "jboss.jvmRoute" ) ; org . jboss . modcluster . container . Engine engine = mock ( org . jboss . modcluster . container . Engine . class ) ; org . jboss . modcluster . config . JvmRouteFactory defaultFactory = mock ( org . jboss . modcluster . config . JvmRouteFactory . class ) ; org . jboss . modcluster . config . JvmRouteFactory factory = new org . jboss . modcluster . config . impl . SystemPropertyJvmRouteFactory ( defaultFactory , "jboss.jvmRoute" ) ; when ( defaultFactory . createJvmRoute ( engine ) ) . thenReturn ( expected ) ; java . lang . String result = factory . createJvmRoute ( engine ) ; "<AssertPlaceHolder>" ; } createJvmRoute ( org . jboss . modcluster . container . Engine ) { org . jboss . modcluster . container . Connector connector = engine . getProxyConnector ( ) ; byte [ ] address = connector . getAddress ( ) . getAddress ( ) ; byte [ ] name = engine . getName ( ) . getBytes ( ) ; java . nio . ByteBuffer buffer = java . nio . ByteBuffer . allocate ( ( ( ( address . length ) + ( org . jboss . modcluster . config . impl . UUIDJvmRouteFactory . INT_SIZE ) ) + ( name . length ) ) ) ; buffer . put ( address ) . putInt ( connector . getPort ( ) ) . put ( name ) ; return java . util . UUID . nameUUIDFromBytes ( buffer . array ( ) ) . toString ( ) ; }
org . junit . Assert . assertSame ( expected , result )
testSimpleNoDecimals ( ) { org . apache . commons . math3 . linear . ArrayRealVector c = new org . apache . commons . math3 . linear . ArrayRealVector ( new double [ ] { 1 , 1 , 1 } ) ; java . lang . String expected = "{1;<sp>1;<sp>1}" ; java . lang . String actual = realVectorFormat . format ( c ) ; "<AssertPlaceHolder>" ; } format ( java . lang . Object [ ] ) { return format . format ( arguments ) ; }
org . junit . Assert . assertEquals ( expected , actual )
testDeleteFiles_String ( ) { System . out . println ( "deleteFiles" ) ; this . createFile ( "file2.jpg" ) ; java . lang . String directory = folder . getRoot ( ) . getPath ( ) ; com . bixly . pastevid . util . FileUtil . deleteFiles ( directory ) ; "<AssertPlaceHolder>" ; } deleteFiles ( java . lang . String ) { if ( dontDelete == null ) { return false ; } try { java . io . File f = new java . io . File ( this . getCaptureFolder ( ) ) ; if ( f . exists ( ) ) { com . bixly . pastevid . util . FileUtil . deleteFiles ( this . getCaptureFolder ( ) , new java . lang . String [ ] { this . prevVideoFileName , ( ( f . getPath ( ) ) + ( this . separator ) ) + dontDelete } ) ; com . bixly . pastevid . util . FileUtil . deleteSubdirs ( this . getCaptureFolder ( ) ) ; return true ; } } catch ( java . lang . Exception ex ) { } return false ; }
org . junit . Assert . assertFalse ( properties . exists ( ) )
testEquals ( ) { org . jfree . data . time . Day day1 = new org . jfree . data . time . Day ( 29 , org . jfree . date . MonthConstants . MARCH , 2002 ) ; org . jfree . data . time . Day day2 = new org . jfree . data . time . Day ( 29 , org . jfree . date . MonthConstants . MARCH , 2002 ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ! ( o instanceof com . mysql . fabric . Server ) ) { return false ; } com . mysql . fabric . Server s = ( ( com . mysql . fabric . Server ) ( o ) ) ; return s . getUuid ( ) . equals ( getUuid ( ) ) ; }
org . junit . Assert . assertTrue ( day1 . equals ( day2 ) )
testGetByPath ( ) { com . xiaomi . shepher . model . Snapshot snapshot = snapshotMapper . getByPathAndZk ( "/test/test2" , "local_test" , org . apache . commons . lang3 . time . DateUtils . parseDate ( "2016-09-20<sp>02:07:59" , new java . lang . String [ ] { "yyyy-MM-dd<sp>hh:mm:ss" } ) , 0 ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertNotNull ( snapshot )
getKafkaTopicOffsets ( ) { final long from = 1L ; final long to = 3L ; final java . util . Map < java . lang . String , java . util . Map < java . lang . Long , java . lang . Double > > expected = new java . util . HashMap ( ) ; expected . put ( StormMappedMetric . logsize . name ( ) , generateTestPointsMap ( ) ) ; expected . put ( StormMappedMetric . offset . name ( ) , generateTestPointsMap ( ) ) ; expected . put ( StormMappedMetric . lag . name ( ) , generateTestPointsMap ( ) ) ; new mockit . Expectations ( ) { { mockTimeSeriesQuerier . getMetrics ( withEqual ( mockedTopologyName ) , withEqual ( ( ( ( source . getId ( ) ) + "-" ) + ( source . getName ( ) ) ) ) , withEqual ( java . lang . String . format ( StormMappedMetric . logsize . getStormMetricName ( ) , com . hortonworks . streamline . streams . metrics . storm . topology . StormTopologyTimeSeriesMetricsImplTest . TOPIC_NAME ) ) , withEqual ( StormMappedMetric . logsize . getAggregateFunction ( ) ) , withEqual ( from ) , withEqual ( to ) ) ; result = expected . get ( StormMappedMetric . logsize . name ( ) ) ; mockTimeSeriesQuerier . getMetrics ( withEqual ( mockedTopologyName ) , withEqual ( ( ( ( source . getId ( ) ) + "-" ) + ( source . getName ( ) ) ) ) , withEqual ( java . lang . String . format ( StormMappedMetric . offset . getStormMetricName ( ) , com . hortonworks . streamline . streams . metrics . storm . topology . StormTopologyTimeSeriesMetricsImplTest . TOPIC_NAME ) ) , withEqual ( StormMappedMetric . offset . getAggregateFunction ( ) ) , withEqual ( from ) , withEqual ( to ) ) ; result = expected . get ( StormMappedMetric . offset . name ( ) ) ; mockTimeSeriesQuerier . getMetrics ( withEqual ( mockedTopologyName ) , withEqual ( ( ( ( source . getId ( ) ) + "-" ) + ( source . getName ( ) ) ) ) , withEqual ( java . lang . String . format ( StormMappedMetric . lag . getStormMetricName ( ) , com . hortonworks . streamline . streams . metrics . storm . topology . StormTopologyTimeSeriesMetricsImplTest . TOPIC_NAME ) ) , withEqual ( StormMappedMetric . lag . getAggregateFunction ( ) ) , withEqual ( from ) , withEqual ( to ) ) ; result = expected . get ( StormMappedMetric . lag . name ( ) ) ; } } ; java . util . Map < java . lang . String , java . util . Map < java . lang . Long , java . lang . Double > > actual = stormTopologyTimeSeriesMetrics . getkafkaTopicOffsets ( topology , source , from , to , null ) ; "<AssertPlaceHolder>" ; } getkafkaTopicOffsets ( com . hortonworks . streamline . streams . layout . component . TopologyLayout , com . hortonworks . streamline . streams . layout . component . Component , long , long , java . lang . String ) { java . lang . String stormTopologyName = com . hortonworks . streamline . streams . storm . common . StormTopologyUtil . findOrGenerateTopologyName ( client , topology . getId ( ) , topology . getName ( ) , asUser ) ; java . lang . String stormComponentName = getComponentName ( component ) ; java . lang . String topicName = findKafkaTopicName ( component ) ; if ( topicName == null ) { throw new java . lang . IllegalStateException ( ( ( ( "Cannot<sp>find<sp>Kafka<sp>topic<sp>name<sp>from<sp>source<sp>config<sp>-<sp>topology<sp>name:<sp>" + ( topology . getName ( ) ) ) + "<sp>/<sp>source<sp>:<sp>" ) + ( component . getName ( ) ) ) ) ; } com . hortonworks . streamline . streams . metrics . storm . topology . StormMappedMetric [ ] metrics = new com . hortonworks . streamline . streams . metrics . storm . topology . StormMappedMetric [ ] { StormMappedMetric . logsize , StormMappedMetric . offset , StormMappedMetric . lag } ; java . util . Map < java . lang . String , java . util . Map < java . lang . Long , java . lang . Double > > kafkaOffsets = new java . util . HashMap ( ) ; for ( com . hortonworks . streamline . streams . metrics . storm . topology . StormMappedMetric metric : metrics ) { kafkaOffsets . put ( metric . name ( ) , queryKafkaMetrics ( stormTopologyName , stormComponentName , metric , topicName , from , to ) ) ; } return kafkaOffsets ; }
org . junit . Assert . assertEquals ( expected , actual )
givenStartedServerWhenUploadSipThenReturnOK ( ) { final java . util . List < fr . gouv . vitam . logbook . common . parameters . LogbookOperationParameters > operationList = new java . util . ArrayList ( ) ; final fr . gouv . vitam . common . guid . GUID ingestGuid = fr . gouv . vitam . common . guid . GUIDFactory . newGUID ( ) ; final fr . gouv . vitam . common . guid . GUID conatinerGuid = fr . gouv . vitam . common . guid . GUIDFactory . newGUID ( ) ; final fr . gouv . vitam . logbook . common . parameters . LogbookOperationParameters externalOperationParameters1 = fr . gouv . vitam . logbook . common . parameters . LogbookParametersFactory . newLogbookOperationParameters ( ingestGuid , "Ingest<sp>external" , conatinerGuid , LogbookTypeProcess . INGEST , StatusCode . STARTED , "Start<sp>Ingest<sp>external" , conatinerGuid ) ; final fr . gouv . vitam . logbook . common . parameters . LogbookOperationParameters externalOperationParameters2 = fr . gouv . vitam . logbook . common . parameters . LogbookParametersFactory . newLogbookOperationParameters ( ingestGuid , "Ingest<sp>external" , conatinerGuid , LogbookTypeProcess . INGEST , StatusCode . OK , "End<sp>Ingest<sp>external" , conatinerGuid ) ; operationList . add ( externalOperationParameters1 ) ; operationList . add ( externalOperationParameters2 ) ; java . io . InputStream inputStreamATR = fr . gouv . vitam . common . PropertiesUtils . getResourceAsStream ( "ATR_example.xml" ) ; when ( fr . gouv . vitam . ingest . internal . client . IngestInternalClientRestTest . mockLogbook . post ( ) ) . thenReturn ( javax . ws . rs . core . Response . status ( Status . CREATED ) . build ( ) ) ; when ( fr . gouv . vitam . ingest . internal . client . IngestInternalClientRestTest . mock . post ( ) ) . thenReturn ( javax . ws . rs . core . Response . status ( Status . OK ) . entity ( fr . gouv . vitam . common . FileUtil . readInputStream ( inputStreamATR ) ) . build ( ) ) ; final java . io . InputStream inputStream = fr . gouv . vitam . common . PropertiesUtils . getResourceAsStream ( "SIP_bordereau_avec_objet_OK.zip" ) ; final javax . ws . rs . core . Response response2 = fr . gouv . vitam . ingest . internal . client . IngestInternalClientRestTest . client . uploadInitialLogbook ( operationList ) ; "<AssertPlaceHolder>" ; fr . gouv . vitam . common . model . processing . WorkFlow workflow = fr . gouv . vitam . common . model . processing . WorkFlow . of ( fr . gouv . vitam . ingest . internal . client . IngestInternalClientRestTest . WROKFLOW_ID , fr . gouv . vitam . ingest . internal . client . IngestInternalClientRestTest . WROKFLOW_IDENTIFIER , fr . gouv . vitam . ingest . internal . client . IngestInternalClientRestTest . INGEST ) ; fr . gouv . vitam . ingest . internal . client . IngestInternalClientRestTest . client . upload ( inputStream , CommonMediaType . ZIP_TYPE , workflow , fr . gouv . vitam . ingest . internal . client . IngestInternalClientRestTest . X_ACTION ) ; } getStatus ( ) { return status ; }
org . junit . Assert . assertEquals ( response2 . getStatus ( ) , Status . CREATED . getStatusCode ( ) )
signWithMultipleDataFiles ( ) { byte [ ] digest = java . security . MessageDigest . getInstance ( "SHA-256" ) . digest ( "hello" . getBytes ( ) ) ; org . digidoc4j . DigestDataFile digestDataFile = new org . digidoc4j . DigestDataFile ( "hello.txt" , DigestAlgorithm . SHA256 , digest ) ; byte [ ] digest2 = java . security . MessageDigest . getInstance ( "SHA-256" ) . digest ( "hello2" . getBytes ( ) ) ; org . digidoc4j . DigestDataFile digestDataFile2 = new org . digidoc4j . DigestDataFile ( "hello2.txt" , DigestAlgorithm . SHA256 , digest2 ) ; org . digidoc4j . Signature signature = org . digidoc4j . DetachedXadesSignatureBuilder . withConfiguration ( new org . digidoc4j . Configuration ( ) ) . withDataFile ( digestDataFile ) . withDataFile ( digestDataFile2 ) . withSignatureToken ( pkcs12EccSignatureToken ) . invokeSigning ( ) ; "<AssertPlaceHolder>" ; } validateSignature ( ) { org . digidoc4j . impl . asic . AsicSignature . logger . debug ( "Validating<sp>signature" ) ; if ( ( validationResult ) == null ) { validationResult = this . validator . extractResult ( ) ; org . digidoc4j . impl . asic . AsicSignature . logger . info ( ( ( ( ( "Signature<sp>has<sp>" + ( validationResult . getErrors ( ) . size ( ) ) ) + "<sp>validation<sp>errors<sp>and<sp>" ) + ( validationResult . getWarnings ( ) . size ( ) ) ) + "<sp>warnings" ) ) ; } else { org . digidoc4j . impl . asic . AsicSignature . logger . debug ( ( ( ( ( "Using<sp>existing<sp>validation<sp>errors<sp>with<sp>" + ( validationResult . getErrors ( ) . size ( ) ) ) + "<sp>validation<sp>errors<sp>and<sp>" ) + ( validationResult . getWarnings ( ) . size ( ) ) ) + "<sp>warnings" ) ) ; } return validationResult ; }
org . junit . Assert . assertTrue ( signature . validateSignature ( ) . isValid ( ) )
testIn ( ) { com . j256 . ormlite . stmt . Where < com . j256 . ormlite . stmt . Foo , java . lang . String > where = new com . j256 . ormlite . stmt . Where < com . j256 . ormlite . stmt . Foo , java . lang . String > ( createTableInfo ( ) , null , databaseType ) ; int val = 112 ; where . in ( Foo . VAL_COLUMN_NAME , val ) ; java . lang . StringBuilder whereSb = new java . lang . StringBuilder ( ) ; where . appendSql ( null , whereSb , new java . util . ArrayList < com . j256 . ormlite . stmt . ArgumentHolder > ( ) ) ; java . lang . StringBuilder sb = new java . lang . StringBuilder ( ) ; databaseType . appendEscapedEntityName ( sb , Foo . VAL_COLUMN_NAME ) ; sb . append ( "<sp>IN<sp>(" ) ; sb . append ( val ) . append ( "<sp>)<sp>" ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( ( getClass ( ) . getSimpleName ( ) ) + ":name=" ) + ( field . getName ( ) ) ) + ",class=" ) + ( field . getDeclaringClass ( ) . getSimpleName ( ) ) ; }
org . junit . Assert . assertEquals ( sb . toString ( ) , whereSb . toString ( ) )
parseTime ( ) { java . time . LocalTime time = ( ( java . time . LocalTime ) ( org . jpmml . evaluator . TypeUtil . parse ( DataType . TIME , org . jpmml . evaluator . TypeUtilTest . TIME ) ) ) ; "<AssertPlaceHolder>" ; } getDataType ( java . lang . Object ) { if ( value instanceof java . lang . String ) { return org . dmg . pmml . DataType . STRING ; } else if ( value instanceof java . lang . Integer ) { return org . dmg . pmml . DataType . INTEGER ; } else if ( value instanceof java . lang . Float ) { return org . dmg . pmml . DataType . FLOAT ; } else if ( value instanceof java . lang . Double ) { return org . dmg . pmml . DataType . DOUBLE ; } else if ( value instanceof java . lang . Boolean ) { return org . dmg . pmml . DataType . BOOLEAN ; } else if ( value instanceof java . time . LocalDate ) { return org . dmg . pmml . DataType . DATE ; } else if ( value instanceof java . time . LocalTime ) { return org . dmg . pmml . DataType . TIME ; } else if ( value instanceof java . time . LocalDateTime ) { return org . dmg . pmml . DataType . DATE_TIME ; } else if ( value instanceof org . jpmml . evaluator . DaysSinceDate ) { org . jpmml . evaluator . DaysSinceDate period = ( ( org . jpmml . evaluator . DaysSinceDate ) ( value ) ) ; return org . jpmml . evaluator . TypeUtil . getDaysDataType ( period . getEpoch ( ) ) ; } else if ( value instanceof org . jpmml . evaluator . SecondsSinceMidnight ) { return org . dmg . pmml . DataType . TIME_SECONDS ; } else if ( value instanceof org . jpmml . evaluator . SecondsSinceDate ) { org . jpmml . evaluator . SecondsSinceDate period = ( ( org . jpmml . evaluator . SecondsSinceDate ) ( value ) ) ; return org . jpmml . evaluator . TypeUtil . getSecondsDataType ( period . getEpoch ( ) ) ; } throw new org . jpmml . evaluator . EvaluationException ( ( "No<sp>PMML<sp>data<sp>type<sp>for<sp>Java<sp>data<sp>type<sp>" + ( value != null ? value . getClass ( ) . getName ( ) : null ) ) ) ; }
org . junit . Assert . assertEquals ( DataType . TIME , org . jpmml . evaluator . TypeUtil . getDataType ( time ) )
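Reassembled, the parseTime pair is a parse/getDataType round trip. A minimal sketch, assuming the jpmml-evaluator calls quoted in the row; the class is placed in the org.jpmml.evaluator package so TypeUtil resolves as in the original test, and the concrete time string is a hypothetical stand-in for TypeUtilTest.TIME, whose value is not shown in the row:

    package org.jpmml.evaluator;

    import java.time.LocalTime;
    import org.dmg.pmml.DataType;
    import org.junit.Assert;
    import org.junit.Test;

    public class ParseTimeSketch {

        // Hypothetical stand-in for org.jpmml.evaluator.TypeUtilTest.TIME; the real literal is not part of the row.
        private static final String TIME = "13:20:59";

        @Test
        public void parseTime() {
            // Parse the string into a LocalTime, as the row's test does.
            LocalTime time = (LocalTime) TypeUtil.parse(DataType.TIME, TIME);

            // Expected assertion from the output column: the parsed value maps back to DataType.TIME.
            Assert.assertEquals(DataType.TIME, TypeUtil.getDataType(time));
        }
    }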
get ( ) { org . searchisko . api . service . ProviderService tested = getTested ( ) ; java . util . Map < java . lang . String , java . lang . Object > expected = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; org . mockito . Mockito . when ( tested . entityService . get ( "aaa" ) ) . thenReturn ( expected ) ; "<AssertPlaceHolder>" ; org . mockito . Mockito . verify ( tested . entityService ) . get ( "aaa" ) ; org . mockito . Mockito . verifyNoMoreInteractions ( tested . entityService ) ; } get ( java . lang . String ) { java . security . Principal principal = securityContext . getUserPrincipal ( ) ; if ( principal == null ) { return super . getFiltered ( id , fieldsToRemove ) ; } else { return super . get ( id ) ; } }
org . junit . Assert . assertEquals ( expected , tested . get ( "aaa" ) )
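The Searchisko pair follows a common Mockito shape: stub a collaborator, assert on the facade's return value (the output column fills the placeholder), then verify the interaction. A self-contained sketch of that shape; KeyValueStore and CachingFacade are hypothetical names standing in for the row's entityService and ProviderService:

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;
    import static org.mockito.Mockito.verifyNoMoreInteractions;
    import static org.mockito.Mockito.when;

    import java.util.HashMap;
    import java.util.Map;
    import org.junit.Assert;
    import org.junit.Test;

    public class StubAssertVerifySketch {

        // Hypothetical collaborator standing in for the row's entityService.
        interface KeyValueStore {
            Map<String, Object> get(String id);
        }

        // Hypothetical facade standing in for the row's ProviderService.
        static class CachingFacade {
            final KeyValueStore store;
            CachingFacade(KeyValueStore store) { this.store = store; }
            Map<String, Object> get(String id) { return store.get(id); }
        }

        @Test
        public void get() {
            KeyValueStore store = mock(KeyValueStore.class);
            CachingFacade tested = new CachingFacade(store);
            Map<String, Object> expected = new HashMap<String, Object>();
            when(store.get("aaa")).thenReturn(expected);

            // Assertion first (this is what the output column supplies), then the interaction checks.
            Assert.assertEquals(expected, tested.get("aaa"));
            verify(store).get("aaa");
            verifyNoMoreInteractions(store);
        }
    }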
validate_shouldPassValidationIfFieldLengthsAreCorrect ( ) { org . openmrs . ConceptNameTag cnt = new org . openmrs . ConceptNameTag ( ) ; cnt . setTag ( "tag" ) ; cnt . setVoidReason ( "VoidReason" ) ; org . springframework . validation . Errors errors = new org . springframework . validation . BindException ( cnt , "cnt" ) ; new org . openmrs . validator . ConceptNameTagValidator ( ) . validate ( cnt , errors ) ; "<AssertPlaceHolder>" ; } hasErrors ( ) { return erroneous ; }
org . junit . Assert . assertFalse ( errors . hasErrors ( ) )
testDataTagAddNode ( ) { cern . c2mon . shared . daq . serialization . DataTagAdd dataTagAdd = createDataTagAdd ( ) ; cern . c2mon . shared . daq . serialization . DataTagAdd anotherDataTagAdd = createDataTagAdd ( ) ; cern . c2mon . shared . daq . serialization . ChangeRequest changeRequest = new cern . c2mon . shared . daq . serialization . ChangeRequest ( ) ; changeRequest . addChange ( dataTagAdd ) ; changeRequest . addChange ( anotherDataTagAdd ) ; java . lang . String jsonRequest = cern . c2mon . shared . daq . serialization . MessageConverter . requestToJson ( changeRequest ) ; cern . c2mon . shared . daq . serialization . ChangeRequest daqRequest = ( ( cern . c2mon . shared . daq . serialization . ChangeRequest ) ( cern . c2mon . shared . daq . serialization . MessageConverter . requestFromJson ( jsonRequest ) ) ) ; "<AssertPlaceHolder>" ; } requestFromJson ( java . lang . String ) { cern . c2mon . shared . daq . messaging . ServerRequest result = null ; try { result = cern . c2mon . shared . daq . serialization . MessageConverter . mapper . readValue ( tagUpdateJson , cern . c2mon . shared . daq . messaging . ServerRequest . class ) ; } catch ( java . io . IOException e ) { log . warn ( ( "Error<sp>occurred<sp>while<sp>converting<sp>the<sp>json<sp>string<sp>back<sp>to<sp>an<sp>object:<sp>" + ( e . getMessage ( ) ) ) ) ; } return result ; }
org . junit . Assert . assertEquals ( changeRequest , daqRequest )
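The c2mon pair asserts round-trip equality: a request serialized to JSON and parsed back must equal the original, which presupposes value-based equals() on the payload. A self-contained sketch of the same pattern using Jackson (an ObjectMapper.readValue call is visible in the row's converter); the Payload type here is hypothetical, not a c2mon class:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.Objects;
    import org.junit.Assert;
    import org.junit.Test;

    public class JsonRoundTripSketch {

        // Hypothetical payload; value-based equals() is what makes the round-trip assertion meaningful.
        static class Payload {
            public long id;
            public String name;

            @Override
            public boolean equals(Object o) {
                if (this == o) return true;
                if (!(o instanceof Payload)) return false;
                Payload other = (Payload) o;
                return id == other.id && Objects.equals(name, other.name);
            }

            @Override
            public int hashCode() {
                return Objects.hash(id, name);
            }
        }

        @Test
        public void roundTrip() throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            Payload original = new Payload();
            original.id = 42L;
            original.name = "dataTagAdd";

            String json = mapper.writeValueAsString(original);
            Payload parsed = mapper.readValue(json, Payload.class);

            // Same shape as the row's assertion: the deserialized request equals the one that was serialized.
            Assert.assertEquals(original, parsed);
        }
    }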
testMatchesGoogleVersionMurmur332SubBuffers ( ) { org . spark_project . guava . hash . HashCode hashCode = org . spark_project . guava . hash . Hashing . murmur3_32 ( 0 ) . hashBytes ( sampleData , sampleOffset , sampleLength ) ; int actual = hashCode . asInt ( ) ; int hash = murmur32 . hash ( sampleData , sampleOffset , sampleLength ) ; "<AssertPlaceHolder>" ; } hash ( byte [ ] , int , int ) { int pos = offset ; int visited = 0 ; int h = seed ; while ( ( length - visited ) >= 4 ) { int k1 = com . splicemachine . primitives . LittleEndianBits . toInt ( bytes , pos ) ; h = mutate ( h , k1 ) ; pos += 4 ; visited += 4 ; } h = updatePartial ( bytes , length , pos , h , visited ) ; return finalize ( h ) ; }
org . junit . Assert . assertEquals ( actual , hash )
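The murmur3 pair checks that a project-specific hasher agrees with Guava's murmur3_32 over a sub-range of a buffer. The project hasher is not reproduced here; this sketch only illustrates the sub-buffer contract being tested, using plain Guava (com.google.common.hash) rather than the shaded org.spark_project package from the row, and hypothetical sample data:

    import java.util.Arrays;
    import org.junit.Assert;
    import org.junit.Test;

    import com.google.common.hash.HashCode;
    import com.google.common.hash.Hashing;

    public class Murmur3SubBufferSketch {

        @Test
        public void subBufferEqualsCopiedSlice() {
            // Hypothetical stand-ins for the row's sampleData/sampleOffset/sampleLength fields.
            byte[] sampleData = "some sample payload".getBytes();
            int sampleOffset = 5;
            int sampleLength = 6;

            // Hash the (data, offset, length) window directly.
            HashCode windowed = Hashing.murmur3_32(0).hashBytes(sampleData, sampleOffset, sampleLength);

            // Hash a copied slice of the same bytes.
            HashCode slice = Hashing.murmur3_32(0)
                .hashBytes(Arrays.copyOfRange(sampleData, sampleOffset, sampleOffset + sampleLength));

            // The two must match; this is the sub-buffer contract the row's custom
            // murmur32.hash(bytes, offset, length) is held to against Guava's result.
            Assert.assertEquals(windowed.asInt(), slice.asInt());
        }
    }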
flatMap_A$Function1 ( ) { com . m3 . scalaflavor4j . Nil < java . lang . String > target = com . m3 . scalaflavor4j . Nil . < java . lang . String > apply ( ) ; com . m3 . scalaflavor4j . F1 < java . lang . String , com . m3 . scalaflavor4j . CollectionLike < java . lang . String > > f = new com . m3 . scalaflavor4j . F1 < java . lang . String , com . m3 . scalaflavor4j . CollectionLike < java . lang . String > > ( ) { public com . m3 . scalaflavor4j . CollectionLike < java . lang . String > apply ( java . lang . String v1 ) { return null ; } } ; com . m3 . scalaflavor4j . Seq < java . lang . String > actual = target . flatMap ( f ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return ( ( resource ) == null ) || ( ( resource . getResource ( ) ) == null ) ; }
org . junit . Assert . assertThat ( actual . isEmpty ( ) , org . hamcrest . CoreMatchers . is ( true ) )
testDeleteColumnsWithDiffColsAndTags ( ) { org . apache . hadoop . hbase . TableName tableName = createTable ( 5 ) ; try ( org . apache . hadoop . hbase . client . Table table = org . apache . hadoop . hbase . security . visibility . TEST_UTIL . getConnection ( ) . getTable ( tableName ) ) { org . apache . hadoop . hbase . client . Put put = new org . apache . hadoop . hbase . client . Put ( org . apache . hadoop . hbase . util . Bytes . toBytes ( "row1" ) ) ; put . addColumn ( fam , qual1 , 125L , value ) ; put . setCellVisibility ( new org . apache . hadoop . hbase . security . visibility . CellVisibility ( CONFIDENTIAL ) ) ; table . put ( put ) ; put = new org . apache . hadoop . hbase . client . Put ( org . apache . hadoop . hbase . util . Bytes . toBytes ( "row1" ) ) ; put . addColumn ( fam , qual1 , 126L , value ) ; put . setCellVisibility ( new org . apache . hadoop . hbase . security . visibility . CellVisibility ( SECRET ) ) ; table . put ( put ) ; org . apache . hadoop . hbase . security . visibility . TEST_UTIL . getAdmin ( ) . flush ( tableName ) ; java . security . PrivilegedExceptionAction < java . lang . Void > actiona = new java . security . PrivilegedExceptionAction < java . lang . Void > ( ) { @ org . apache . hadoop . hbase . security . visibility . Override public org . apache . hadoop . hbase . security . visibility . Void run ( ) throws org . apache . hadoop . hbase . security . visibility . Exception { org . apache . hadoop . hbase . client . Delete d1 = new org . apache . hadoop . hbase . client . Delete ( row1 ) ; d1 . setCellVisibility ( new org . apache . hadoop . hbase . security . visibility . CellVisibility ( SECRET ) ) ; d1 . addColumns ( fam , qual , 126L ) ; org . apache . hadoop . hbase . client . Delete d2 = new org . apache . hadoop . hbase . client . Delete ( row1 ) ; d2 . setCellVisibility ( new org . apache . hadoop . hbase . security . visibility . CellVisibility ( CONFIDENTIAL ) ) ; d2 . addColumns ( fam , qual1 , 125L ) ; try ( org . apache . hadoop . hbase . client . Connection connection = org . apache . hadoop . hbase . client . ConnectionFactory . createConnection ( conf ) ; org . apache . hadoop . hbase . client . Table table = connection . getTable ( tableName ) ) { table . delete ( org . apache . hadoop . hbase . security . visibility . TestVisibilityLabelsWithDeletes . createList ( d1 , d2 ) ) ; } catch ( java . lang . Throwable t ) { throw new java . io . IOException ( t ) ; } return null ; } } ; org . apache . hadoop . hbase . security . visibility . SUPERUSER . runAs ( actiona ) ; org . apache . hadoop . hbase . client . Scan s = new org . apache . hadoop . hbase . client . Scan ( ) ; s . readVersions ( 5 ) ; s . setAuthorizations ( new org . apache . hadoop . hbase . security . visibility . Authorizations ( SECRET , CONFIDENTIAL ) ) ; org . apache . hadoop . hbase . client . ResultScanner scanner = table . getScanner ( s ) ; org . apache . hadoop . hbase . client . Result [ ] next = scanner . next ( 3 ) ; "<AssertPlaceHolder>" ; } } next ( int ) { java . lang . StringBuilder sb = new java . lang . StringBuilder ( uri ) ; sb . append ( "?n=" ) ; sb . append ( nbRows ) ; for ( int i = 0 ; i < ( maxRetries ) ; i ++ ) { org . apache . hadoop . hbase . rest . client . Response response = client . get ( sb . toString ( ) , Constants . MIMETYPE_PROTOBUF ) ; int code = response . getCode ( ) ; switch ( code ) { case 200 : org . apache . hadoop . hbase . rest . model . CellSetModel model = new org . apache . hadoop . hbase . rest . model . CellSetModel ( ) ; model . getObjectFromMessage ( response . getBody ( ) ) ; return buildResultFromModel ( model ) ; case 204 : case 206 : return null ; case 509 : try { java . lang . Thread . sleep ( sleepTime ) ; } catch ( java . lang . InterruptedException e ) { throw ( ( java . io . InterruptedIOException ) ( new java . io . InterruptedIOException ( ) . initCause ( e ) ) ) ; } break ; default : throw new java . io . IOException ( ( "scanner.next<sp>request<sp>failed<sp>with<sp>" + code ) ) ; } } throw new java . io . IOException ( "scanner.next<sp>request<sp>timed<sp>out" ) ; }
org . junit . Assert . assertEquals ( 1 , next . length )
testIsComplete_noRecips_emptyTxs ( ) { org . nhindirect . monitor . condition . impl . GeneralCompletionCondition condition = new org . nhindirect . monitor . condition . impl . GeneralCompletionCondition ( ) ; org . nhindirect . common . tx . model . Tx originalMessage = org . nhindirect . monitor . util . TestUtils . makeMessage ( TxMessageType . IMF , "" , java . util . UUID . randomUUID ( ) . toString ( ) , "gm2552@cerner.com" , "" , "" ) ; java . util . Collection < org . nhindirect . common . tx . model . Tx > txs = new java . util . ArrayList < org . nhindirect . common . tx . model . Tx > ( ) ; txs . add ( originalMessage ) ; java . util . Collection < java . lang . String > recips = condition . getIncompleteRecipients ( txs ) ; "<AssertPlaceHolder>" ; } toString ( ) { return digestString ; }
org . junit . Assert . assertEquals ( 0 , recips . size ( ) )
equalsTest2 ( ) { java . lang . Object obj = new java . lang . Object ( ) ; niciraActionDeserializerKey = new org . opendaylight . openflowjava . nx . api . NiciraActionDeserializerKey ( org . opendaylight . openflowjava . nx . api . NiciraActionDeserializerKeyTest . VERSION , 10 ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == object ) { return true ; } if ( ( object == null ) || ( ( getClass ( ) ) != ( object . getClass ( ) ) ) ) { return false ; } org . opendaylight . openflowplugin . applications . southboundcli . util . OFNode ofNode = ( ( org . opendaylight . openflowplugin . applications . southboundcli . util . OFNode ) ( object ) ) ; return ( nodeId ) != null ? nodeId . equals ( ofNode . nodeId ) : ( ofNode . nodeId ) == null ; }
org . junit . Assert . assertFalse ( niciraActionDeserializerKey . equals ( obj ) )
retrieveDefaultClient ( ) { "<AssertPlaceHolder>" ; }
org . junit . Assert . assertFalse ( ( ( client ) == null ) )