idx
int64 0
41.2k
| question
stringlengths 74
4.21k
| target
stringlengths 5
888
|
---|---|---|
41,000 | public DataFrame createMultiInstanceGet ( byte nodeId , byte commandClass ) { if ( getVersion ( ) > 1 ) { throw new ZWaveRuntimeException ( "MULTI_INSTANCE_GET is deprecated for command class versions > 1" ) ; } return createSendDataFrame ( "MULTI_INSTANCE_GET" , nodeId , new byte [ ] { MultiInstanceCommandClass . ID , MULTI_INSTANCE_GET , commandClass } , true ) ; } | Create a MULTI_INSTANCE_GET command . |
41,001 | public DataFrame createMultiChannelEndPointGet ( byte nodeId ) { if ( getVersion ( ) < 2 ) { throw new ZWaveRuntimeException ( "MULTI_CHANNEL_END_POINT_GET is not available in command class version 1" ) ; } return createSendDataFrame ( "MULTI_CHANNEL_END_POINT_GET" , nodeId , new byte [ ] { MultiInstanceCommandClass . ID , MULTI_CHANNEL_END_POINT_GET } , true ) ; } | Create a MULTI_CHANNEL_END_POINT_GET command . |
41,002 | public DataFrame createMultiChannelCapabilityGet ( byte nodeId , byte endPoint ) { if ( getVersion ( ) < 2 ) { throw new ZWaveRuntimeException ( "MULTI_CHANNEL_CAPABILITY_GET is not available in command class version 1" ) ; } return createSendDataFrame ( "MULTI_CHANNEL_CAPABILITY_GET" , nodeId , new byte [ ] { MultiInstanceCommandClass . ID , MULTI_CHANNEL_CAPABILITY_GET , endPoint } , true ) ; } | Create a MULTI_CHANNEL_CAPABILITY_GET command . |
41,003 | boolean attemptResend ( ZWaveChannelContext ctx , boolean dueToCAN ) { if ( startFrame . getSendCount ( ) < MAX_SEND_COUNT ) { logger . debug ( "Transaction {} has failed - will reset and resend initial request" , getId ( ) ) ; reset ( ) ; if ( dueToCAN ) { startFrame . decrementSendCount ( ) ; } ctx . writeFrame ( new OutboundDataFrame ( startFrame , isListeningNode ( ) ) ) ; return true ; } else { logger . debug ( "Exceeded max transaction resends" ) ; ctx . fireEvent ( new TransactionFailedEvent ( getId ( ) , startFrame ) ) ; return false ; } } | Attempts to re - send the data frame that initiated this transaction . |
41,004 | public Map < String , Object > restore ( PersistenceContext ctx , byte nodeId ) { Map < String , Object > map = ctx . getCommandClassMap ( nodeId , getId ( ) ) ; this . version = ( int ) map . get ( "version" ) ; return map ; } | Restores details about this command class from a persistence context . |
41,005 | public Map < String , Object > save ( PersistenceContext ctx , byte nodeId ) { Map < String , Object > map = ctx . getCommandClassMap ( nodeId , getId ( ) ) ; map . put ( "version" , getVersion ( ) ) ; return map ; } | Saves details about this command class to a persistence context . |
41,006 | static protected DataFrame createSendDataFrame ( String name , byte nodeId , byte [ ] data , boolean isResponseExpected ) { return new SendData ( name , nodeId , data , ( byte ) ( SendData . TRANSMIT_OPTION_ACK | SendData . TRANSMIT_OPTION_AUTO_ROUTE ) , isResponseExpected ) ; } | Convenience method for creating SendData frames |
41,007 | public DataFrame createGet ( byte nodeId , Scale s ) { switch ( getVersion ( ) ) { case 1 : return createSendDataFrame ( "METER_GET" , nodeId , new byte [ ] { MeterCommandClass . ID , METER_GET } , true ) ; default : { byte scale = scaleToByte ( s ) ; byte b = ( byte ) ( ( scale << 3 ) & 0x18 ) ; return createSendDataFrame ( "METER_GET" , nodeId , new byte [ ] { MeterCommandClass . ID , METER_GET , b } , true ) ; } } } | Create a Get data frame . |
41,008 | public void setOnHeadViewBindViewHolderListener ( FamiliarRecyclerView . OnHeadViewBindViewHolderListener onHeadViewBindViewHolderListener ) { if ( null != mWrapFamiliarRecyclerViewAdapter ) { mWrapFamiliarRecyclerViewAdapter . setOnHeadViewBindViewHolderListener ( onHeadViewBindViewHolderListener ) ; } else { mTempOnHeadViewBindViewHolderListener = onHeadViewBindViewHolderListener ; } } | HeadView onBindViewHolder callback |
41,009 | public void setOnFooterViewBindViewHolderListener ( FamiliarRecyclerView . OnFooterViewBindViewHolderListener onFooterViewBindViewHolderListener ) { if ( null != mWrapFamiliarRecyclerViewAdapter ) { mWrapFamiliarRecyclerViewAdapter . setOnFooterViewBindViewHolderListener ( onFooterViewBindViewHolderListener ) ; } else { mTempOnFooterViewBindViewHolderListener = onFooterViewBindViewHolderListener ; } } | FooterView onBindViewHolder callback |
41,010 | public void autoRefresh ( ) { if ( ! isPullRefreshEnabled ) return ; setRefreshing ( true ) ; new android . os . Handler ( ) . postDelayed ( new Runnable ( ) { public void run ( ) { callOnPullRefresh ( ) ; } } , 1000 ) ; } | Automatic pull refresh |
41,011 | protected RuleSet readRules ( MavenProject rootModule ) throws MojoExecutionException { List < RuleSource > sources = new ArrayList < > ( ) ; PluginRepository pluginRepository = pluginRepositoryProvider . getPluginRepository ( ) ; if ( rulesUrl != null ) { getLog ( ) . debug ( "Retrieving rules from URL " + rulesUrl . toString ( ) ) ; sources . add ( new UrlRuleSource ( rulesUrl ) ) ; } else { addRuleFiles ( sources , ProjectResolver . getRulesDirectory ( rootModule , rulesDirectory ) ) ; if ( rulesDirectories != null ) { for ( String directory : rulesDirectories ) { addRuleFiles ( sources , ProjectResolver . getRulesDirectory ( rootModule , directory ) ) ; } } List < RuleSource > ruleSources = pluginRepository . getRulePluginRepository ( ) . getRuleSources ( ) ; sources . addAll ( ruleSources ) ; } Collection < RuleParserPlugin > ruleParserPlugins ; try { ruleParserPlugins = pluginRepository . getRuleParserPluginRepository ( ) . getRuleParserPlugins ( getRuleConfiguration ( ) ) ; } catch ( RuleException e ) { throw new MojoExecutionException ( "Cannot get rules rule source reader plugins." , e ) ; } try { RuleParser ruleParser = new RuleParser ( ruleParserPlugins ) ; return ruleParser . parse ( sources ) ; } catch ( RuleException e ) { throw new MojoExecutionException ( "Cannot read rules." , e ) ; } } | Reads the available rules from the rules directory and deployed catalogs . |
41,012 | private void addRuleFiles ( List < RuleSource > sources , File directory ) throws MojoExecutionException { List < RuleSource > ruleSources = readRulesDirectory ( directory ) ; for ( RuleSource ruleSource : ruleSources ) { getLog ( ) . debug ( "Adding rules from file " + ruleSource ) ; sources . add ( ruleSource ) ; } } | Add rules from the given directory to the list of sources . |
41,013 | private List < RuleSource > readRulesDirectory ( File rulesDirectory ) throws MojoExecutionException { if ( rulesDirectory . exists ( ) && ! rulesDirectory . isDirectory ( ) ) { throw new MojoExecutionException ( rulesDirectory . getAbsolutePath ( ) + " does not exist or is not a directory." ) ; } getLog ( ) . info ( "Reading rules from directory " + rulesDirectory . getAbsolutePath ( ) ) ; try { return FileRuleSource . getRuleSources ( rulesDirectory ) ; } catch ( IOException e ) { throw new MojoExecutionException ( "Cannot read rulesDirectory: " + rulesDirectory . getAbsolutePath ( ) , e ) ; } } | Retrieves the list of available rules from the rules directory . |
41,014 | protected void execute ( StoreOperation storeOperation , MavenProject rootModule , Set < MavenProject > executedModules ) throws MojoExecutionException , MojoFailureException { synchronized ( cachingStoreProvider ) { Store store = getStore ( rootModule ) ; if ( isResetStoreBeforeExecution ( ) && executedModules . isEmpty ( ) ) { store . reset ( ) ; } try { storeOperation . run ( rootModule , store ) ; } finally { releaseStore ( store ) ; } } } | Execute an operation with the store . |
41,015 | protected Set < MavenProject > getExecutedModules ( MavenProject rootModule ) { String executionKey = createExecutionKey ( execution ) ; String executedModulesContextKey = AbstractProjectMojo . class . getName ( ) + "#executedModules" ; Map < String , Set < MavenProject > > executedProjectsPerExecutionKey = ( Map < String , Set < MavenProject > > ) rootModule . getContextValue ( executedModulesContextKey ) ; if ( executedProjectsPerExecutionKey == null ) { executedProjectsPerExecutionKey = new HashMap < > ( ) ; rootModule . setContextValue ( executedModulesContextKey , executedProjectsPerExecutionKey ) ; } Set < MavenProject > executedProjects = executedProjectsPerExecutionKey . get ( executionKey ) ; if ( executedProjects == null ) { executedProjects = new HashSet < > ( ) ; executedProjectsPerExecutionKey . put ( executionKey , executedProjects ) ; } return executedProjects ; } | Determine the already executed modules for a given root module . |
41,016 | private Store getStore ( MavenProject rootModule ) throws MojoExecutionException { StoreConfiguration configuration = getStoreConfiguration ( rootModule ) ; List < Class < ? > > descriptorTypes ; try { descriptorTypes = pluginRepositoryProvider . getPluginRepository ( ) . getModelPluginRepository ( ) . getDescriptorTypes ( ) ; } catch ( PluginRepositoryException e ) { throw new MojoExecutionException ( "Cannot determine model types." , e ) ; } Object existingStore = cachingStoreProvider . getStore ( configuration , descriptorTypes ) ; if ( ! Store . class . isAssignableFrom ( existingStore . getClass ( ) ) ) { throw new MojoExecutionException ( "Cannot re-use store instance from reactor. Either declare the plugin as extension or execute Maven using the property -D" + PROPERTY_STORE_LIFECYCLE + "=" + StoreLifecycle . MODULE + " on the command line." ) ; } return ( Store ) existingStore ; } | Determine the store instance to use for the given root module . |
41,017 | private EmbeddedNeo4jConfiguration getEmbeddedNeo4jConfiguration ( ) { OptionHelper . verifyDeprecatedOption ( PARAMETER_SERVER_ADDRESS , this . serverAddress , PARAMETER_EMBEDDED_LISTEN_ADDRESS ) ; OptionHelper . verifyDeprecatedOption ( PARAMETER_SERVER_PORT , this . serverPort , PARAMETER_EMBEDDED_HTTP_PORT ) ; EmbeddedNeo4jConfiguration embedded = store . getEmbedded ( ) ; EmbeddedNeo4jConfiguration . EmbeddedNeo4jConfigurationBuilder builder = EmbeddedNeo4jConfiguration . builder ( ) ; builder . connectorEnabled ( embedded . isConnectorEnabled ( ) || isConnectorRequired ( ) ) ; builder . listenAddress ( OptionHelper . selectValue ( embedded . getListenAddress ( ) , this . serverAddress , embeddedListenAddress ) ) ; builder . boltPort ( OptionHelper . selectValue ( embedded . getBoltPort ( ) , embeddedBoltPort ) ) ; builder . httpPort ( OptionHelper . selectValue ( embedded . getHttpPort ( ) , this . serverPort , embeddedHttpPort ) ) ; builder . apocEnabled ( OptionHelper . selectValue ( embedded . isApocEnabled ( ) , this . apocEnabled ) ) ; builder . graphAlgorithmsEnabled ( OptionHelper . selectValue ( embedded . isGraphAlgorithmsEnabled ( ) , this . graphAlgorithmsEnabled ) ) ; return builder . build ( ) ; } | Create the configuration for the embedded server . |
41,018 | static Map < MavenProject , List < MavenProject > > getProjects ( List < MavenProject > reactorProjects , String rulesDirectory , boolean useExecutionRootAsProjectRoot ) throws MojoExecutionException { Map < MavenProject , List < MavenProject > > rootModules = new HashMap < > ( ) ; for ( MavenProject reactorProject : reactorProjects ) { MavenProject rootModule = ProjectResolver . getRootModule ( reactorProject , reactorProjects , rulesDirectory , useExecutionRootAsProjectRoot ) ; List < MavenProject > modules = rootModules . get ( rootModule ) ; if ( modules == null ) { modules = new ArrayList < > ( ) ; rootModules . put ( rootModule , modules ) ; } modules . add ( reactorProject ) ; } return rootModules ; } | Aggregate projects to their base projects |
41,019 | static File getRulesDirectory ( MavenProject rootModule , String rulesDirectory ) { File rules = new File ( rulesDirectory ) ; return rules . isAbsolute ( ) ? rules : new File ( rootModule . getBasedir ( ) . getAbsolutePath ( ) + File . separator + rulesDirectory ) ; } | Returns the directory containing rules . |
41,020 | static File getOutputDirectory ( MavenProject rootModule ) { String directoryName = rootModule . getBuild ( ) . getDirectory ( ) + "/" + OUTPUT_DIRECTORY ; File directory = new File ( directoryName ) ; directory . mkdirs ( ) ; return directory ; } | Determines the directory for writing output files . |
41,021 | static File getOutputFile ( MavenProject rootModule , File reportFile , String defaultFile ) throws MojoExecutionException { File selectedXmlReportFile ; if ( reportFile != null ) { selectedXmlReportFile = reportFile ; } else if ( rootModule != null ) { selectedXmlReportFile = new File ( getOutputDirectory ( rootModule ) + "/" + defaultFile ) ; } else { throw new MojoExecutionException ( "Cannot determine report file." ) ; } return selectedXmlReportFile ; } | Determines a report file name . |
41,022 | protected Map < String , Object > getPluginProperties ( ) { Map < String , Object > properties = new HashMap < > ( ) ; if ( scanProperties != null ) { properties . putAll ( scanProperties ) ; } properties . put ( ScanInclude . class . getName ( ) , scanIncludes ) ; return properties ; } | Return the plugin properties . |
41,023 | private boolean isLastModuleInProject ( Set < MavenProject > executedModules , List < MavenProject > projectModules ) { Set < MavenProject > remainingModules = new HashSet < > ( ) ; if ( execution . getPlugin ( ) . getExecutions ( ) . isEmpty ( ) ) { getLog ( ) . debug ( "No configured executions found, assuming CLI invocation." ) ; remainingModules . addAll ( projectModules ) ; } else { for ( MavenProject projectModule : projectModules ) { if ( ProjectResolver . containsBuildPlugin ( projectModule , execution . getPlugin ( ) ) ) { remainingModules . add ( projectModule ) ; } } } remainingModules . removeAll ( executedModules ) ; remainingModules . remove ( currentProject ) ; if ( remainingModules . isEmpty ( ) ) { getLog ( ) . debug ( "Did not find any subsequent module with a plugin configuration." + " Will consider this module as the last one." ) ; return true ; } else { getLog ( ) . debug ( "Found " + remainingModules . size ( ) + " subsequent modules possibly executing this plugin." + " Will NOT consider this module as the last one." ) ; return false ; } } | Determines if the last module for a project is currently executed . |
41,024 | public String deriveKeyFormatted ( String inputPassword ) { PBKDF2Parameters p = getParameters ( ) ; byte [ ] salt = generateSalt ( ) ; p . setSalt ( salt ) ; p . setDerivedKey ( deriveKey ( inputPassword ) ) ; String formatted = getFormatter ( ) . toString ( p ) ; return formatted ; } | Derive key from password then format . |
41,025 | public boolean verifyKeyFormatted ( String formatted , String candidatePassword ) { PBKDF2Parameters p = getParameters ( ) ; PBKDF2Parameters q = new PBKDF2Parameters ( ) ; q . hashAlgorithm = p . hashAlgorithm ; q . hashCharset = p . hashCharset ; boolean verifyOK = false ; if ( ! getFormatter ( ) . fromString ( q , formatted ) ) { try { setParameters ( q ) ; verifyOK = verifyKey ( candidatePassword ) ; } finally { setParameters ( p ) ; } } return verifyOK ; } | Verification function . |
41,026 | protected boolean validatePassword ( String inputPassword , String expectedPassword ) { boolean verifyOK = false ; for ( ; ; ) { if ( inputPassword == null || expectedPassword == null ) { break ; } PBKDF2Parameters p = getEngineParameters ( ) ; if ( p == null ) { break ; } PBKDF2Formatter f = getFormatter ( ) ; if ( f == null ) { break ; } if ( f . fromString ( p , expectedPassword ) ) { break ; } PBKDF2 pBKDF2Engine = getEngine ( p ) ; if ( pBKDF2Engine == null ) { break ; } verifyOK = pBKDF2Engine . verifyKey ( inputPassword ) ; break ; } return verifyOK ; } | Actual salt - enabled verification function . Get parameters from database password then compute candidate derived key from user - supplied password and parameters then compare database derived key and candidate derived key . Login if match . |
41,027 | protected byte [ ] PBKDF2 ( PRF prf , byte [ ] S , int c , int dkLen ) { if ( S == null ) { S = new byte [ 0 ] ; } int hLen = prf . getHLen ( ) ; int l = ceil ( dkLen , hLen ) ; int r = dkLen - ( l - 1 ) * hLen ; byte T [ ] = new byte [ l * hLen ] ; int ti_offset = 0 ; for ( int i = 1 ; i <= l ; i ++ ) { _F ( T , ti_offset , prf , S , c , i ) ; ti_offset += hLen ; } if ( r < hLen ) { byte DK [ ] = new byte [ dkLen ] ; System . arraycopy ( T , 0 , DK , 0 , dkLen ) ; return DK ; } return T ; } | Core Password Based Key Derivation Function 2 . |
41,028 | protected int ceil ( int a , int b ) { int m = 0 ; if ( a % b > 0 ) { m = 1 ; } return a / b + m ; } | Integer division with ceiling function . |
41,029 | protected void _F ( byte [ ] dest , int offset , PRF prf , byte [ ] S , int c , int blockIndex ) { int hLen = prf . getHLen ( ) ; byte U_r [ ] = new byte [ hLen ] ; byte U_i [ ] = new byte [ S . length + 4 ] ; System . arraycopy ( S , 0 , U_i , 0 , S . length ) ; INT ( U_i , S . length , blockIndex ) ; for ( int i = 0 ; i < c ; i ++ ) { U_i = prf . doFinal ( U_i ) ; xor ( U_r , U_i ) ; } System . arraycopy ( U_r , 0 , dest , offset , hLen ) ; } | Function F . |
41,030 | protected void xor ( byte [ ] dest , byte [ ] src ) { for ( int i = 0 ; i < dest . length ; i ++ ) { dest [ i ] ^= src [ i ] ; } } | Block - Xor . Xor source bytes into destination byte buffer . Destination buffer must be same length or less than source buffer . |
41,031 | protected void INT ( byte [ ] dest , int offset , int i ) { dest [ offset + 0 ] = ( byte ) ( i / ( 256 * 256 * 256 ) ) ; dest [ offset + 1 ] = ( byte ) ( i / ( 256 * 256 ) ) ; dest [ offset + 2 ] = ( byte ) ( i / ( 256 ) ) ; dest [ offset + 3 ] = ( byte ) ( i ) ; } | Four - octet encoding of the integer i most significant octet first . |
41,032 | public static String bin2hex ( final byte [ ] b ) { if ( b == null ) { return "" ; } StringBuffer sb = new StringBuffer ( 2 * b . length ) ; for ( int i = 0 ; i < b . length ; i ++ ) { int v = ( 256 + b [ i ] ) % 256 ; sb . append ( hex . charAt ( ( v / 16 ) & 15 ) ) ; sb . append ( hex . charAt ( ( v % 16 ) & 15 ) ) ; } return sb . toString ( ) ; } | Simple binary - to - hexadecimal conversion . |
41,033 | public static byte [ ] hex2bin ( final String s ) { String m = s ; if ( s == null ) { m = "" ; } else if ( s . length ( ) % 2 != 0 ) { m = "0" + s ; } byte r [ ] = new byte [ m . length ( ) / 2 ] ; for ( int i = 0 , n = 0 ; i < m . length ( ) ; n ++ ) { char h = m . charAt ( i ++ ) ; char l = m . charAt ( i ++ ) ; r [ n ] = ( byte ) ( hex2bin ( h ) * 16 + hex2bin ( l ) ) ; } return r ; } | Convert hex string to array of bytes . |
41,034 | public static int hex2bin ( char c ) { if ( c >= '0' && c <= '9' ) { return ( c - '0' ) ; } if ( c >= 'A' && c <= 'F' ) { return ( c - 'A' + 10 ) ; } if ( c >= 'a' && c <= 'f' ) { return ( c - 'a' + 10 ) ; } throw new IllegalArgumentException ( "Input string may only contain hex digits, but found '" + c + "'" ) ; } | Convert hex digit to numerical value . |
41,035 | private void processInstanceEvent ( InstanceContentEvent instanceEvent ) { Instance instance = instanceEvent . getInstance ( ) ; boolean predictionCovered = false ; boolean trainingCovered = false ; boolean continuePrediction = instanceEvent . isTesting ( ) ; boolean continueTraining = instanceEvent . isTraining ( ) ; ErrorWeightedVote errorWeightedVote = newErrorWeightedVote ( ) ; Iterator < PassiveRule > ruleIterator = this . ruleSet . iterator ( ) ; while ( ruleIterator . hasNext ( ) ) { if ( ! continuePrediction && ! continueTraining ) break ; PassiveRule rule = ruleIterator . next ( ) ; if ( rule . isCovering ( instance ) == true ) { predictionCovered = true ; if ( continuePrediction ) { double [ ] vote = rule . getPrediction ( instance ) ; double error = rule . getCurrentError ( ) ; errorWeightedVote . addVote ( vote , error ) ; if ( ! this . unorderedRules ) continuePrediction = false ; } if ( continueTraining ) { if ( ! isAnomaly ( instance , rule ) ) { trainingCovered = true ; rule . updateStatistics ( instance ) ; sendInstanceToRule ( instance , rule . getRuleNumberID ( ) ) ; if ( ! this . unorderedRules ) continueTraining = false ; } } } } if ( predictionCovered ) { ResultContentEvent rce = newResultContentEvent ( errorWeightedVote . computeWeightedVote ( ) , instanceEvent ) ; resultStream . put ( rce ) ; } else if ( instanceEvent . isTesting ( ) ) { double [ ] vote = defaultRule . getPrediction ( instance ) ; ResultContentEvent rce = newResultContentEvent ( vote , instanceEvent ) ; resultStream . put ( rce ) ; } if ( ! trainingCovered && instanceEvent . isTraining ( ) ) { defaultRule . updateStatistics ( instance ) ; if ( defaultRule . getInstancesSeen ( ) % this . gracePeriod == 0.0 ) { if ( defaultRule . tryToExpand ( this . splitConfidence , this . tieThreshold ) == true ) { ActiveRule newDefaultRule = newRule ( defaultRule . getRuleNumberID ( ) , ( RuleActiveRegressionNode ) defaultRule . 
getLearningNode ( ) , ( ( RuleActiveRegressionNode ) defaultRule . getLearningNode ( ) ) . getStatisticsOtherBranchSplit ( ) ) ; defaultRule . split ( ) ; defaultRule . setRuleNumberID ( ++ ruleNumberID ) ; this . ruleSet . add ( new PassiveRule ( this . defaultRule ) ) ; sendAddRuleEvent ( defaultRule . getRuleNumberID ( ) , this . defaultRule ) ; defaultRule = newDefaultRule ; } } } } | Merge predict and train so we only check for covering rules one time |
41,036 | private ResultContentEvent newResultContentEvent ( double [ ] prediction , InstanceContentEvent inEvent ) { ResultContentEvent rce = new ResultContentEvent ( inEvent . getInstanceIndex ( ) , inEvent . getInstance ( ) , inEvent . getClassId ( ) , prediction , inEvent . isLastEvent ( ) ) ; rce . setClassifierIndex ( this . processorId ) ; rce . setEvaluationIndex ( inEvent . getEvaluationIndex ( ) ) ; return rce ; } | Helper method to generate new ResultContentEvent based on an instance and its prediction result . |
41,037 | public double [ ] getVotesForInstance ( Instance inst ) { double [ ] ret ; inst . setDataset ( dataset ) ; if ( this . isInit == false ) { ret = new double [ dataset . numClasses ( ) ] ; } else { ret = learner . getVotesForInstance ( inst ) ; } return ret ; } | Predicts the class memberships for a given instance . If an instance is unclassified the returned array elements must be all zero . |
41,038 | private KeyFinder < S4Event > getKeyFinder ( ) { KeyFinder < S4Event > keyFinder = new KeyFinder < S4Event > ( ) { public List < String > get ( S4Event s4event ) { List < String > results = new ArrayList < String > ( ) ; results . add ( s4event . getKey ( ) ) ; return results ; } } ; return keyFinder ; } | KeyFinder sets the keys for a specific event . |
41,039 | protected void onCreate ( ) { logger . debug ( "PE ID {}" , getId ( ) ) ; if ( this . processor != null ) { this . processor = this . processor . newProcessor ( this . processor ) ; this . processor . onCreate ( Integer . parseInt ( getId ( ) ) ) ; } } | Methods from ProcessingElement |
41,040 | public Object getPreparedClassOption ( ClassOption opt ) { if ( this . classOptionNamesToPreparedObjects == null ) { this . prepareForUse ( ) ; } return this . classOptionNamesToPreparedObjects . get ( opt . getName ( ) ) ; } | Gets a prepared option of this class . |
41,041 | public boolean injectNextEvent ( ) { if ( this . getProcessor ( ) . hasNext ( ) ) { ContentEvent event = this . getProcessor ( ) . nextEvent ( ) ; this . getOutputStream ( ) . put ( event ) ; return true ; } return false ; } | If there are available events first event in the queue will be sent out on the output stream . |
41,042 | public boolean process ( ContentEvent event ) { if ( event instanceof ClusteringContentEvent ) { ClusteringContentEvent cce = ( ClusteringContentEvent ) event ; outputStream . put ( event ) ; if ( cce . isSample ( ) ) { evaluationStream . put ( new ClusteringEvaluationContentEvent ( null , new DataPoint ( cce . getInstance ( ) , numInstances ++ ) , cce . isLastEvent ( ) ) ) ; } } else if ( event instanceof ClusteringEvaluationContentEvent ) { evaluationStream . put ( event ) ; } return true ; } | Process event . |
41,043 | public double [ ] getVotesForInstance ( Instance inst ) { double [ ] votes = new double [ getNumberOfClasses ( ) ] ; for ( int classIndex = 0 ; classIndex < votes . length ; classIndex ++ ) { votes [ classIndex ] = Math . log ( getPrior ( classIndex ) ) ; for ( int index = 0 ; index < inst . numAttributes ( ) ; index ++ ) { int attributeID = inst . index ( index ) ; if ( attributeID == inst . classIndex ( ) ) continue ; Double value = inst . value ( attributeID ) ; GaussianNumericAttributeClassObserver obs = attributeObservers . get ( attributeID ) ; GaussianEstimator estimator = null ; if ( obs != null && obs . getEstimator ( classIndex ) != null ) { estimator = obs . getEstimator ( classIndex ) ; } double valueNonZero ; if ( estimator != null ) { valueNonZero = estimator . probabilityDensity ( value ) ; } else { valueNonZero = ADDITIVE_SMOOTHING_FACTOR ; } votes [ classIndex ] += Math . log ( valueNonZero ) ; } if ( this . classPrototypes . get ( classIndex ) != null ) { votes [ classIndex ] += Math . log ( this . classPrototypes . get ( classIndex ) ) ; } } return votes ; } | Predicts the class memberships for a given instance . If an instance is unclassified the returned array elements will be all zero . |
41,044 | private double getPrior ( int classIndex ) { Double currentCount = this . classInstances . get ( classIndex ) ; if ( currentCount == null || currentCount == 0 ) return 0 ; else return currentCount * 1. / this . instancesSeen ; } | Compute the prior for the given classIndex . |
41,045 | public void setStreamSource ( InstanceStream stream ) { this . streamSource = new StreamSource ( stream ) ; firstInstance = streamSource . nextInstance ( ) . getData ( ) ; } | Sets the stream source . |
41,046 | public void sendInstances ( Stream inputStream , int numberInstances , boolean isTraining , boolean isTesting ) { int numberSamples = 0 ; while ( streamSource . hasMoreInstances ( ) && numberSamples < numberInstances ) { numberSamples ++ ; numberInstancesSent ++ ; InstanceContentEvent instanceContentEvent = new InstanceContentEvent ( numberInstancesSent , nextInstance ( ) , isTraining , isTesting ) ; inputStream . put ( instanceContentEvent ) ; } InstanceContentEvent instanceContentEvent = new InstanceContentEvent ( numberInstancesSent , null , isTraining , isTesting ) ; instanceContentEvent . setLast ( true ) ; inputStream . put ( instanceContentEvent ) ; } | Send instances . |
41,047 | public void sendEndEvaluationInstance ( Stream inputStream ) { InstanceContentEvent instanceContentEvent = new InstanceContentEvent ( - 1 , firstInstance , false , true ) ; inputStream . put ( instanceContentEvent ) ; } | Send end evaluation instance . |
41,048 | private void processInstanceContentEvent ( InstancesContentEvent instContentEvent ) { this . numBatches ++ ; this . contentEventList . add ( instContentEvent ) ; if ( this . numBatches == 1 || this . numBatches > 4 ) { this . processInstances ( this . contentEventList . remove ( 0 ) ) ; } if ( instContentEvent . isLastEvent ( ) ) { while ( ! contentEventList . isEmpty ( ) ) { processInstances ( contentEventList . remove ( 0 ) ) ; } } } | Helper method to process the InstanceContentEvent |
41,049 | private void trainOnInstanceImpl ( Instance inst ) { if ( this . treeRoot == null ) { this . treeRoot = newLearningNode ( this . parallelismHint ) ; this . activeLeafNodeCount = 1 ; } FoundNode foundNode = this . treeRoot . filterInstanceToLeaf ( inst , null , - 1 ) ; trainOnInstanceImpl ( foundNode , inst ) ; } | Helper method that represent training of an instance . Since it is decision tree this method routes the incoming instance into the correct leaf and then update the statistic on the found leaf . |
41,050 | private void attemptToSplit ( ActiveLearningNode activeLearningNode , FoundNode foundNode ) { this . splitId ++ ; ScheduledFuture < ? > timeOutHandler = this . executor . schedule ( new AggregationTimeOutHandler ( this . splitId , this . timedOutSplittingNodes ) , this . timeOut , TimeUnit . SECONDS ) ; this . splittingNodes . put ( this . splitId , new SplittingNodeInfo ( activeLearningNode , foundNode , timeOutHandler ) ) ; activeLearningNode . requestDistributedSuggestions ( this . splitId , this ) ; } | Helper method to represent a split attempt |
41,051 | private void continueAttemptToSplit ( ActiveLearningNode activeLearningNode , FoundNode foundNode ) { AttributeSplitSuggestion bestSuggestion = activeLearningNode . getDistributedBestSuggestion ( ) ; AttributeSplitSuggestion secondBestSuggestion = activeLearningNode . getDistributedSecondBestSuggestion ( ) ; double [ ] preSplitDist = activeLearningNode . getObservedClassDistribution ( ) ; AttributeSplitSuggestion nullSplit = new AttributeSplitSuggestion ( null , new double [ 0 ] [ ] , this . splitCriterion . getMeritOfSplit ( preSplitDist , new double [ ] [ ] { preSplitDist } ) ) ; if ( ( bestSuggestion == null ) || ( nullSplit . compareTo ( bestSuggestion ) > 0 ) ) { secondBestSuggestion = bestSuggestion ; bestSuggestion = nullSplit ; } else { if ( ( secondBestSuggestion == null ) || ( nullSplit . compareTo ( secondBestSuggestion ) > 0 ) ) { secondBestSuggestion = nullSplit ; } } boolean shouldSplit = false ; if ( secondBestSuggestion == null ) { shouldSplit = ( bestSuggestion != null ) ; } else { double hoeffdingBound = computeHoeffdingBound ( this . splitCriterion . getRangeOfMerit ( activeLearningNode . getObservedClassDistribution ( ) ) , this . splitConfidence , activeLearningNode . getWeightSeen ( ) ) ; if ( ( bestSuggestion . merit - secondBestSuggestion . merit > hoeffdingBound ) || ( hoeffdingBound < tieThreshold ) ) { shouldSplit = true ; } } SplitNode parent = foundNode . getParent ( ) ; int parentBranch = foundNode . getParentBranch ( ) ; if ( shouldSplit ) { if ( bestSuggestion . splitTest != null ) { SplitNode newSplit = new SplitNode ( bestSuggestion . splitTest , activeLearningNode . getObservedClassDistribution ( ) ) ; for ( int i = 0 ; i < bestSuggestion . numSplits ( ) ; i ++ ) { Node newChild = newLearningNode ( bestSuggestion . resultingClassDistributionFromSplit ( i ) , this . parallelismHint ) ; newSplit . setChild ( i , newChild ) ; } this . activeLeafNodeCount -- ; this . decisionNodeCount ++ ; this . 
activeLeafNodeCount += bestSuggestion . numSplits ( ) ; if ( parent == null ) { this . treeRoot = newSplit ; } else { parent . setChild ( parentBranch , newSplit ) ; } } } activeLearningNode . endSplitting ( ) ; activeLearningNode . setWeightSeenAtLastSplitEvaluation ( activeLearningNode . getWeightSeen ( ) ) ; } | Helper method to continue the attempt to split once all local calculation results are received . |
41,052 | private void setModelContext ( InstancesHeader ih ) { if ( ( ih != null ) && ( ih . classIndex ( ) < 0 ) ) { throw new IllegalArgumentException ( "Context for a classifier must include a class to learn" ) ; } logger . trace ( "Model context: {}" , ih . toString ( ) ) ; } | Helper method to set the model context i . e . how many attributes they are and what is the class index |
41,053 | public List < MapConfig > getMapConfigsForTopology ( SamzaTopology topology ) throws Exception { List < MapConfig > configs = new ArrayList < MapConfig > ( ) ; List < Map < String , String > > maps = this . getMapsForTopology ( topology ) ; for ( Map < String , String > map : maps ) { configs . add ( new MapConfig ( map ) ) ; } return configs ; } | Construct a list of MapConfigs for a Topology |
41,054 | private static void setValue ( Map < String , String > map , String key , String value ) { map . put ( key , value ) ; } | Set custom properties |
41,055 | public void init ( TopologyBuilder topologyBuilder , Instances dataset , int parallelism ) { this . model = new AMRRuleSetProcessor . Builder ( dataset ) . noAnomalyDetection ( noAnomalyDetectionOption . isSet ( ) ) . multivariateAnomalyProbabilityThreshold ( multivariateAnomalyProbabilityThresholdOption . getValue ( ) ) . univariateAnomalyProbabilityThreshold ( univariateAnomalyProbabilityThresholdOption . getValue ( ) ) . anomalyNumberOfInstancesThreshold ( anomalyNumInstThresholdOption . getValue ( ) ) . unorderedRules ( unorderedRulesOption . isSet ( ) ) . voteType ( votingTypeOption . getChosenIndex ( ) ) . build ( ) ; topologyBuilder . addProcessor ( model , this . ruleSetParallelismOption . getValue ( ) ) ; Stream forwardToRootStream = topologyBuilder . createStream ( this . model ) ; Stream forwardToLearnerStream = topologyBuilder . createStream ( this . model ) ; this . modelResultStream = topologyBuilder . createStream ( this . model ) ; this . model . setDefaultRuleStream ( forwardToRootStream ) ; this . model . setStatisticsStream ( forwardToLearnerStream ) ; this . model . setResultStream ( this . modelResultStream ) ; AMRDefaultRuleProcessor root = new AMRDefaultRuleProcessor . Builder ( dataset ) . threshold ( pageHinckleyThresholdOption . getValue ( ) ) . alpha ( pageHinckleyAlphaOption . getValue ( ) ) . changeDetection ( this . DriftDetectionOption . isSet ( ) ) . predictionFunction ( predictionFunctionOption . getChosenIndex ( ) ) . constantLearningRatioDecay ( constantLearningRatioDecayOption . isSet ( ) ) . learningRatio ( learningRatioOption . getValue ( ) ) . splitConfidence ( splitConfidenceOption . getValue ( ) ) . tieThreshold ( tieThresholdOption . getValue ( ) ) . gracePeriod ( gracePeriodOption . getValue ( ) ) . numericObserver ( ( FIMTDDNumericAttributeClassLimitObserver ) numericObserverOption . getValue ( ) ) . build ( ) ; topologyBuilder . addProcessor ( root ) ; Stream newRuleStream = topologyBuilder . 
createStream ( root ) ; this . rootResultStream = topologyBuilder . createStream ( root ) ; root . setRuleStream ( newRuleStream ) ; root . setResultStream ( this . rootResultStream ) ; AMRLearnerProcessor learner = new AMRLearnerProcessor . Builder ( dataset ) . splitConfidence ( splitConfidenceOption . getValue ( ) ) . tieThreshold ( tieThresholdOption . getValue ( ) ) . gracePeriod ( gracePeriodOption . getValue ( ) ) . noAnomalyDetection ( noAnomalyDetectionOption . isSet ( ) ) . multivariateAnomalyProbabilityThreshold ( multivariateAnomalyProbabilityThresholdOption . getValue ( ) ) . univariateAnomalyProbabilityThreshold ( univariateAnomalyProbabilityThresholdOption . getValue ( ) ) . anomalyNumberOfInstancesThreshold ( anomalyNumInstThresholdOption . getValue ( ) ) . build ( ) ; topologyBuilder . addProcessor ( learner , this . learnerParallelismOption . getValue ( ) ) ; Stream predicateStream = topologyBuilder . createStream ( learner ) ; learner . setOutputStream ( predicateStream ) ; topologyBuilder . connectInputAllStream ( newRuleStream , this . model ) ; topologyBuilder . connectInputAllStream ( predicateStream , this . model ) ; topologyBuilder . connectInputShuffleStream ( forwardToRootStream , root ) ; topologyBuilder . connectInputKeyStream ( forwardToLearnerStream , learner ) ; topologyBuilder . connectInputAllStream ( newRuleStream , learner ) ; } | private Stream resultStream ; |
41,056 | void setChild ( int index , Node child ) { if ( ( this . splitTest . maxBranches ( ) >= 0 ) && ( index >= this . splitTest . maxBranches ( ) ) ) { throw new IndexOutOfBoundsException ( ) ; } this . children . set ( index , child ) ; } | Method to set the children in a specific index of the SplitNode with the appropriate child |
41,057 | public void initTopology ( String topologyName , int delay ) { if ( this . topology != null ) { System . out . println ( "Topology has been initialized before!" ) ; return ; } this . topology = componentFactory . createTopology ( topologyName ) ; } | Initiates topology with a specific name and a delay between consecutive instances . |
41,058 | private ProcessingItem createPi ( Processor processor , int parallelism ) { ProcessingItem pi = this . componentFactory . createPi ( processor , parallelism ) ; this . topology . addProcessingItem ( pi , parallelism ) ; return pi ; } | Creates a processing item with a specific processor and paralellism level . |
41,059 | private EntranceProcessingItem createEntrancePi ( EntranceProcessor processor ) { EntranceProcessingItem epi = this . componentFactory . createEntrancePi ( processor ) ; this . topology . addEntranceProcessingItem ( epi ) ; if ( this . mapProcessorToProcessingItem == null ) this . mapProcessorToProcessingItem = new HashMap < Processor , IProcessingItem > ( ) ; this . mapProcessorToProcessingItem . put ( processor , epi ) ; return epi ; } | Creates a platform specific entrance processing item . |
41,060 | private Stream createStream ( IProcessingItem sourcePi ) { Stream stream = this . componentFactory . createStream ( sourcePi ) ; this . topology . addStream ( stream ) ; return stream ; } | Creates a platform specific stream . |
41,061 | public void init ( TopologyBuilder builder , Instances dataset , int parallelism ) { this . builder = builder ; this . dataset = dataset ; this . setLayout ( ) ; } | private int parallelism ; |
41,062 | public EntranceProcessingItem setOutputStream ( Stream outputStream ) { if ( this . outputStream != null && this . outputStream != outputStream ) { throw new IllegalStateException ( "Cannot overwrite output stream of EntranceProcessingItem" ) ; } else this . outputStream = outputStream ; return this ; } | Set the output stream of this EntranceProcessingItem . An EntranceProcessingItem should have only 1 single output stream and should not be re - assigned . |
41,063 | private double [ ] getVotesForInstance ( Instance instance ) { ErrorWeightedVote errorWeightedVote = newErrorWeightedVote ( ) ; int numberOfRulesCovering = 0 ; for ( ActiveRule rule : ruleSet ) { if ( rule . isCovering ( instance ) == true ) { numberOfRulesCovering ++ ; double [ ] vote = rule . getPrediction ( instance ) ; double error = rule . getCurrentError ( ) ; errorWeightedVote . addVote ( vote , error ) ; if ( ! this . unorderedRules ) { break ; } } } if ( numberOfRulesCovering == 0 ) { double [ ] vote = defaultRule . getPrediction ( instance ) ; double error = defaultRule . getCurrentError ( ) ; errorWeightedVote . addVote ( vote , error ) ; } double [ ] weightedVote = errorWeightedVote . computeWeightedVote ( ) ; return weightedVote ; } | getVotesForInstance extension of the instance method getVotesForInstance in moa . classifier . java returns the prediction of the instance . Called in EvaluateModelRegression |
41,064 | public void finish ( ) { if ( mSuccessful ) { mDb . setTransactionSuccessful ( ) ; } mDb . endTransaction ( ) ; if ( mSuccessful ) { for ( OnTransactionCommittedListener listener : mOnTransactionCommittedListeners ) { listener . onTransactionCommitted ( ) ; } } } | Finish the transaction . This will commit or rollback the transaction depending on whether it was marked as successful or not
41,065 | protected void onStartLoading ( ) { if ( mCursor != null ) { deliverResult ( mCursor ) ; } if ( takeContentChanged ( ) || mCursor == null ) { forceLoad ( ) ; } } | Starts an asynchronous load of the contacts list data . When the result is ready the callbacks will be called on the UI thread . If a previous load has been completed and is still valid the result may be passed to the callbacks immediately . |
41,066 | final public boolean exists ( ) { final Model m = Query . one ( getClass ( ) , String . format ( "SELECT * FROM %s WHERE %s LIMIT 1" , Utils . getTableName ( getClass ( ) ) , Utils . getWhereStatement ( this ) ) ) . get ( ) ; return m != null ; } | Check whether this model exists in the database |
41,067 | final public void delete ( ) { Transaction t = new Transaction ( ) ; try { delete ( t ) ; t . setSuccessful ( true ) ; } finally { t . finish ( ) ; } } | Delete this model |
41,068 | final public void delete ( Transaction t ) { t . delete ( Utils . getTableName ( getClass ( ) ) , Utils . getWhereStatement ( this ) ) ; t . addOnTransactionCommittedListener ( new OnTransactionCommittedListener ( ) { public void onTransactionCommitted ( ) { Sprinkles . sInstance . mContext . getContentResolver ( ) . notifyChange ( Utils . getNotificationUri ( Model . this . getClass ( ) ) , null ) ; } } ) ; afterDelete ( ) ; } | Delete this model within the given transaction |
41,069 | public static < T extends Model > ManyQuery < T > all ( Class < T > clazz ) { return many ( clazz , "SELECT * FROM " + Utils . getTableName ( clazz ) ) ; } | Start a query for the entire list of instance of type T |
41,070 | static synchronized SQLiteDatabase getDatabase ( ) { if ( sInstance == null ) { throw new SprinklesNotInitializedException ( ) ; } if ( sDatabase == null ) { DbOpenHelper dbOpenHelper = new DbOpenHelper ( sInstance . mContext , sInstance . databaseName , sInstance . initialDatabaseVersion ) ; sDatabase = dbOpenHelper . getWritableDatabase ( ) ; } return sDatabase ; } | Throws SprinklesNotInitializedException if you try to access the database before initializing Sprinkles . |
41,071 | public Schema getXmlSchema ( CycloneDxSchema . Version schemaVersion ) throws SAXException { if ( CycloneDxSchema . Version . VERSION_10 == schemaVersion ) { return getXmlSchema10 ( ) ; } else { return getXmlSchema11 ( ) ; } } | Returns the CycloneDX XML Schema for the specified schema version . |
41,072 | private Schema getXmlSchema10 ( ) throws SAXException { final SchemaFactory schemaFactory = SchemaFactory . newInstance ( XMLConstants . W3C_XML_SCHEMA_NS_URI ) ; final Source [ ] schemaFiles = { new StreamSource ( this . getClass ( ) . getClassLoader ( ) . getResourceAsStream ( "spdx.xsd" ) ) , new StreamSource ( this . getClass ( ) . getClassLoader ( ) . getResourceAsStream ( "bom-1.0.xsd" ) ) } ; return schemaFactory . newSchema ( schemaFiles ) ; } | Returns the CycloneDX XML Schema from the specifications XSD . |
41,073 | public static LicenseChoice resolve ( String licenseString ) { try { return resolveSpdxLicenseString ( licenseString ) ; } catch ( InvalidLicenseStringException e1 ) { final LicenseChoice licenseChoice = resolveViaAlternativeMapping ( licenseString ) ; if ( licenseChoice != null ) { return licenseChoice ; } try { new URL ( licenseString ) ; final LicenseChoice choice = new LicenseChoice ( ) ; choice . addLicense ( parseLicenseByUrl ( licenseString ) ) ; return choice ; } catch ( MalformedURLException | InvalidLicenseStringException e2 ) { } } return null ; } | Attempts to resolve the specified license string via SPDX license identifier and expression parsing first . If SPDX resolution is not successful the method will attempt fuzzy matching . |
41,074 | public static LicenseChoice resolveSpdxLicenseId ( String licenseId ) throws InvalidSPDXAnalysisException { final SpdxListedLicense spdxLicense = LicenseInfoFactory . getListedLicenseById ( licenseId ) ; final LicenseChoice choice = new LicenseChoice ( ) ; choice . addLicense ( createLicenseObject ( spdxLicense ) ) ; return choice ; } | Given a valid SPDX license ID this method will return a LicenseChoice object . |
41,075 | public static License parseLicenseByUrl ( String licenseUrl ) throws InvalidLicenseStringException { final String protocolExcludedUrl = licenseUrl . replace ( "http://" , "" ) . replace ( "https://" , "" ) ; final ListedLicenses ll = ListedLicenses . getListedLicenses ( ) ; License license = resolvedByUrl . get ( licenseUrl ) ; if ( license != null ) { return license ; } for ( final String licenseId : ll . getSpdxListedLicenseIds ( ) ) { final AnyLicenseInfo licenseInfo = LicenseInfoFactory . parseSPDXLicenseString ( licenseId ) ; if ( licenseInfo instanceof SpdxListedLicense ) { final SpdxListedLicense spdxListedLicense = ( SpdxListedLicense ) licenseInfo ; for ( final String seeAlso : spdxListedLicense . getSeeAlso ( ) ) { final String protocolExcludedSeeAlsoUrl = seeAlso . replace ( "http://" , "" ) . replace ( "https://" , "" ) ; if ( protocolExcludedUrl . toLowerCase ( ) . contains ( protocolExcludedSeeAlsoUrl . toLowerCase ( ) ) || protocolExcludedSeeAlsoUrl . toLowerCase ( ) . contains ( protocolExcludedUrl . toLowerCase ( ) ) ) { license = createLicenseObject ( spdxListedLicense ) ; resolvedByUrl . put ( licenseUrl , license ) ; return license ; } } } } return null ; } | Given a URL this method will attempt to resolve the SPDX license . This method will not retrieve the URL rather it will interrogate its internal list of SPDX licenses and the URLs defined for each . This method may impact performance for URLs that are not associated with an SPDX license or otherwise have not been queried on previously . This method will cache resolved licenses and their URLs for faster access on subsequent calls .
41,076 | private static License createLicenseObject ( SpdxListedLicense spdxListedLicense ) { final License license = new License ( ) ; license . setId ( spdxListedLicense . getLicenseId ( ) ) ; license . setName ( spdxListedLicense . getName ( ) ) ; if ( spdxListedLicense . getSeeAlso ( ) != null && spdxListedLicense . getSeeAlso ( ) . length > 0 ) { license . setUrl ( spdxListedLicense . getSeeAlso ( ) [ 0 ] ) ; } if ( spdxListedLicense . getLicenseText ( ) != null ) { final LicenseText text = new LicenseText ( ) ; text . setContentType ( "plain/text" ) ; text . setEncoding ( "base64" ) ; text . setText ( Base64 . getEncoder ( ) . encodeToString ( spdxListedLicense . getLicenseText ( ) . getBytes ( ) ) ) ; license . setLicenseText ( text ) ; } return license ; } | Creates a License object from the specified SpdxListedLicense object . |
41,077 | private static LicenseChoice resolveViaAlternativeMapping ( String licenseString ) { if ( licenseString == null ) { return null ; } try { for ( final Map . Entry < String , List < String > > mapping : mappings . entrySet ( ) ) { final List < String > names = mapping . getValue ( ) ; if ( names != null ) { for ( final String name : names ) { if ( licenseString . equalsIgnoreCase ( name ) ) { return resolveSpdxLicenseString ( mapping . getKey ( ) ) ; } } } } } catch ( InvalidLicenseStringException e ) { } return null ; } | Attempts to perform high - confidence license resolution with unstructured text as input . |
41,078 | public String toXmlString ( ) throws TransformerException { if ( doc == null ) { return null ; } final DOMSource domSource = new DOMSource ( doc ) ; final StringWriter writer = new StringWriter ( ) ; final StreamResult result = new StreamResult ( writer ) ; final TransformerFactory tf = TransformerFactory . newInstance ( ) ; tf . setFeature ( XMLConstants . FEATURE_SECURE_PROCESSING , true ) ; final Transformer transformer = tf . newTransformer ( ) ; transformer . setOutputProperty ( OutputKeys . ENCODING , StandardCharsets . UTF_8 . name ( ) ) ; transformer . setOutputProperty ( OutputKeys . INDENT , "yes" ) ; transformer . setOutputProperty ( OutputKeys . DOCTYPE_PUBLIC , "yes" ) ; transformer . setOutputProperty ( "{http://xml.apache.org/xslt}indent-amount" , "4" ) ; transformer . transform ( domSource , result ) ; return writer . toString ( ) ; } | Creates a text representation of a CycloneDX BoM Document . |
41,079 | private Bom parse ( StreamSource streamSource ) throws ParseException { try { final Schema schema = getXmlSchema ( CycloneDxSchema . Version . VERSION_11 ) ; final XmlMapper mapper = new XmlMapper ( ) ; final XMLInputFactory xif = XMLInputFactory . newFactory ( ) ; xif . setProperty ( XMLInputFactory . IS_SUPPORTING_EXTERNAL_ENTITIES , false ) ; xif . setProperty ( XMLInputFactory . SUPPORT_DTD , false ) ; final XMLStreamReader xsr = new NamespaceStreamReaderDelegate ( xif . createXMLStreamReader ( streamSource ) ) ; final Bom bom = mapper . readValue ( xsr , Bom . class ) ; xsr . close ( ) ; return bom ; } catch ( IOException | XMLStreamException | SAXException e ) { throw new ParseException ( e ) ; } } | Parses a CycloneDX BOM . |
41,080 | public List < SAXParseException > validate ( File file ) { return validate ( file , CycloneDxSchema . Version . VERSION_11 ) ; } | Verifies a CycloneDX BoM conforms to the latest version of the specification through XML validation . |
41,081 | public static List < Hash > calculateHashes ( File file ) throws IOException { if ( file == null || ! file . exists ( ) || ! file . canRead ( ) ) { return null ; } final List < Hash > hashes = new ArrayList < > ( ) ; try ( InputStream fis = Files . newInputStream ( file . toPath ( ) ) ) { hashes . add ( new Hash ( Hash . Algorithm . MD5 , DigestUtils . md5Hex ( fis ) ) ) ; } try ( InputStream fis = Files . newInputStream ( file . toPath ( ) ) ) { hashes . add ( new Hash ( Hash . Algorithm . SHA1 , DigestUtils . sha1Hex ( fis ) ) ) ; } try ( InputStream fis = Files . newInputStream ( file . toPath ( ) ) ) { hashes . add ( new Hash ( Hash . Algorithm . SHA_256 , DigestUtils . sha256Hex ( fis ) ) ) ; } try ( InputStream fis = Files . newInputStream ( file . toPath ( ) ) ) { hashes . add ( new Hash ( Hash . Algorithm . SHA_384 , DigestUtils . sha384Hex ( fis ) ) ) ; } try ( InputStream fis = Files . newInputStream ( file . toPath ( ) ) ) { hashes . add ( new Hash ( Hash . Algorithm . SHA_512 , DigestUtils . sha512Hex ( fis ) ) ) ; } return hashes ; } | Calculates the hashes of the specified file . |
41,082 | protected String getSortOrderStringForQuery ( MatcherPattern target , QueryParameters parameter ) { String result = "" ; if ( parameter . getSortOrder ( ) != null && parameter . getSortOrder ( ) . length ( ) >= 1 ) { result = parameter . getSortOrder ( ) ; } else { result = target . getTableInfo ( ) . getDefaultSortOrderString ( ) ; } return result ; } | This method gets the appropriate sort order . |
41,083 | public MatcherController add ( Class < ? > tableClassType , SubType subType , String pattern , int patternCode ) { this . addTableClass ( tableClassType ) ; this . addMatcherPattern ( subType , pattern , patternCode ) ; return this ; } | Register a class for table . And registers a pattern for UriMatcher . |
41,084 | public MatcherController add ( SubType subType , String pattern , int patternCode ) { this . addMatcherPattern ( subType , pattern , patternCode ) ; return this ; } | Registers a pattern for UriMatcher . It refer to the class that was last registered from add method . |
41,085 | public MatcherController add ( MatcherPattern matcherPattern ) { int patternCode = matcherPattern . getPatternCode ( ) ; if ( this . lastAddTableInfo == null ) { throw new IllegalStateException ( "There is a problem with the order of function call." ) ; } if ( findMatcherPattern ( patternCode ) != null ) { throw new IllegalArgumentException ( "patternCode has been specified already exists." ) ; } this . matcherPatterns . add ( matcherPattern ) ; return this ; } | Registers a pattern for UriMatcher . To register you have to create an instance of MatcherPattern . |
41,086 | public MatcherController setDefaultContentUri ( String authority , String path ) { if ( this . lastAddTableInfo == null ) { throw new IllegalStateException ( "There is a problem with the order of function call." ) ; } this . lastAddTableInfo . setDefaultContentUriInfo ( new ContentUriInfo ( authority , path ) ) ; return this ; } | Set the DefaultContentUri . If you did not use the DefaultContentUri annotation you must call this method . |
41,087 | public MatcherController setDefaultContentMimeTypeVnd ( String name , String type ) { if ( this . lastAddTableInfo == null ) { throw new IllegalStateException ( "There is a problem with the order of function call." ) ; } this . lastAddTableInfo . setDefaultContentMimeTypeVndInfo ( new ContentMimeTypeVndInfo ( name , type ) ) ; return this ; } | Set the DefaultContentMimeTypeVnd . If you did not use the DefaultContentMimeTypeVnd annotation you must call this method . |
41,088 | public MatcherController initialize ( ) { this . lastAddTableInfo = null ; for ( Map . Entry < Class < ? > , TableInfo > entry : this . tables . entrySet ( ) ) { entry . getValue ( ) . isValid ( true ) ; } for ( MatcherPattern entry : matcherPatterns ) { entry . isValid ( true ) ; this . matcher . addURI ( entry . getTableInfo ( ) . getDefaultContentUriInfo ( ) . getAuthority ( ) , entry . getPathAndPatternString ( ) , entry . getPatternCode ( ) ) ; entry . initialize ( ) ; } this . initialized = true ; return this ; } | initialized with the contents that are registered by the add method . This method checks the registration details . |
41,089 | public MatcherPattern findMatcherPattern ( int patternCode ) { MatcherPattern result = null ; for ( MatcherPattern entry : this . matcherPatterns ) { if ( entry . getPatternCode ( ) == patternCode ) { result = entry ; break ; } } return result ; } | This will search the MatcherPattern that are registered based on the return code UriMatcher . |
41,090 | protected void onQueryCompleted ( Cursor result , Uri uri , MatcherPattern target , QueryParameters parameter ) { result . setNotificationUri ( this . getContext ( ) . getContentResolver ( ) , uri ) ; } | This method is called after the onQuery processing has been handled . If needed , you can override this method .
41,091 | protected void onInsertCompleted ( Uri result , Uri uri , MatcherPattern target , InsertParameters parameter ) { this . getContext ( ) . getContentResolver ( ) . notifyChange ( result , null ) ; } | This method is called after the onInsert processing has been handled . If needed , you can override this method .
41,092 | protected void onDeleteCompleted ( int result , Uri uri , MatcherPattern target , DeleteParameters parameter ) { this . getContext ( ) . getContentResolver ( ) . notifyChange ( uri , null ) ; } | This method is called after the onDelete processing has been handled . If needed , you can override this method .
41,093 | protected void onUpdateCompleted ( int result , Uri uri , MatcherPattern target , UpdateParameters parameter ) { this . getContext ( ) . getContentResolver ( ) . notifyChange ( uri , null ) ; } | This method is called after the onUpdate processing has been handled . If needed , you can override this method .
41,094 | public static void __gmpz_import ( mpz_t rop , int count , int order , int size , int endian , int nails , Pointer buffer ) { if ( SIZE_T_CLASS == SizeT4 . class ) { SizeT4 . __gmpz_import ( rop , count , order , size , endian , nails , buffer ) ; } else { SizeT8 . __gmpz_import ( rop , count , order , size , endian , nails , buffer ) ; } } | Set rop from an array of word data at op . |
41,095 | public static void __gmpz_export ( Pointer rop , Pointer countp , int order , int size , int endian , int nails , mpz_t op ) { if ( SIZE_T_CLASS == SizeT4 . class ) { SizeT4 . __gmpz_export ( rop , countp , order , size , endian , nails , op ) ; } else { SizeT8 . __gmpz_export ( rop , countp , order , size , endian , nails , op ) ; } } | Fill rop with word data from op . |
41,096 | public static void checkLoaded ( ) { if ( LOAD_ERROR != null ) { throw LOAD_ERROR ; } BigInteger two = BigInteger . valueOf ( 2 ) ; BigInteger three = BigInteger . valueOf ( 3 ) ; BigInteger four = BigInteger . valueOf ( 4 ) ; BigInteger five = BigInteger . valueOf ( 5 ) ; BigInteger answer ; answer = modPowInsecure ( two , three , five ) ; if ( ! three . equals ( answer ) ) { throw new AssertionError ( "libgmp is loaded but modPowInsecure returned the wrong answer" ) ; } answer = modPowSecure ( two , three , five ) ; if ( ! three . equals ( answer ) ) { throw new AssertionError ( "libgmp is loaded but modPowSecure returned the wrong answer" ) ; } int answr = kronecker ( four , five ) ; if ( answr != 1 ) { throw new AssertionError ( "libgmp is loaded but kronecker returned the wrong answer" ) ; } } | Verifies this library is loaded properly . |
41,097 | public static int kronecker ( BigInteger a , BigInteger p ) { return INSTANCE . get ( ) . kroneckerImpl ( a , p ) ; } | Calculate kronecker symbol a|p . Generalization of legendre and jacobi . |
41,098 | public static BigInteger modInverse ( BigInteger val , BigInteger modulus ) { if ( modulus . signum ( ) <= 0 ) { throw new ArithmeticException ( "modulus must be positive" ) ; } return INSTANCE . get ( ) . modInverseImpl ( val , modulus ) ; } | Calculate val^ - 1 % modulus . |
41,099 | public static BigInteger exactDivide ( BigInteger dividend , BigInteger divisor ) { if ( divisor . signum ( ) == 0 ) { throw new ArithmeticException ( "BigInteger divide by zero" ) ; } return INSTANCE . get ( ) . exactDivImpl ( dividend , divisor ) ; } | Divide dividend by divisor . This method only returns correct answers when the division produces no remainder . Correct answers should not be expected when the division would result in a remainder .