idx
int64
0
41.2k
question
stringlengths
83
4.15k
target
stringlengths
5
715
34,000
/**
 * Log likelihood of the anomalous objects, assuming a uniform distribution.
 *
 * @param anomalousObjs ids of the objects flagged as anomalous
 * @return log likelihood contribution of the anomalous set (0 if empty)
 */
private double loglikelihoodAnomalous(DBIDs anomalousObjs) {
  final int card = anomalousObjs.size();
  if(card == 0) {
    return 0;
  }
  // Each of the card objects has uniform density 1/card.
  return card * -FastMath.log(card);
}
Log-likelihood of the anomalous objects, assuming a uniform distribution.
34,001
/**
 * Computes the log likelihood of all normal (non-anomalous) objects under a
 * single multivariate Gaussian model estimated by the covariance builder.
 *
 * @param objids all object ids
 * @param anomalous ids excluded from the normal model
 * @param builder covariance matrix builder holding mean and scatter
 * @param relation data relation to read the vectors from
 * @return log likelihood of the normal objects
 */
private double loglikelihoodNormal(DBIDs objids, SetDBIDs anomalous, CovarianceMatrix builder, Relation<V> relation) {
  double[] mean = builder.getMeanVector();
  // LU decomposition gives both the inverse and the determinant of the covariance.
  final LUDecomposition lu = new LUDecomposition(builder.makeSampleMatrix());
  double[][] covInv = lu.inverse();
  // Normalization constant: each normal object contributes -log(sqrt((2pi)^d * det(Sigma))).
  double prob = (objids.size() - anomalous.size()) * -FastMath.log(FastMath.sqrt(MathUtil.powi(MathUtil.TWOPI, RelationUtil.dimensionality(relation)) * lu.det()));
  for(DBIDIter iter = objids.iter(); iter.valid(); iter.advance()) {
    if(!anomalous.contains(iter)) {
      // Mahalanobis term: -(x - mu)^T Sigma^-1 (x - mu) / 2
      double[] xcent = minusEquals(relation.get(iter).toArray(), mean);
      prob -= .5 * transposeTimesTimes(xcent, covInv, xcent);
    }
  }
  return prob;
}
Computes the loglikelihood of all normal objects . Gaussian model
34,002
/**
 * Compute the exact minimum and maximum pairwise distance by evaluating all
 * ordered pairs of distinct objects.
 *
 * @param relation data relation
 * @param distFunc distance query to evaluate
 * @return minimum and maximum observed distance
 */
private DoubleMinMax exactMinMax(Relation<O> relation, DistanceQuery<O> distFunc) {
  final FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Exact fitting distance computations", relation.size(), LOG) : null;
  DoubleMinMax mm = new DoubleMinMax();
  for(DBIDIter it1 = relation.iterDBIDs(); it1.valid(); it1.advance()) {
    for(DBIDIter it2 = relation.iterDBIDs(); it2.valid(); it2.advance()) {
      // Skip the self-pair; self distance is trivially 0.
      if(DBIDUtil.equal(it1, it2)) {
        continue;
      }
      mm.put(distFunc.distance(it1, it2));
    }
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  return mm;
}
Compute the exact maximum and minimum .
34,003
/**
 * Performs necessary operations before inserting the specified entry:
 * collects the k-nearest neighbors of the new entry while descending from
 * the root.
 *
 * @param entry the entry about to be inserted
 */
protected void preInsert(RdKNNEntry entry) {
  KNNHeap knns = DBIDUtil.newHeap(settings.k_max);
  preInsert(entry, getRootEntry(), knns);
}
Performs necessary operations before inserting the specified entry .
34,004
/**
 * Performs necessary operations after deleting the specified object: finds
 * the reverse k-nearest neighbors of the removed object, recomputes their
 * kNN lists, and propagates the adjusted kNN distances through the tree.
 *
 * @param entry the deleted leaf entry
 */
protected void postDelete(RdKNNEntry entry) {
  // Objects that had the deleted object among their k nearest neighbors.
  ModifiableDoubleDBIDList rnns = DBIDUtil.newDistanceDBIDList();
  doReverseKNN(getRoot(), ((RdKNNLeafEntry) entry).getDBID(), rnns);
  ArrayModifiableDBIDs ids = DBIDUtil.newArray(rnns);
  ids.sort();
  // Recompute their kNN in bulk, then repair the stored kNN distances.
  List<? extends KNNList> knnLists = knnQuery.getKNNForBulkDBIDs(ids, settings.k_max);
  adjustKNNDistance(getRootEntry(), ids, knnLists);
}
Performs necessary operations after deleting the specified object .
34,005
/**
 * Performs a reverse kNN query in the specified subtree: collects all leaf
 * objects whose stored kNN distance is at least their distance to the query
 * object.
 *
 * @param node subtree root
 * @param oid query object id
 * @param result output list of (distance, id) pairs
 */
private void doReverseKNN(RdKNNNode node, DBID oid, ModifiableDoubleDBIDList result) {
  final int numEntries = node.getNumEntries();
  if(node.isLeaf()) {
    for(int pos = 0; pos < numEntries; pos++) {
      final RdKNNLeafEntry leaf = (RdKNNLeafEntry) node.getEntry(pos);
      final double dist = distanceQuery.distance(leaf.getDBID(), oid);
      // The query object is a reverse kNN of this object iff it lies
      // within the object's current kNN distance.
      if(dist <= leaf.getKnnDistance()) {
        result.add(dist, leaf.getDBID());
      }
    }
    return;
  }
  for(int pos = 0; pos < numEntries; pos++) {
    final RdKNNDirectoryEntry dir = (RdKNNDirectoryEntry) node.getEntry(pos);
    // Prune subtrees that cannot contain reverse neighbors.
    if(distanceQuery.minDist(dir, oid) <= dir.getKnnDistance()) {
      doReverseKNN(getNode(dir), oid, result);
    }
  }
}
Performs a reverse knn query in the specified subtree .
34,006
/**
 * Performs a bulk reverse kNN query in the specified subtree for multiple
 * query objects at once.
 *
 * @param node subtree root
 * @param ids query object ids
 * @param result per-query output lists of (distance, id) pairs
 */
private void doBulkReverseKNN(RdKNNNode node, DBIDs ids, Map<DBID, ModifiableDoubleDBIDList> result) {
  if(node.isLeaf()) {
    for(int i = 0; i < node.getNumEntries(); i++) {
      RdKNNLeafEntry entry = (RdKNNLeafEntry) node.getEntry(i);
      for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
        DBID id = DBIDUtil.deref(iter);
        double distance = distanceQuery.distance(entry.getDBID(), id);
        if(distance <= entry.getKnnDistance()) {
          result.get(id).add(distance, entry.getDBID());
        }
      }
    }
  }
  else {
    for(int i = 0; i < node.getNumEntries(); i++) {
      RdKNNDirectoryEntry entry = (RdKNNDirectoryEntry) node.getEntry(i);
      // Collect ALL query ids that may have reverse neighbors in this subtree.
      ModifiableDBIDs candidates = DBIDUtil.newArray();
      for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
        DBID id = DBIDUtil.deref(iter);
        double minDist = distanceQuery.minDist(entry, id);
        if(minDist <= entry.getKnnDistance()) {
          candidates.add(id);
        }
      }
      // BUGFIX: recurse once per child, after candidate collection finished.
      // Previously the recursion was inside the candidate loop, so the same
      // subtree was descended once per candidate found so far, producing
      // duplicate result entries and redundant distance computations.
      if(!candidates.isEmpty()) {
        doBulkReverseKNN(getNode(entry), candidates, result);
      }
    }
  }
}
Performs a bulk reverse knn query in the specified subtree .
34,007
/**
 * Throws an IllegalArgumentException if the specified distance function does
 * not match the distance function this index was built with.
 *
 * @param distanceFunction distance function to validate
 * @throws IllegalArgumentException if the distance function differs from the
 *         configured one
 */
private void checkDistanceFunction(SpatialPrimitiveDistanceFunction<? super O> distanceFunction) {
  if(!settings.distanceFunction.equals(distanceFunction)) {
    // BUGFIX: report the class of the EXPECTED distance function
    // (settings.distanceFunction, which the comparison is made against),
    // not the class of the distance query object.
    throw new IllegalArgumentException("Parameter distanceFunction must be an instance of " + settings.distanceFunction.getClass() + ", but is " + distanceFunction.getClass());
  }
}
Throws an IllegalArgumentException if the specified distance function is not an instance of the distance function used by this index .
34,008
/**
 * Inserts the specified objects into this index. If a bulk load mode is
 * supported, the objects are inserted in one bulk; otherwise one by one.
 *
 * @param ids ids of the objects to insert
 */
public final void insertAll(DBIDs ids) {
  // Nothing to do for empty or singleton id sets.
  if(ids.isEmpty() || (ids.size() == 1)) {
    return;
  }
  if(!canBulkLoad()) {
    for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
      insert(it);
    }
  }
  else {
    List<RdKNNEntry> leaves = new ArrayList<>(ids.size());
    for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
      leaves.add(createNewLeafEntry(DBIDUtil.deref(it)));
    }
    bulkLoad(leaves);
  }
  doExtraIntegrityChecks();
}
Inserts the specified objects into this index . If a bulk load mode is implemented the objects are inserted in one bulk .
34,009
/**
 * Computes the correlation distance between the two subspaces defined by the
 * specified PCAs: each PCA's subspace is extended by the strong eigenvectors
 * of the other PCA that are not well explained by it, and the larger of the
 * two resulting correlation dimensionalities is returned.
 *
 * @param pca1 first PCA result
 * @param pca2 second PCA result
 * @param dimensionality full data dimensionality (upper bound for lambda)
 * @return correlation distance (an integer dimensionality)
 */
public int correlationDistance(PCAFilteredResult pca1, PCAFilteredResult pca2, int dimensionality) {
  // Working copies: the eigenvector bases will be adjusted below.
  double[][] v1t = copy(pca1.getEigenvectors());
  double[][] v1t_strong = pca1.getStrongEigenvectors();
  int lambda1 = pca1.getCorrelationDimension();
  double[][] v2t = copy(pca2.getEigenvectors());
  double[][] v2t_strong = pca2.getStrongEigenvectors();
  int lambda2 = pca2.getCorrelationDimension();
  // Extend subspace 1 by strong eigenvectors of pca2 it does not explain.
  double[][] m1_czech = pca1.dissimilarityMatrix();
  for(int i = 0; i < v2t_strong.length; i++) {
    double[] v2_i = v2t_strong[i];
    // Residual of v2_i not captured by subspace 1; compare against deltasq.
    double distsq = squareSum(v2_i) - transposeTimesTimes(v2_i, m1_czech, v2_i);
    if(lambda1 < dimensionality && distsq > deltasq) {
      adjust(v1t, v2_i, lambda1++);
      // Rebuild the dissimilarity matrix with the enlarged strong set.
      double[] e1_czech_d = new double[v1t.length];
      Arrays.fill(e1_czech_d, 0, lambda1, 1);
      m1_czech = transposeDiagonalTimes(v1t, e1_czech_d, v1t);
    }
  }
  // Symmetric: extend subspace 2 by strong eigenvectors of pca1.
  double[][] m2_czech = pca2.dissimilarityMatrix();
  for(int i = 0; i < v1t_strong.length; i++) {
    double[] v1_i = v1t_strong[i];
    double distsq = squareSum(v1_i) - transposeTimesTimes(v1_i, m2_czech, v1_i);
    if(lambda2 < dimensionality && distsq > deltasq) {
      adjust(v2t, v1_i, lambda2++);
      // NOTE(review): uses v1t.length for the e2 diagonal; presumably both
      // bases have the same dimensionality -- confirm.
      double[] e2_czech_d = new double[v1t.length];
      Arrays.fill(e2_czech_d, 0, lambda2, 1);
      m2_czech = transposeDiagonalTimes(v2t, e2_czech_d, v2t);
    }
  }
  return Math.max(lambda1, lambda2);
}
Computes the correlation distance between the two subspaces defined by the specified PCAs .
34,010
/**
 * Log cumulative distribution function for location = 0, scale = 1.
 *
 * @param val value to evaluate at
 * @param shape1 first shape parameter (0 selects the exponential-style branch)
 * @param shape2 second shape parameter (0 selects the limit form)
 * @return log CDF value; NaN is propagated
 */
public static double logcdf(double val, double shape1, double shape2) {
  // Handle the infinities and NaN explicitly up front.
  if(val == Double.NEGATIVE_INFINITY) {
    return Double.NEGATIVE_INFINITY;
  }
  if(val == Double.POSITIVE_INFINITY) {
    return 0.;
  }
  if(val != val) { // NaN check
    return Double.NaN;
  }
  // First transformation: reduce via shape1.
  if(shape1 == 0.) {
    val = FastMath.exp(-val);
  }
  else {
    double tmp = shape1 * val;
    if(tmp == Double.NEGATIVE_INFINITY) {
      return shape2 == 0 ? 0. : Double.NEGATIVE_INFINITY;
    }
    // Outside the support of the distribution for this shape.
    if(tmp >= 1.) {
      return shape2 == 0 ? Double.NEGATIVE_INFINITY : 0.;
    }
    // log1p keeps precision for tmp near 0.
    val = FastMath.exp(FastMath.log1p(-tmp) / shape1);
  }
  // Second transformation: apply shape2 to the transformed value.
  if(shape2 == 0.) {
    return -val;
  }
  final double tmp = shape2 * val;
  return tmp < 1. ? FastMath.log1p(-tmp) / shape2 : Double.NEGATIVE_INFINITY;
}
Cumulative distribution function for location = 0, scale = 1.
34,011
/**
 * Query the most similar objects, sparse vector version: accumulates dot
 * products against the inverted index and records candidate ids.
 *
 * @param obj sparse query vector
 * @param scores per-object dot product accumulator
 * @param cands set collecting all touched candidate ids
 * @return Euclidean norm of the query vector
 */
private double naiveQuerySparse(SparseNumberVector obj, WritableDoubleDataStore scores, HashSetModifiableDBIDs cands) {
  double sqlen = 0.;
  for(int it = obj.iter(); obj.iterValid(it); it = obj.iterAdvance(it)) {
    final int dim = obj.iterDim(it);
    final double val = obj.iterDoubleValue(it);
    // Skip zero and NaN components - they contribute nothing.
    if(val == 0. || val != val) {
      continue;
    }
    sqlen += val * val;
    // Dimensions beyond the index have no postings.
    if(dim >= index.size()) {
      continue;
    }
    ModifiableDoubleDBIDList postings = index.get(dim);
    for(DoubleDBIDListIter pi = postings.iter(); pi.valid(); pi.advance()) {
      scores.increment(pi, pi.doubleValue() * val);
      cands.add(pi);
    }
  }
  return FastMath.sqrt(sqlen);
}
Query the most similar objects sparse version .
34,012
/**
 * Query the most similar objects, dense vector version: accumulates dot
 * products against the inverted index and records candidate ids.
 *
 * @param obj dense query vector
 * @param scores per-object dot product accumulator
 * @param cands set collecting all touched candidate ids
 * @return Euclidean norm of the query vector
 */
private double naiveQueryDense(NumberVector obj, WritableDoubleDataStore scores, HashSetModifiableDBIDs cands) {
  double sqlen = 0.;
  final int dims = obj.getDimensionality();
  for(int dim = 0; dim < dims; dim++) {
    final double val = obj.doubleValue(dim);
    // Skip zero and NaN components - they contribute nothing.
    if(val == 0. || val != val) {
      continue;
    }
    sqlen += val * val;
    // Dimensions beyond the index have no postings.
    if(dim >= index.size()) {
      continue;
    }
    ModifiableDoubleDBIDList postings = index.get(dim);
    for(DoubleDBIDListIter pi = postings.iter(); pi.valid(); pi.advance()) {
      scores.increment(pi, pi.doubleValue() * val);
      cands.add(pi);
    }
  }
  return FastMath.sqrt(sqlen);
}
Query the most similar objects dense version .
34,013
/**
 * Query the most similar objects, dispatching to the sparse or dense
 * implementation depending on the vector type.
 *
 * @param obj query vector
 * @param scores per-object dot product accumulator
 * @param cands set collecting all touched candidate ids
 * @return Euclidean norm of the query vector
 */
private double naiveQuery(V obj, WritableDoubleDataStore scores, HashSetModifiableDBIDs cands) {
  return (obj instanceof SparseNumberVector) //
      ? naiveQuerySparse((SparseNumberVector) obj, scores, cands) //
      : naiveQueryDense(obj, scores, cands);
}
Query the most similar objects abstract version .
34,014
/**
 * Applies the configured filter chain to the input stream. Stream filters
 * are chained directly; non-stream filters force materialization into a
 * bundle, which is converted back to a stream when needed. At any point
 * exactly one of {@code stream} / {@code bundle} is non-null.
 *
 * @param stream input bundle stream (must not be null)
 * @return filtered data as a bundle stream
 */
protected BundleStreamSource invokeStreamFilters(BundleStreamSource stream) {
  assert (stream != null);
  if(filters == null) {
    return stream;
  }
  // Invariant: exactly one of (stream, bundle) is non-null.
  MultipleObjectsBundle bundle = null;
  for(ObjectFilter filter : filters) {
    if(filter instanceof StreamFilter) {
      // Stream filter: feed it the current stream (materializing a bundle
      // back into a stream if necessary).
      stream = ((StreamFilter) filter).init(bundle != null ? bundle.asStream() : stream);
      bundle = null;
    }
    else {
      // Materializing filter: needs the data as a bundle.
      bundle = filter.filter(stream != null ? stream.asMultipleObjectsBundle() : bundle);
      stream = null;
    }
  }
  return stream != null ? stream : bundle.asStream();
}
Transforms the specified list of objects and their labels into a list of objects and their associations .
34,015
/**
 * Infer the calling class and method from the stack trace, skipping
 * logging-related classes.
 */
private void inferCallerELKI() {
  needToInferCaller = false;
  final StackTraceElement[] trace = (new Throwable()).getStackTrace();
  // Advance to the frame where the logging call chain starts.
  int pos = 0;
  for(; pos < trace.length; pos++) {
    if(trace[pos].getClassName().equals(START_TRACE_AT)) {
      break;
    }
  }
  // The first subsequent frame not belonging to a known logging class
  // is the actual caller.
  for(; pos < trace.length; pos++) {
    final StackTraceElement frame = trace[pos];
    final String cls = frame.getClassName();
    boolean ignore = false;
    for(int i = 0; i < IGNORE_CLASSES.length; i++) {
      if(cls.equals(IGNORE_CLASSES[i])) {
        ignore = true;
        break;
      }
    }
    if(!ignore) {
      super.setSourceClassName(frame.getClassName());
      super.setSourceMethodName(frame.getMethodName());
      break;
    }
  }
}
Infer a caller ignoring logging - related classes .
34,016
/**
 * Get the sampling result attached to a relation, creating and attaching a
 * new one if none exists yet.
 *
 * @param rel relation to look up
 * @return existing or newly created sampling result
 */
public static SamplingResult getSamplingResult(final Relation<?> rel) {
  Collection<SamplingResult> existing = ResultUtil.filterResults(rel.getHierarchy(), rel, SamplingResult.class);
  if(!existing.isEmpty()) {
    return existing.iterator().next();
  }
  // None attached yet - create one and register it as a child result.
  final SamplingResult res = new SamplingResult(rel);
  ResultUtil.addChildResult(rel, res);
  return res;
}
Get the sampling result attached to a relation
34,017
/**
 * Produce the actual SVG elements for the button: a (possibly rounded)
 * rectangle, an optional gradient highlight, an optional drop shadow, and an
 * optional centered title label.
 *
 * @param svgp plot to create elements for
 * @return SVG group element containing the button
 */
public Element render(SVGPlot svgp) {
  Element tag = svgp.svgElement(SVGConstants.SVG_G_TAG);
  // Base rectangle; r (if set) gives rounded corners.
  Element button = svgp.svgRect(x, y, w, h);
  if(!Double.isNaN(r)) {
    SVGUtil.setAtt(button, SVGConstants.SVG_RX_ATTRIBUTE, r);
    SVGUtil.setAtt(button, SVGConstants.SVG_RY_ATTRIBUTE, r);
  }
  SVGUtil.setAtt(button, SVGConstants.SVG_STYLE_ATTRIBUTE, butcss.inlineCSS());
  tag.appendChild(button);
  // Optional gradient overlay, only if the gradient is defined in the plot.
  if(svgp.getIdElement(SVGEffects.LIGHT_GRADIENT_ID) != null) {
    Element light = svgp.svgRect(x, y, w, h);
    if(!Double.isNaN(r)) {
      SVGUtil.setAtt(light, SVGConstants.SVG_RX_ATTRIBUTE, r);
      SVGUtil.setAtt(light, SVGConstants.SVG_RY_ATTRIBUTE, r);
    }
    SVGUtil.setAtt(light, SVGConstants.SVG_STYLE_ATTRIBUTE, "fill:url(#" + SVGEffects.LIGHT_GRADIENT_ID + ");fill-opacity:.5");
    tag.appendChild(light);
  }
  // Optional drop shadow filter, only if defined in the plot.
  if(svgp.getIdElement(SVGEffects.SHADOW_ID) != null) {
    button.setAttribute(SVGConstants.SVG_FILTER_ATTRIBUTE, "url(#" + SVGEffects.SHADOW_ID + ")");
  }
  // Optional label, horizontally centered, vertically at 70% of the height.
  if(title != null) {
    Element label = svgp.svgText(x + w * .5, y + h * .7, title);
    label.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, titlecss.inlineCSS());
    tag.appendChild(label);
  }
  return tag;
}
Produce the actual SVG elements for the button .
34,018
/**
 * Set the button title and lazily initialize its CSS class.
 *
 * NOTE(review): the CSS class is created only once; on subsequent calls with
 * an existing titlecss the textcolor parameter is ignored - confirm this
 * caching behavior is intended.
 *
 * @param title button label text
 * @param textcolor fill color for the label (used on first call only)
 */
public void setTitle(String title, String textcolor) {
  this.title = title;
  if(titlecss == null) {
    titlecss = new CSSClass(this, "text");
    titlecss.setStatement(SVGConstants.CSS_TEXT_ANCHOR_PROPERTY, SVGConstants.CSS_MIDDLE_VALUE);
    titlecss.setStatement(SVGConstants.CSS_FILL_PROPERTY, textcolor);
    // Font size scales with the button height.
    titlecss.setStatement(SVGConstants.CSS_FONT_SIZE_PROPERTY, .6 * h);
  }
}
Set the button title
34,019
/**
 * Helper to build the lookup key for a (plot item, task) combination.
 *
 * @param item plot item
 * @param task visualization task
 * @return key pair
 */
private Pair<PlotItem, VisualizationTask> key(PlotItem item, VisualizationTask task) {
  return new Pair<PlotItem, VisualizationTask>(item, task);
}
Helper function for building a key object
34,020
/**
 * Helper to build the stored value for an (element, visualization) pair.
 *
 * @param elem SVG element
 * @param vis visualization
 * @return value pair
 */
private Pair<Element, Visualization> value(Element elem, Visualization vis) {
  return new Pair<Element, Visualization>(elem, vis);
}
Helper function to build a value pair
34,021
/**
 * Put a new combination into the map.
 *
 * @param it plot item
 * @param task visualization task
 * @param elem SVG element
 * @param vis visualization
 */
public void put(PlotItem it, VisualizationTask task, Element elem, Visualization vis) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  map.put(k, value(elem, vis));
}
Put a new combination into the map .
34,022
/**
 * Remove a combination from the map.
 *
 * @param it plot item
 * @param task visualization task
 * @return the removed pair, or null if absent
 */
public Pair<Element, Visualization> remove(PlotItem it, VisualizationTask task) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  return map.remove(k);
}
Remove a combination .
34,023
/**
 * Put a pre-built (element, visualization) pair into the map.
 *
 * @param it plot item
 * @param task visualization task
 * @param pair element and visualization
 */
public void put(PlotItem it, VisualizationTask task, Pair<Element, Visualization> pair) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  map.put(k, pair);
}
Put a new item into the visualizations
34,024
/**
 * Determines and returns the covering radius of this node, i.e. the maximum
 * over all entries of parent distance plus the entry's own covering radius.
 *
 * @param routingObjectID routing object of this node (unused here)
 * @param mTree the tree (unused here)
 * @return covering radius
 */
public double coveringRadiusFromEntries(DBID routingObjectID, AbstractMTree<O, N, E, ?> mTree) {
  double radius = 0.;
  for(int i = 0; i < getNumEntries(); i++) {
    final E ent = getEntry(i);
    radius = Math.max(radius, ent.getParentDistance() + ent.getCoveringRadius());
  }
  return radius;
}
Determines and returns the covering radius of this node .
34,025
/**
 * Squared Euclidean distance for 2d points.
 *
 * @param v1 first point (at least 2 components)
 * @param v2 second point (at least 2 components)
 * @return squared distance
 */
public static double quadraticEuclidean(double[] v1, double[] v2) {
  final double dx = v1[0] - v2[0];
  final double dy = v1[1] - v2[1];
  return dx * dx + dy * dy;
}
Squared euclidean distance . 2d .
34,026
/**
 * Aggregate a value into a special (out-of-range) bin.
 *
 * @param value value to merge in
 * @param bin special bin index
 */
protected void aggregateSpecial(T value, int bin) {
  // Merge the new value with the bin's current aggregate.
  special[bin] = aggregate(getSpecial(bin), value);
}
Aggregate for a special value .
34,027
/**
 * Remove the previous relation from the result hierarchy (unless configured
 * to keep it), logging index statistics of its descendants first.
 *
 * @param relation relation to remove
 */
protected void removePreviousRelation(Relation<?> relation) {
  if(keep) {
    return; // Configured to keep the initial relation.
  }
  boolean headerLogged = false;
  for(It<Index> it = relation.getHierarchy().iterDescendants(relation).filter(Index.class); it.valid(); it.advance()) {
    // Log the header line once, only if there is at least one index.
    if(!headerLogged) {
      Logging.getLogger(getClass()).statistics("Index statistics when removing initial data relation.");
      headerLogged = true;
    }
    it.get().logStatistics();
  }
  ResultUtil.removeRecursive(relation.getHierarchy(), relation);
}
Remove the previous relation .
34,028
/**
 * Determines and returns the kNN distances of this node as the maximum kNN
 * distance of all entries, for each k in 1..k_max.
 *
 * @return per-k maximum kNN distance over all entries
 */
protected double[] kNNDistances() {
  final int k = getEntry(0).getKnnDistances().length;
  double[] result = new double[k];
  for(int i = 0; i < getNumEntries(); i++) {
    // PERF: fetch the entry once per i; previously getEntry(i) was called
    // inside the inner j-loop, re-resolving the same entry k times.
    final MkTabEntry entry = getEntry(i);
    for(int j = 0; j < k; j++) {
      // getKnnDistance is 1-based in k.
      result[j] = Math.max(result[j], entry.getKnnDistance(j + 1));
    }
  }
  return result;
}
Determines and returns the knn distance of this node as the maximum knn distance of all entries .
34,029
/**
 * Runs the Variance-of-Volume (VOV) outlier algorithm on the given database:
 * materializes kNN, computes per-object kNN-ball volumes, then scores each
 * object by the variance of its neighbors' volumes.
 *
 * @param database database to query
 * @param relation vector relation to process
 * @return outlier scores with basic score metadata
 */
public OutlierResult run(Database database, Relation<O> relation) {
  StepProgress stepprog = LOG.isVerbose() ? new StepProgress("VOV", 3) : null;
  DBIDs ids = relation.getDBIDs();
  int dim = RelationUtil.dimensionality(relation);
  LOG.beginStep(stepprog, 1, "Materializing nearest-neighbor sets.");
  KNNQuery<O> knnq = DatabaseUtil.precomputedKNNQuery(database, relation, getDistanceFunction(), k);
  LOG.beginStep(stepprog, 2, "Computing Volumes.");
  // Temporary storage: only needed while computing the VOV scores.
  WritableDoubleDataStore vols = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  computeVolumes(knnq, dim, ids, vols);
  LOG.beginStep(stepprog, 3, "Computing Variance of Volumes (VOV).");
  WritableDoubleDataStore vovs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  DoubleMinMax vovminmax = new DoubleMinMax();
  computeVOVs(knnq, ids, vols, vovs, vovminmax);
  LOG.setCompleted(stepprog);
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Variance of Volume", "vov-outlier", vovs, ids);
  // Scores range in [0, inf); 0 is the theoretical minimum.
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(vovminmax.getMin(), vovminmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 0.0);
  return new OutlierResult(scoreMeta, scoreResult);
}
Runs the VOV algorithm on the given database .
34,030
/**
 * Compute the variance of volumes (VOV) score for each object: the sample
 * variance of the kNN-ball volumes of its k nearest neighbors.
 *
 * @param knnq kNN query
 * @param ids objects to score
 * @param vols precomputed per-object kNN-ball volumes
 * @param vovs output score storage
 * @param vovminmax output min/max tracker for the scores
 */
private void computeVOVs(KNNQuery<O> knnq, DBIDs ids, DoubleDataStore vols, WritableDoubleDataStore vovs, DoubleMinMax vovminmax) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Variance of Volume", ids.size(), LOG) : null;
  boolean warned = false; // Warn about overflow only once.
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    KNNList knns = knnq.getKNNForDBID(iter, k);
    DoubleDBIDListIter it = knns.iter();
    // First pass: mean volume of the neighbors.
    double vbar = 0.;
    for(; it.valid(); it.advance()) {
      vbar += vols.doubleValue(it);
    }
    vbar /= knns.size(); // Actually q+1 points (the query is its own neighbor).
    // Second pass: sum of squared deviations.
    double vov = 0.;
    for(it.seek(0); it.valid(); it.advance()) {
      double v = vols.doubleValue(it) - vbar;
      vov += v * v;
    }
    // Negated comparison also catches NaN, not just +inf.
    if(!(vov < Double.POSITIVE_INFINITY) && !warned) {
      LOG.warning("Variance of Volumes has hit double precision limits, results are not reliable.");
      warned = true;
    }
    // Sample variance (n-1 denominator); keep +inf on overflow.
    vov = (vov < Double.POSITIVE_INFINITY) ? vov / (knns.size() - 1) : Double.POSITIVE_INFINITY;
    vovs.putDouble(iter, vov);
    vovminmax.put(vov);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
Compute variance of volumes .
34,031
/**
 * Bound the size of a set by random subsampling in place.
 *
 * @param set set to shrink if necessary
 * @param items maximum allowed size
 */
private void boundSize(HashSetModifiableDBIDs set, int items) {
  if(set.size() <= items) {
    return; // Already within bounds.
  }
  DBIDs keep = DBIDUtil.randomSample(set, items, rnd);
  set.clear();
  set.addDBIDs(keep);
}
Bound the size of a set by random sampling .
34,032
/**
 * Add cand to cur's neighbor heap with the given distance.
 *
 * @param cur object whose heap to update
 * @param cand candidate neighbor
 * @param distance distance between cur and cand
 * @return true if the candidate was newly accepted into the heap
 */
private boolean add(DBIDRef cur, DBIDRef cand, double distance) {
  KNNHeap heap = store.get(cur);
  if(heap.contains(cand)) {
    return false; // Already a known neighbor.
  }
  // insert returns the (possibly updated) k-distance bound of the heap.
  return distance <= heap.insert(distance, cand);
}
Add cand to cur's neighbor heap with the given distance.
34,033
/**
 * Samples a bounded subset of the "new" neighbors for every object, and
 * unmarks the sampled ones in the new-neighbor hash.
 *
 * @param ids objects to process
 * @param sampleNewNeighbors output storage for the sampled new neighbors
 * @param newNeighborHash storage marking which neighbors are new
 * @param items maximum sample size per object
 * @return total number of new neighbors found (before sampling)
 */
private int sampleNew(DBIDs ids, WritableDataStore<HashSetModifiableDBIDs> sampleNewNeighbors, WritableDataStore<HashSetModifiableDBIDs> newNeighborHash, int items) {
  int count = 0;
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    KNNHeap neighbors = store.get(it);
    HashSetModifiableDBIDs markedNew = newNeighborHash.get(it);
    HashSetModifiableDBIDs sampled = sampleNewNeighbors.get(it);
    sampled.clear();
    // Collect the current neighbors that are flagged as new.
    for(DoubleDBIDListIter hi = neighbors.unorderedIterator(); hi.valid(); hi.advance()) {
      if(markedNew.contains(hi)) {
        sampled.add(hi);
        count++;
      }
    }
    boundSize(sampled, items);
    // Sampled neighbors are no longer considered "new".
    markedNew.removeDBIDs(sampled);
    newNeighborHash.put(it, markedNew);
  }
  return count;
}
samples newNeighbors for every object
34,034
/**
 * Calculates the new and old reverse neighbors for the whole database: for
 * every object, registers it as a reverse neighbor of each of its current
 * neighbors, split by whether the neighbor was sampled as new.
 *
 * @param sampleNewHash sampled-new neighbor sets per object
 * @param newReverseNeighbors output: reverse neighbors via new edges
 * @param oldReverseNeighbors output: reverse neighbors via old edges
 */
private void reverse(WritableDataStore<HashSetModifiableDBIDs> sampleNewHash, WritableDataStore<HashSetModifiableDBIDs> newReverseNeighbors, WritableDataStore<HashSetModifiableDBIDs> oldReverseNeighbors) {
  for(DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    final KNNHeap neighbors = store.get(it);
    final HashSetDBIDs sampledNew = sampleNewHash.get(it);
    for(DoubleDBIDListIter ni = neighbors.unorderedIterator(); ni.valid(); ni.advance()) {
      if(sampledNew.contains(ni)) {
        newReverseNeighbors.get(ni).add(it);
      }
      else {
        oldReverseNeighbors.get(ni).add(it);
      }
    }
  }
}
calculates new and old neighbors for database
34,035
/**
 * Compute Jaccard similarity for two number vectors: the fraction of
 * dimensions that are nonzero in either vector where both vectors agree
 * exactly. NaN dimensions are ignored in the shared range.
 *
 * NOTE(review): if both vectors are all-zero, union stays 0 and the result
 * is 0/0 = NaN - confirm callers expect this.
 *
 * @param o1 first vector
 * @param o2 second vector
 * @return Jaccard similarity in [0, 1] (or NaN, see note)
 */
public static double similarityNumberVector(NumberVector o1, NumberVector o2) {
  final int d1 = o1.getDimensionality(), d2 = o2.getDimensionality();
  int intersection = 0, union = 0;
  int d = 0;
  // Shared dimension range.
  for(; d < d1 && d < d2; d++) {
    double v1 = o1.doubleValue(d), v2 = o2.doubleValue(d);
    if(v1 != v1 || v2 != v2) { // NaN check
      continue;
    }
    if(v1 != 0. || v2 != 0) {
      ++union;
      if(v1 == v2) {
        ++intersection;
      }
    }
  }
  // Extra dimensions of the longer vector only contribute to the union.
  for(; d < d1; d++) {
    if(o1.doubleValue(d) != 0) {
      ++union;
    }
  }
  for(; d < d2; d++) {
    if(o2.doubleValue(d) != 0) {
      ++union;
    }
  }
  return intersection / (double) union;
}
Compute Jaccard similarity for two number vectors .
34,036
/**
 * Performs a batch k-nearest-neighbor query for a list of query objects.
 *
 * @param node subtree root (unused here; queries go through knnq)
 * @param ids query object ids
 * @param kmax number of neighbors to retrieve
 * @return map from query id to its kNN list
 */
protected final Map<DBID, KNNList> batchNN(N node, DBIDs ids, int kmax) {
  Map<DBID, KNNList> result = new HashMap<>(ids.size());
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    final DBID id = DBIDUtil.deref(it);
    result.put(id, knnq.getKNNForDBID(id, kmax));
  }
  return result;
}
Performs a batch k - nearest neighbor query for a list of query objects .
34,037
/**
 * Write a single output line: the label followed by the (optionally scaled)
 * score of every object.
 *
 * @param out output stream
 * @param ids objects to write scores for
 * @param result outlier result providing the scores
 * @param scaling optional scaling function (may be null)
 * @param label line label
 */
void writeResult(PrintStream out, DBIDs ids, OutlierResult result, ScalingFunction scaling, String label) {
  // Outlier scalings may need to see the full result first.
  if(scaling instanceof OutlierScaling) {
    ((OutlierScaling) scaling).prepare(result);
  }
  out.append(label);
  DoubleRelation scores = result.getScores();
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    final double raw = scores.doubleValue(it);
    final double value = (scaling == null) ? raw : scaling.getScaled(raw);
    out.append(' ').append(Double.toString(value));
  }
  out.append(FormatUtil.NEWLINE);
}
Write a single output line .
34,038
/**
 * Iterate over the k range, running and timing the given algorithm for each
 * k within [mink, maxk], and handing each result to the output consumer.
 *
 * @param prefix name prefix for logging and output labels
 * @param mink smallest k to run
 * @param maxk largest k to run
 * @param runner produces the outlier result for a given k (may return null)
 * @param out consumer receiving (label, result) pairs
 */
private void runForEachK(String prefix, int mink, int maxk, IntFunction<OutlierResult> runner, BiConsumer<String, OutlierResult> out) {
  if(isDisabled(prefix)) {
    LOG.verbose("Skipping (disabled): " + prefix);
    return;
  }
  LOG.verbose("Running " + prefix);
  // Zero-pad k in the label so results sort lexicographically.
  final int digits = (int) FastMath.ceil(FastMath.log10(krange.getMax() + 1));
  final String format = "%s-%0" + digits + "d";
  krange.forEach(k -> {
    if(k >= mink && k <= maxk) {
      Duration time = LOG.newDuration(this.getClass().getCanonicalName() + "." + prefix + ".k" + k + ".runtime").begin();
      OutlierResult result = runner.apply(k);
      LOG.statistics(time.end());
      if(result != null) {
        out.accept(String.format(Locale.ROOT, format, prefix, k), result);
        // Free memory: detach the result from the hierarchy after output.
        result.getHierarchy().removeSubtree(result);
      }
    }
  });
}
Iterate over the k range .
34,039
/**
 * Returns a defensive copy of the array of coefficients b0 ... bp, so
 * callers cannot mutate the internal state.
 *
 * @return copy of the coefficient array
 */
public double[] getCoefficients() {
  // clone() is the idiomatic one-line defensive copy for primitive arrays,
  // replacing the manual allocate-and-arraycopy.
  return b.clone();
}
Returns a copy of the array of coefficients b0 ... bp.
34,040
/**
 * Returns the value of the polynomial approximation at the specified k,
 * evaluated in powers of log(k).
 *
 * @param k position to evaluate at
 * @return polynomial value sum_p b[p] * log(k)^p
 */
public double getValueAt(int k) {
  final double logk = FastMath.log(k);
  double sum = 0.;
  double term = 1.; // log(k)^p, starting at p = 0.
  for(int p = 0; p < b.length; p++) {
    sum += b[p] * term;
    term *= logk;
  }
  return sum;
}
Returns the function value of the polynomial approximation at the specified k .
34,041
/**
 * Choose the best array adapter for the given vector factory: the number
 * vector adapter when the factory produces number vectors, otherwise the
 * generic feature vector adapter.
 *
 * @param factory vector factory to pick an adapter for
 * @return matching array adapter (unchecked cast to the requested types)
 */
@SuppressWarnings("unchecked")
private static <V extends FeatureVector<F>, F> ArrayAdapter<F, ? super V> getAdapter(Factory<V, F> factory) {
  if(factory instanceof NumberVector.Factory) {
    return (ArrayAdapter<F, ? super V>) NumberVectorAdapter.STATIC;
  }
  return (ArrayAdapter<F, ? super V>) FeatureVectorAdapter.STATIC;
}
Choose the best adapter for this .
34,042
/**
 * OPTICS-style cluster order expansion from a start point, but using
 * precomputed density estimates (inverseDensities) as core distances and
 * precomputed neighborhoods (neighs).
 *
 * @param ipt start point
 * @param order output cluster order
 * @param dq distance query
 * @param prog progress tracker
 */
protected void expandClusterOrder(DBID ipt, ClusterOrder order, DistanceQuery<V> dq, FiniteProgress prog) {
  UpdatableHeap<OPTICSHeapEntry> heap = new UpdatableHeap<>();
  // Seed with the start point at infinite reachability.
  heap.add(new OPTICSHeapEntry(ipt, null, Double.POSITIVE_INFINITY));
  while(!heap.isEmpty()) {
    final OPTICSHeapEntry current = heap.poll();
    DBID currPt = current.objectID;
    order.add(currPt, current.reachability, current.predecessorID);
    processed.add(currPt);
    // Core distance from the precomputed density estimate.
    double coredist = inverseDensities.doubleValue(currPt);
    for(DBIDIter it = neighs.get(currPt).iter(); it.valid(); it.advance()) {
      if(processed.contains(it)) {
        continue;
      }
      // Reachability = max(core distance, actual distance).
      double nrdist = dq.distance(currPt, it);
      if(coredist > nrdist) {
        nrdist = coredist;
      }
      // Keep the smallest reachability seen so far for this neighbor.
      if(reachDist.doubleValue(it) == UNDEFINED_DISTANCE) {
        reachDist.put(it, nrdist);
      }
      else if(nrdist < reachDist.doubleValue(it)) {
        reachDist.put(it, nrdist);
      }
      // UpdatableHeap keeps only the best entry per object.
      heap.add(new OPTICSHeapEntry(DBIDUtil.deref(it), currPt, nrdist));
    }
    LOG.incrementProcessed(prog);
  }
}
OPTICS algorithm for processing a point but with different density estimates
34,043
/**
 * Resize the on-disk matrix to cover newsize x newsize, updating the file
 * size and the matrix-size field in the extra header.
 *
 * @param newsize new matrix dimension (must be below 0xFFFF)
 * @throws IOException if the backing array is read-only
 */
public synchronized void resizeMatrix(int newsize) throws IOException {
  // Offsets are computed in int arithmetic; larger sizes would overflow.
  if(newsize >= 0xFFFF) {
    throw new RuntimeException("Matrix size is too big and will overflow the integer datatype.");
  }
  if(!array.isWritable()) {
    throw new IOException("Can't resize a read-only array.");
  }
  array.resizeFile(arraysize(newsize));
  this.matrixsize = newsize;
  // Persist the new size in the extra header region.
  ByteBuffer header = array.getExtraHeader();
  header.putInt(this.matrixsize);
}
Resize the matrix to cover newsize x newsize .
34,044
/**
 * Compute the record offset within the file for the symmetric pair (x, y),
 * stored as a lower-triangular matrix.
 *
 * @param x first index
 * @param y second index
 * @return linear offset row*(row+1)/2 + col with row = max(x,y), col = min(x,y)
 */
private int computeOffset(int x, int y) {
  // Normalize so that row >= col instead of recursing on swapped arguments.
  final int row = (x >= y) ? x : y;
  final int col = (x >= y) ? y : x;
  return ((row * (row + 1)) >> 1) + col;
}
Compute the offset within the file .
34,045
/**
 * Validates the header and throws an IOException if it is invalid. If
 * validateRecordSize is true, the record size must exactly match the stored
 * record size; otherwise the record size is read from the header and adopted.
 *
 * @param validateRecordSize whether to enforce the expected record size
 * @throws IOException on any mismatch or inconsistent file length
 */
private void validateHeader(boolean validateRecordSize) throws IOException {
  int readmagic = file.readInt();
  if(readmagic != this.magic) {
    file.close();
    throw new IOException("Magic in LinearDiskCache does not match: " + readmagic + " instead of " + this.magic);
  }
  if(file.readInt() != this.headersize) {
    file.close();
    throw new IOException("Header size in LinearDiskCache does not match.");
  }
  if(validateRecordSize) {
    if(file.readInt() != this.recordsize) {
      file.close();
      throw new IOException("Recordsize in LinearDiskCache does not match.");
    }
  }
  else {
    // Adopt the record size stored in the file.
    this.recordsize = file.readInt();
  }
  // Sanity check: we must be positioned at the record-count field now.
  if(file.getFilePointer() != HEADER_POS_SIZE) {
    throw new IOException("Incorrect file position when reading header.");
  }
  this.numrecs = file.readInt();
  // The file length must be exactly consistent with the record count.
  if(numrecs < 0 || file.length() != indexToFileposition(numrecs)) {
    throw new IOException("File size and number of records do not agree.");
  }
  if(file.getFilePointer() != INTERNAL_HEADER_SIZE) {
    throw new IOException("Incorrect file position after reading header.");
  }
}
Validates the header and throws an IOException if the header is invalid . If validateRecordSize is set to true the record size must match exactly the stored record size within the files header else the record size is read from the header and used .
34,046
/**
 * Resize the backing file to hold the given number of records, persisting
 * the new count in the header and remapping the array.
 *
 * @param newsize new number of records
 * @throws IOException if the file is not writable
 */
public synchronized void resizeFile(int newsize) throws IOException {
  if(!writable) {
    throw new IOException("File is not writeable!");
  }
  this.numrecs = newsize;
  // Persist the record count in the header, then adjust the file length.
  file.seek(HEADER_POS_SIZE);
  file.writeInt(numrecs);
  file.setLength(indexToFileposition(numrecs));
  mapArray();
}
Resize file to the intended size
34,047
/**
 * Map the extra (user) header region of the file into a byte buffer,
 * read-write if the file is writable.
 *
 * @return mapped buffer over the extra header bytes
 * @throws IOException on mapping failure
 */
public synchronized ByteBuffer getExtraHeader() throws IOException {
  final int extralen = headersize - INTERNAL_HEADER_SIZE;
  final MapMode mode = writable ? MapMode.READ_WRITE : MapMode.READ_ONLY;
  return file.getChannel().map(mode, INTERNAL_HEADER_SIZE, extralen);
}
Read the extra header data .
34,048
/**
 * Run MiniMax agglomerative clustering on the given database: builds the
 * full pairwise distance matrix with per-pair prototypes, then performs
 * size-1 merges, shrinking the active set as clusters become linked.
 *
 * @param db database
 * @param relation data relation
 * @return pointer hierarchy with cluster prototypes
 */
public PointerPrototypeHierarchyRepresentationResult run(Database db, Relation<O> relation) {
  DistanceQuery<O> dq = DatabaseUtil.precomputedDistanceQuery(db, relation, getDistanceFunction(), LOG);
  final DBIDs ids = relation.getDBIDs();
  final int size = ids.size();
  PointerHierarchyRepresentationBuilder builder = new PointerHierarchyRepresentationBuilder(ids, dq.getDistanceFunction().isSquared());
  Int2ObjectOpenHashMap<ModifiableDBIDs> clusters = new Int2ObjectOpenHashMap<>(size);
  // Lower-triangular distance matrix plus one prototype per pair.
  MatrixParadigm mat = new MatrixParadigm(ids);
  ArrayModifiableDBIDs prots = DBIDUtil.newArray(MatrixParadigm.triangleSize(size));
  initializeMatrices(mat, prots, dq);
  DBIDArrayMIter protiter = prots.iter();
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("MiniMax clustering", size - 1, LOG) : null;
  DBIDArrayIter ix = mat.ix;
  // size-1 merges; "end" shrinks as trailing entries become linked.
  for(int i = 1, end = size; i < size; i++) {
    end = AGNES.shrinkActiveSet(ix, builder, end, findMerge(end, mat, protiter, builder, clusters, dq));
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  return (PointerPrototypeHierarchyRepresentationResult) builder.complete();
}
Run the algorithm on a database .
34,049
/**
 * Initializes the inter-cluster distance matrix of all possible merges, and
 * the initial prototype of each pair (the second point of the pair).
 *
 * @param mat matrix paradigm holding the distance matrix and iterators
 * @param prots output list of per-pair prototypes
 * @param dq distance query
 * @param <O> object type
 */
protected static <O> void initializeMatrices(MatrixParadigm mat, ArrayModifiableDBIDs prots, DistanceQuery<O> dq) {
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  final double[] dist = mat.matrix;
  int pos = 0;
  for(ix.seek(0); ix.valid(); ix.advance()) {
    // Lower triangle only: iy ranges over offsets below ix.
    for(iy.seek(0); iy.getOffset() < ix.getOffset(); iy.advance()) {
      dist[pos++] = dq.distance(ix, iy);
      // Initially, the prototype of a singleton pair is its second point.
      prots.add(iy);
    }
  }
  assert (prots.size() == pos);
}
Initializes the inter - cluster distance matrix of possible merges
34,050
/**
 * Find the pair of unlinked clusters with minimum matrix distance within the
 * active set and merge them.
 *
 * @param end size of the active set
 * @param mat matrix paradigm
 * @param prots per-pair prototype iterator
 * @param builder hierarchy builder (tracks linked clusters)
 * @param clusters current multi-point clusters by representative index
 * @param dq distance query
 * @return index x of the merged-away cluster (for active-set shrinking)
 */
protected static int findMerge(int end, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<?> dq) {
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  final double[] distances = mat.matrix;
  double mindist = Double.POSITIVE_INFINITY;
  int x = -1, y = -1;
  for(int dx = 0; dx < end; dx++) {
    // Skip clusters that were already merged away.
    if(builder.isLinked(ix.seek(dx))) {
      continue;
    }
    // Start of row dx in the lower-triangular matrix.
    final int xoffset = MatrixParadigm.triangleSize(dx);
    for(int dy = 0; dy < dx; dy++) {
      if(builder.isLinked(iy.seek(dy))) {
        continue;
      }
      double dist = distances[xoffset + dy];
      if(dist < mindist) {
        mindist = dist;
        x = dx;
        y = dy;
      }
    }
  }
  assert (y < x);
  merge(end, mat, prots, builder, clusters, dq, x, y);
  return x;
}
Find the best merge .
34,051
/**
 * Merge the clusters at matrix offsets x and y (y &lt; x); y is kept as the
 * surviving representative, x is linked into it.
 *
 * @param size Active-set size, forwarded to the matrix update
 * @param mat Matrix paradigm
 * @param prots Prototype iterator
 * @param builder Hierarchy builder
 * @param clusters Cluster members by representative offset
 * @param dq Distance query
 * @param x Offset of the merged-away cluster
 * @param y Offset of the surviving cluster
 */
protected static void merge(int size, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<?> dq, int x, int y) {
  assert (y < x);
  final DBIDArrayIter ix = mat.ix.seek(x), iy = mat.iy.seek(y);
  final double[] distances = mat.matrix;
  int offset = MatrixParadigm.triangleSize(x) + y;
  if(LOG.isDebuggingFine()) {
    LOG.debugFine("Merging: " + DBIDUtil.toString(ix) + " -> " + DBIDUtil.toString(iy) + " " + distances[offset]);
  }
  // Clusters without an explicit member set are singletons.
  ModifiableDBIDs cx = clusters.get(x), cy = clusters.get(y);
  if(cy == null) {
    cy = DBIDUtil.newHashSet();
    cy.add(iy);
  }
  if(cx == null) {
    cy.add(ix);
  }
  else {
    cy.addDBIDs(cx);
    clusters.remove(x);
  }
  clusters.put(y, cy);
  // Record the merge with its distance and prototype, then refresh all
  // matrix entries that involve the surviving cluster y.
  builder.add(ix, distances[offset], iy, prots.seek(offset));
  updateMatrices(size, mat, prots, builder, clusters, dq, y);
}
Merges the two clusters at positions x and y (each identified by the member with the smallest ID); y is kept as the surviving cluster and x is merged into it.
34,052
/**
 * Update all matrix entries that contain a distance to the newly merged
 * cluster c: row entries (c, y) with y &lt; c and column entries (x, c)
 * with x &gt; c.
 *
 * @param size Active-set size (currently not read in the body; iteration
 *        uses the iterator validity instead — NOTE(review): confirm this
 *        parameter is intentionally unused)
 * @param mat Matrix paradigm
 * @param prots Prototype iterator
 * @param builder Hierarchy builder (skips already-linked entries)
 * @param clusters Cluster members by representative offset
 * @param dq Distance query
 * @param c Offset of the merged cluster
 */
protected static <O> void updateMatrices(int size, MatrixParadigm mat, DBIDArrayMIter prots, PointerHierarchyRepresentationBuilder builder, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<O> dq, int c) {
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  // Pairs (c, y) with y < c.
  ix.seek(c);
  for(iy.seek(0); iy.getOffset() < c; iy.advance()) {
    if(builder.isLinked(iy)) {
      continue;
    }
    updateEntry(mat, prots, clusters, dq, c, iy.getOffset());
  }
  // Pairs (x, c) with x > c.
  iy.seek(c);
  for(ix.seek(c + 1); ix.valid(); ix.advance()) {
    if(builder.isLinked(ix)) {
      continue;
    }
    updateEntry(mat, prots, clusters, dq, ix.getOffset(), c);
  }
}
Update the entries of the matrices that contain a distance to c the newly merged cluster .
34,053
/**
 * Recompute the minimax distance and prototype for the cluster pair (x, y).
 *
 * @param mat Matrix paradigm
 * @param prots Prototype iterator
 * @param clusters Cluster members by representative offset (null = singleton)
 * @param dq Distance query
 * @param x First offset (larger)
 * @param y Second offset (smaller)
 */
protected static void updateEntry(MatrixParadigm mat, DBIDArrayMIter prots, Int2ObjectOpenHashMap<ModifiableDBIDs> clusters, DistanceQuery<?> dq, int x, int y) {
  assert (y < x);
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  final double[] distances = mat.matrix;
  ModifiableDBIDs cx = clusters.get(x), cy = clusters.get(y);
  DBIDVar prototype = DBIDUtil.newVar(ix.seek(x)); // Default prototype.
  double minMaxDist;
  if(cx != null && cy != null) {
    // Two multi-element clusters: search prototypes from both sides.
    minMaxDist = findPrototype(dq, cx, cy, prototype, Double.POSITIVE_INFINITY);
    minMaxDist = findPrototype(dq, cy, cx, prototype, minMaxDist);
  }
  else if(cx != null) {
    // y is a singleton.
    minMaxDist = findPrototypeSingleton(dq, cx, iy.seek(y), prototype);
  }
  else if(cy != null) {
    // x is a singleton.
    minMaxDist = findPrototypeSingleton(dq, cy, ix.seek(x), prototype);
  }
  else {
    // Two singletons: the pairwise distance is exact; either point serves.
    minMaxDist = dq.distance(ix.seek(x), iy.seek(y));
    prototype.set(ix);
  }
  final int offset = MatrixParadigm.triangleSize(x) + y;
  distances[offset] = minMaxDist;
  prots.seek(offset).setDBID(prototype);
}
Updates the entry at position (x, y) of the distance matrix with the recomputed minimax distance and prototype.
34,054
/**
 * Find the maximum distance from one object to a set of objects, aborting
 * early as soon as the running maximum can no longer beat the best known
 * minimax value.
 *
 * @param dq Distance query
 * @param i Query object
 * @param cy Set to scan
 * @param maxDist Current maximum distance
 * @param minMaxDist Best known minimax distance (early-exit threshold)
 * @return Maximum distance found (possibly exceeding minMaxDist on exit)
 */
private static double findMax(DistanceQuery<?> dq, DBIDIter i, DBIDs cy, double maxDist, double minMaxDist) {
  DBIDIter other = cy.iter();
  while(other.valid()) {
    final double d = dq.distance(i, other);
    if(d > maxDist && d >= minMaxDist) {
      return d; // Cannot improve on the current best minimax — abandon.
    }
    maxDist = d > maxDist ? d : maxDist;
    other.advance();
  }
  return maxDist;
}
Find the maximum distance of one object to a set .
34,055
/**
 * Serialize this entry: first the integer id, then the dimensionality,
 * then every coordinate value in order.
 *
 * @param out Output stream
 * @throws IOException on serialization errors
 */
public void writeExternal(ObjectOutput out) throws IOException {
  out.writeInt(DBIDUtil.asInteger(id));
  final int dim = values.length;
  out.writeInt(dim);
  for(int d = 0; d < dim; d++) {
    out.writeDouble(values[d]);
  }
}
Calls the super method and writes the values of this entry to the specified stream .
34,056
/**
 * Deserialize this entry: id, dimensionality, then all coordinate values.
 * Must mirror {@code writeExternal} exactly.
 *
 * Fix: the id was previously read with {@code in.read()}, which consumes a
 * single byte (or -1 at EOF), while the writer emits it with
 * {@code out.writeInt(...)} — corrupting the id and desynchronizing the
 * stream. Use {@code readInt()} to match the writer.
 *
 * @param in Input stream
 * @throws IOException on deserialization errors
 * @throws ClassNotFoundException per the Externalizable contract
 */
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  // readInt() matches writeExternal's writeInt(); read() would consume one byte only.
  id = DBIDUtil.importInteger(in.readInt());
  values = new double[in.readInt()];
  for(int d = 0; d < values.length; d++) {
    values[d] = in.readDouble();
  }
}
Calls the super method and reads the values of this entry from the specified input stream .
34,057
/**
 * Serialize this indefinite progress as "task: processed" into the buffer.
 *
 * @param buf Target buffer
 * @return The same buffer, for chaining
 */
public StringBuilder appendToBuffer(StringBuilder buf) {
  return buf.append(getTask()).append(": ").append(getProcessed());
}
Serialize indefinite progress .
34,058
/**
 * The input type requested: a vector field whose dimensionality is at least
 * the largest dimension referenced in {@code dims}.
 *
 * @return Type restriction for the input relation
 */
private TypeInformation getInputTypeRestriction() {
  int maxdim = dims[0];
  for(int d : dims) {
    if(d > maxdim) {
      maxdim = d;
    }
  }
  return VectorFieldTypeInformation.typeRequest(NumberVector.class, maxdim, Integer.MAX_VALUE);
}
The input type we use .
34,059
/**
 * Test whether a point is a local density maximum, i.e. none of its
 * neighbors has a smaller k-distance.
 *
 * @param kdist k-distance of the point itself
 * @param neighbors Neighborhood to check
 * @param kdists Precomputed k-distances
 * @return true if no neighbor has a smaller k-distance
 */
private boolean isLocalMaximum(double kdist, DBIDs neighbors, WritableDoubleDataStore kdists) {
  DBIDIter it = neighbors.iter();
  while(it.valid()) {
    if(kdists.doubleValue(it) < kdist) {
      return false; // A denser neighbor exists.
    }
    it.advance();
  }
  return true;
}
Test if a point is a local density maximum .
34,060
/**
 * Set-based cluster expansion: grow a cluster from a seed neighborhood by
 * repeatedly popping candidates from an active set.
 *
 * @param clusterid Id of the cluster being built
 * @param clusterids Per-object cluster assignment storage
 * @param knnq kNN query
 * @param neighbors Seed neighborhood
 * @param maxkdist Core threshold: objects whose kNN distance is at most this
 *        are expanded further
 * @param progress Progress logging (may be null)
 * @return Number of objects assigned to the cluster
 */
protected int expandCluster(final int clusterid, final WritableIntegerDataStore clusterids, final KNNQuery<O> knnq, final DBIDs neighbors, final double maxkdist, final FiniteProgress progress) {
  int clustersize = 1; // The seed point itself counts as a member.
  final ArrayModifiableDBIDs activeSet = DBIDUtil.newArray();
  activeSet.addDBIDs(neighbors);
  DBIDVar id = DBIDUtil.newVar();
  while(!activeSet.isEmpty()) {
    activeSet.pop(id);
    final int oldclus = clusterids.intValue(id);
    if(oldclus == NOISE) {
      // Previously noise: assign with negated id (presumably marking a
      // border/non-core member — TODO confirm against the consumer), do not expand.
      clustersize += 1;
      clusterids.putInt(id, -clusterid);
    }
    else if(oldclus == UNPROCESSED) {
      clustersize += 1;
      final KNNList newneighbors = knnq.getKNNForDBID(id, k);
      // Core point: continue expansion through its neighbors.
      if(newneighbors.getKNNDistance() <= maxkdist) {
        activeSet.addDBIDs(newneighbors);
      }
      clusterids.putInt(id, clusterid);
      LOG.incrementProcessed(progress);
    }
  }
  return clustersize;
}
Set - based expand cluster implementation .
34,061
/**
 * Compute the density proxy (kNN distance) of every object and store it.
 *
 * @param knnq kNN query
 * @param ids Objects to process
 * @param dens Output storage for the kNN distances
 */
private void fillDensities(KNNQuery<O> knnq, DBIDs ids, WritableDoubleDataStore dens) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Densities", ids.size(), LOG) : null;
  DBIDIter iter = ids.iter();
  while(iter.valid()) {
    dens.putDouble(iter, knnq.getKNNForDBID(iter, k).getKNNDistance());
    LOG.incrementProcessed(prog);
    iter.advance();
  }
  LOG.ensureCompleted(prog);
}
Collect all densities into an array for sorting .
34,062
/**
 * Perform the CLIQUE algorithm: (1) find dense subspaces bottom-up, one
 * dimensionality at a time; (2) extract clusters from each dense subspace.
 *
 * @param relation Input vector relation
 * @return Flat clustering with subspace models
 */
public Clustering<SubspaceModel> run(Relation<? extends NumberVector> relation) {
  final int dimensionality = RelationUtil.dimensionality(relation);
  StepProgress step = new StepProgress(2);
  // Step 1: identify dense subspaces, level by level.
  step.beginStep(1, "Identification of subspaces that contain clusters", LOG);
  ArrayList<List<CLIQUESubspace>> dimensionToDenseSubspaces = new ArrayList<>(dimensionality);
  List<CLIQUESubspace> denseSubspaces = findOneDimensionalDenseSubspaces(relation);
  dimensionToDenseSubspaces.add(denseSubspaces);
  if(LOG.isVerbose()) {
    LOG.verbose("1-dimensional dense subspaces: " + denseSubspaces.size());
  }
  if(LOG.isDebugging()) {
    for(CLIQUESubspace s : denseSubspaces) {
      LOG.debug(s.toString());
    }
  }
  // Apriori-style candidate generation: k-dimensional dense subspaces are
  // built from the (k-1)-dimensional ones; stop once none remain.
  for(int k = 2; k <= dimensionality && !denseSubspaces.isEmpty(); k++) {
    denseSubspaces = findDenseSubspaces(relation, denseSubspaces);
    assert (dimensionToDenseSubspaces.size() == k - 1);
    dimensionToDenseSubspaces.add(denseSubspaces);
    if(LOG.isVerbose()) {
      LOG.verbose(k + "-dimensional dense subspaces: " + denseSubspaces.size());
    }
    if(LOG.isDebugging()) {
      for(CLIQUESubspace s : denseSubspaces) {
        LOG.debug(s.toString());
      }
    }
  }
  // Step 2: extract connected clusters from the dense subspaces.
  step.beginStep(2, "Identification of clusters", LOG);
  Clustering<SubspaceModel> result = new Clustering<>("CLIQUE clustering", "clique-clustering");
  for(int dim = 0; dim < dimensionToDenseSubspaces.size(); dim++) {
    List<CLIQUESubspace> subspaces = dimensionToDenseSubspaces.get(dim);
    List<Pair<Subspace, ModifiableDBIDs>> modelsAndClusters = determineClusters(subspaces);
    if(LOG.isVerbose()) {
      LOG.verbose((dim + 1) + "-dimensional clusters: " + modelsAndClusters.size());
    }
    for(Pair<Subspace, ModifiableDBIDs> modelAndCluster : modelsAndClusters) {
      Cluster<SubspaceModel> newCluster = new Cluster<>(modelAndCluster.second);
      // Model: the subspace plus the centroid of the cluster members.
      newCluster.setModel(new SubspaceModel(modelAndCluster.first, Centroid.make(relation, modelAndCluster.second).getArrayRef()));
      result.addToplevelCluster(newCluster);
    }
  }
  return result;
}
Performs the CLIQUE algorithm on the given database .
34,063
/**
 * Determine the clusters contained in the given dense subspaces, flattening
 * the per-subspace results into one list.
 *
 * @param denseSubspaces Dense subspaces to process
 * @return All (subspace, member set) pairs found
 */
private List<Pair<Subspace, ModifiableDBIDs>> determineClusters(List<CLIQUESubspace> denseSubspaces) {
  List<Pair<Subspace, ModifiableDBIDs>> all = new ArrayList<>();
  for(int i = 0; i < denseSubspaces.size(); i++) {
    final CLIQUESubspace subspace = denseSubspaces.get(i);
    final List<Pair<Subspace, ModifiableDBIDs>> found = subspace.determineClusters();
    if(LOG.isDebugging()) {
      LOG.debugFine("Subspace " + subspace + " clusters " + found.size());
    }
    all.addAll(found);
  }
  return all;
}
Determines the clusters in the specified dense subspaces .
34,064
/**
 * Determine the one-dimensional dense subspaces, optionally applying the
 * MDL-based pruning step.
 *
 * @param database Input relation
 * @return Dense 1-d subspaces (pruned if the prune option is set)
 */
private List<CLIQUESubspace> findOneDimensionalDenseSubspaces(Relation<? extends NumberVector> database) {
  List<CLIQUESubspace> candidates = findOneDimensionalDenseSubspaceCandidates(database);
  if(prune) {
    return pruneDenseSubspaces(candidates);
  }
  return candidates;
}
Determines the one dimensional dense subspaces and performs a pruning if this option is chosen .
34,065
/**
 * Update the per-dimension minima and maxima with the given feature vector.
 * NaN values are skipped.
 *
 * @param featureVector Vector to incorporate
 * @param minima Running minima (modified in place)
 * @param maxima Running maxima (modified in place)
 */
private void updateMinMax(NumberVector featureVector, double[] minima, double[] maxima) {
  final int dim = featureVector.getDimensionality();
  assert (minima.length == dim);
  for(int d = 0; d < dim; d++) {
    final double v = featureVector.doubleValue(d);
    if(Double.isNaN(v)) {
      continue; // Ignore missing values.
    }
    maxima[d] = MathUtil.max(v, maxima[d]);
    minima[d] = MathUtil.min(v, minima[d]);
  }
}
Updates the minima and maxima array according to the specified feature vector .
34,066
/**
 * Determine the one-dimensional dense subspace candidates in a single pass
 * over the database: count objects into units, keep units whose selectivity
 * is at least tau, and group them by dimension.
 *
 * @param database Input relation
 * @return 1-d subspace candidates, sorted by descending coverage
 */
private List<CLIQUESubspace> findOneDimensionalDenseSubspaceCandidates(Relation<? extends NumberVector> database) {
  Collection<CLIQUEUnit> units = initOneDimensionalUnits(database);
  double total = database.size();
  // Count every object into each unit that covers it.
  for(DBIDIter it = database.iterDBIDs(); it.valid(); it.advance()) {
    NumberVector featureVector = database.get(it);
    for(CLIQUEUnit unit : units) {
      unit.addFeatureVector(it, featureVector);
    }
  }
  int dimensionality = RelationUtil.dimensionality(database);
  Collection<CLIQUEUnit> denseUnits = new ArrayList<>();
  CLIQUESubspace[] denseSubspaces = new CLIQUESubspace[dimensionality];
  for(CLIQUEUnit unit : units) {
    // A unit is dense if its selectivity reaches the density threshold tau.
    if(unit.selectivity(total) >= tau) {
      denseUnits.add(unit);
      int dim = unit.getDimension(0);
      // Lazily create the subspace for this dimension.
      CLIQUESubspace subspace_d = denseSubspaces[dim];
      if(subspace_d == null) {
        denseSubspaces[dim] = subspace_d = new CLIQUESubspace(dim);
      }
      subspace_d.addDenseUnit(unit);
    }
  }
  // Collect the non-empty subspaces and sort by descending coverage.
  List<CLIQUESubspace> subspaceCandidates = new ArrayList<>(dimensionality);
  for(CLIQUESubspace s : denseSubspaces) {
    if(s != null) {
      subspaceCandidates.add(s);
    }
  }
  Collections.sort(subspaceCandidates, CLIQUESubspace.BY_COVERAGE);
  if(LOG.isDebugging()) {
    LOG.debugFine(new StringBuilder().append(" number of 1-dim dense units: ").append(denseUnits.size()).append("\n number of 1-dim dense subspace candidates: ").append(subspaceCandidates.size()).toString());
  }
  return subspaceCandidates;
}
Determines the one - dimensional dense subspace candidates by making a pass over the database .
34,067
/**
 * MDL-based pruning of dense subspaces (CLIQUE): pick the prefix of the
 * coverage-sorted list that minimizes the total code length of the split
 * into a selected set I and a pruned set P.
 *
 * @param denseSubspaces Subspaces, sorted by descending coverage
 * @return The selected prefix (everything up to the optimal split point)
 */
private List<CLIQUESubspace> pruneDenseSubspaces(List<CLIQUESubspace> denseSubspaces) {
  int[][] means = computeMeans(denseSubspaces);
  double[][] diffs = computeDiffs(denseSubspaces, means[0], means[1]);
  double[] codeLength = new double[denseSubspaces.size()];
  double minCL = Double.MAX_VALUE;
  int min_i = -1;
  for(int i = 0; i < denseSubspaces.size(); i++) {
    int mi = means[0][i], mp = means[1][i];
    // Code length of split at i: bits for both means plus both deviation sums.
    double cl = codeLength[i] = log2OrZero(mi) + diffs[0][i] + log2OrZero(mp) + diffs[1][i];
    // <= keeps the largest index among ties, i.e. prunes as little as possible.
    if(cl <= minCL) {
      minCL = cl;
      min_i = i;
    }
  }
  return denseSubspaces.subList(0, min_i + 1);
}
Performs a MDL - based pruning of the specified dense subspaces as described in the CLIQUE algorithm .
34,068
/**
 * For every split position i of the coverage-sorted subspace list into a
 * selected set I = [0..i] and a pruned set P = (i..n], compute the mean
 * coverage of each side (rounded up).
 *
 * @param denseSubspaces Subspaces, sorted by descending coverage
 * @return { mi, mp } with mi[i] the mean of I and mp[i] the mean of P for a
 *         split after position i (the last mp entry stays 0: P is empty there)
 */
private int[][] computeMeans(List<CLIQUESubspace> denseSubspaces) {
  int n = denseSubspaces.size() - 1;
  int[] mi = new int[n + 1], mp = new int[n + 1];
  double resultMI = 0, resultMP = 0;
  for(int i = 0; i < denseSubspaces.size(); i++) {
    // Prefix sum from the front (selected) and from the back (pruned).
    resultMI += denseSubspaces.get(i).getCoverage();
    resultMP += denseSubspaces.get(n - i).getCoverage();
    mi[i] = (int) FastMath.ceil(resultMI / (i + 1));
    if(i != n) {
      // mp[j] holds the mean of elements j+1..n.
      mp[n - 1 - i] = (int) FastMath.ceil(resultMP / (i + 1));
    }
  }
  return new int[][] { mi, mp };
}
The specified sorted list of dense subspaces is divided into the selected set I and the pruned set P . For each set the mean of the cover fractions is computed .
34,069
/**
 * For every split position of the coverage-sorted subspace list into
 * selected set I and pruned set P, accumulate the log2 deviations of the
 * coverages from the respective set means.
 *
 * @param denseSubspaces Subspaces, sorted by descending coverage
 * @param mi Means of the selected sets (from computeMeans)
 * @param mp Means of the pruned sets (from computeMeans)
 * @return { diff_mi, diff_mp }: cumulative deviation terms per split position
 */
private double[][] computeDiffs(List<CLIQUESubspace> denseSubspaces, int[] mi, int[] mp) {
  int n = denseSubspaces.size() - 1;
  double[] diff_mi = new double[n + 1], diff_mp = new double[n + 1];
  double resultMI = 0, resultMP = 0;
  for(int i = 0; i < denseSubspaces.size(); i++) {
    // Deviation of element i from the selected-set mean at split i.
    double diffMI = Math.abs(denseSubspaces.get(i).getCoverage() - mi[i]);
    resultMI += log2OrZero(diffMI);
    // Deviation of element n-i from the pruned-set mean; the last split has
    // an empty pruned set, contributing zero.
    double diffMP = (i != n) ? Math.abs(denseSubspaces.get(n - i).getCoverage() - mp[n - 1 - i]) : 0;
    resultMP += log2OrZero(diffMP);
    diff_mi[i] = resultMI;
    if(i != n) {
      diff_mp[n - 1 - i] = resultMP;
    }
  }
  return new double[][] { diff_mi, diff_mp };
}
The specified sorted list of dense subspaces is divided into the selected set I and the pruned set P . For each set the difference from the specified mean values is computed .
34,070
/**
 * Append one representation (type information plus data) to this object.
 *
 * @param meta Type information of the representation
 * @param data The representation payload
 */
public void append(SimpleTypeInformation<?> meta, Object data) {
  this.contents.add(data);
  this.meta.add(meta);
}
Append a single representation to the object .
34,071
/**
 * Test whether this bit vector contains every bit that is set in the given
 * bitset.
 *
 * Fix: previously, a word at index i &gt;= bits.length that was zero fell
 * through to {@code bits[i]} and raised an
 * ArrayIndexOutOfBoundsException; zero words beyond our own length are
 * trivially contained and are now skipped.
 *
 * @param bitset Bits to test for containment (word array)
 * @return true if every set bit of bitset is also set here
 */
public boolean contains(long[] bitset) {
  for(int i = 0; i < bitset.length; i++) {
    final long b = bitset[i];
    if(i >= bits.length) {
      if(b != 0L) {
        return false; // Bits set beyond our length cannot be contained.
      }
      continue; // Zero word past our length: nothing required.
    }
    if((b & bits[i]) != b) {
      return false;
    }
  }
  return true;
}
Returns whether this BitVector contains all bits that are set to true in the specified BitSet .
34,072
/**
 * Compute the Jaccard similarity of two bit vectors:
 * |intersection| / |union|.
 *
 * @param v2 Other bit vector
 * @return Jaccard coefficient in [0, 1]
 */
public double jaccardSimilarity(BitVector v2) {
  final double inter = BitsUtil.intersectionSize(bits, v2.bits);
  final double union = BitsUtil.unionSize(bits, v2.bits);
  return inter / union;
}
Compute the Jaccard similarity of two bit vectors .
34,073
/**
 * Write a short (big-endian) to the byte array at the given offset.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value (low 16 bits are written)
 * @return Number of bytes written (SIZE_SHORT)
 */
public static int writeShort(byte[] array, int offset, int v) {
  array[offset] = (byte) (v >>> 8);
  array[offset + 1] = (byte) v;
  return SIZE_SHORT;
}
Write a short to the byte array at the given offset .
34,074
/**
 * Write an int (big-endian) to the byte array at the given offset.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value
 * @return Number of bytes written (SIZE_INT)
 */
public static int writeInt(byte[] array, int offset, int v) {
  // Most significant byte first.
  array[offset] = (byte) (v >>> 24);
  array[offset + 1] = (byte) (v >>> 16);
  array[offset + 2] = (byte) (v >>> 8);
  array[offset + 3] = (byte) v;
  return SIZE_INT;
}
Write an integer to the byte array at the given offset .
34,075
/**
 * Write a long (big-endian) to the byte array at the given offset.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value
 * @return Number of bytes written (SIZE_LONG)
 */
public static int writeLong(byte[] array, int offset, long v) {
  // Emit bytes from the most significant downwards.
  for(int i = 0; i < 8; i++) {
    array[offset + i] = (byte) (v >>> (56 - (i << 3)));
  }
  return SIZE_LONG;
}
Write a long to the byte array at the given offset .
34,076
/**
 * Write a float to the byte array at the given offset, encoded as its
 * IEEE-754 bit pattern in big-endian order.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value
 * @return Number of bytes written
 */
public static int writeFloat(byte[] array, int offset, float v) {
  final int bits = Float.floatToIntBits(v);
  return writeInt(array, offset, bits);
}
Write a float to the byte array at the given offset .
34,077
/**
 * Write a double to the byte array at the given offset, encoded as its
 * IEEE-754 bit pattern in big-endian order.
 *
 * @param array Target array
 * @param offset Write position
 * @param v Value
 * @return Number of bytes written
 */
public static int writeDouble(byte[] array, int offset, double v) {
  final long bits = Double.doubleToLongBits(v);
  return writeLong(array, offset, bits);
}
Write a double to the byte array at the given offset .
34,078
/**
 * Read a big-endian short from the byte array at the given offset.
 *
 * @param array Source array
 * @param offset Read position
 * @return Decoded short
 */
public static short readShort(byte[] array, int offset) {
  final int hi = (array[offset] & 0xFF) << 8;
  final int lo = array[offset + 1] & 0xFF;
  return (short) (hi | lo);
}
Read a short from the byte array at the given offset .
34,079
/**
 * Read a big-endian unsigned short from the byte array at the given offset.
 *
 * @param array Source array
 * @param offset Read position
 * @return Value in [0, 65535]
 */
public static int readUnsignedShort(byte[] array, int offset) {
  return ((array[offset] & 0xFF) << 8) | (array[offset + 1] & 0xFF);
}
Read an unsigned short from the byte array at the given offset .
34,080
/**
 * Read a big-endian int from the byte array at the given offset.
 *
 * @param array Source array
 * @param offset Read position
 * @return Decoded int
 */
public static int readInt(byte[] array, int offset) {
  int r = 0;
  // Fold four bytes, most significant first.
  for(int i = 0; i < 4; i++) {
    r = (r << 8) | (array[offset + i] & 0xFF);
  }
  return r;
}
Read an integer from the byte array at the given offset .
34,081
/**
 * Read a big-endian long from the byte array at the given offset.
 *
 * @param array Source array
 * @param offset Read position
 * @return Decoded long
 */
public static long readLong(byte[] array, int offset) {
  long r = 0L;
  // Fold eight bytes, most significant first.
  for(int i = 0; i < 8; i++) {
    r = (r << 8) | (array[offset + i] & 0xFFL);
  }
  return r;
}
Read a long from the byte array at the given offset .
34,082
/**
 * Write an unsigned int using a variable-length (varint) encoding: seven
 * bits per byte, least significant group first, high bit set on all but
 * the final byte.
 *
 * @param buffer Target buffer
 * @param val Value to encode (treated as unsigned)
 */
public static void writeUnsignedVarint(ByteBuffer buffer, int val) {
  while((val >>> 7) != 0) {
    buffer.put((byte) ((val & 0x7F) | 0x80)); // Continuation bit set.
    val >>>= 7;
  }
  buffer.put((byte) val); // Final group, continuation bit clear.
}
Write an unsigned integer using a variable - length encoding .
34,083
/**
 * Write an unsigned long using a variable-length (varint) encoding: seven
 * bits per byte, least significant group first, high bit set on all but
 * the final byte.
 *
 * @param buffer Target buffer
 * @param val Value to encode (treated as unsigned)
 */
public static void writeUnsignedVarintLong(ByteBuffer buffer, long val) {
  while((val >>> 7) != 0L) {
    buffer.put((byte) ((val & 0x7F) | 0x80)); // Continuation bit set.
    val >>>= 7;
  }
  buffer.put((byte) val); // Final group, continuation bit clear.
}
Write an unsigned long using a variable - length encoding .
34,084
/**
 * Write a string to the buffer via the shared string serializer; null is
 * written as the empty string.
 *
 * @param buffer Target buffer
 * @param s String to write (may be null)
 * @throws IOException on serialization errors
 */
public static void writeString(ByteBuffer buffer, String s) throws IOException {
  ByteArrayUtil.STRING_SERIALIZER.toByteBuffer(buffer, s != null ? s : "");
}
Write a string to the buffer .
34,085
/**
 * Read an unsigned varint-encoded integer: seven bits per byte, least
 * significant group first; the high bit marks continuation.
 *
 * @param buffer Source buffer
 * @return Decoded value
 * @throws IOException if the encoding exceeds the width of an int
 */
public static int readUnsignedVarint(ByteBuffer buffer) throws IOException {
  int val = 0;
  // At most six groups fit the accumulator before we must give up.
  for(int shift = 0; shift <= 35; shift += 7) {
    final int data = buffer.get();
    val |= (data & 0x7F) << shift;
    if((data & 0x80) == 0) {
      return val; // Continuation bit clear: done.
    }
  }
  throw new IOException("Variable length quantity is too long for expected integer.");
}
Read an unsigned integer .
34,086
/**
 * Forcibly unmap a memory-mapped byte buffer (best effort).
 *
 * Pre-Java-9 JVMs expose no public unmap API, so this reflectively invokes
 * the internal cleaner. On Java 9+ (detected via the presence of
 * Runtime.version()) that hack no longer works, and the method becomes a
 * flush-only no-op, leaving reclamation to the garbage collector.
 *
 * @param map Buffer to unmap (null is ignored)
 */
public static void unmapByteBuffer(final MappedByteBuffer map) {
  if(map == null) {
    return;
  }
  map.force(); // Flush pending changes before unmapping.
  try {
    // Runtime.version() exists only on Java 9+; bail out there.
    if(Runtime.class.getDeclaredMethod("version") != null)
      return;
  }
  catch(NoSuchMethodException e) {
    // Pre-9 JVM: invoke the internal cleaner reflectively.
    AccessController.doPrivileged(new PrivilegedAction<Object>() {
      public Object run() {
        try {
          Method getCleanerMethod = map.getClass().getMethod("cleaner", new Class[0]);
          if(getCleanerMethod == null) {
            return null;
          }
          getCleanerMethod.setAccessible(true);
          Object cleaner = getCleanerMethod.invoke(map, new Object[0]);
          Method cleanMethod = cleaner.getClass().getMethod("clean");
          if(cleanMethod == null) {
            return null;
          }
          cleanMethod.invoke(cleaner);
        }
        catch(Exception e) {
          LoggingUtil.exception(e);
        }
        return null;
      }
    });
  }
  catch(SecurityException e1) {
    // Reflection not permitted; silently skip (operation is best-effort).
  }
}
Unmap a byte buffer .
34,087
/**
 * Depth-sort the axes by their squared distance from the camera, farthest
 * first, and rebuild the inverse permutation in dindex.
 */
private void sortAxes() {
  for(int d = 0; d < shared.dim; d++) {
    final double sqdist = shared.camera.squaredDistanceFromCamera(shared.layout.getNode(d).getX(), shared.layout.getNode(d).getY());
    axes[d].first = -sqdist; // Negate so ascending sort yields farthest-first.
    axes[d].second = d;
  }
  Arrays.sort(axes);
  // Inverse permutation: dimension -> sorted position.
  for(int i = 0; i < shared.dim; i++) {
    dindex[axes[i].second] = i;
  }
}
Depth - sort the axes .
34,088
/**
 * Sort the edges for rendering: each edge is keyed by the nearer (smaller
 * depth index) of its two endpoints.
 *
 * @param dindex Depth index per dimension
 * @return Edge order as (depth key, edge number) pairs, sorted
 */
private IntIntPair[] sortEdges(int[] dindex) {
  final int m = shared.layout.edges.size();
  IntIntPair[] edgesort = new IntIntPair[m];
  int e = 0;
  for(Layout.Edge edge : shared.layout.edges) {
    final int a = dindex[edge.dim1], b = dindex[edge.dim2];
    edgesort[e] = new IntIntPair(a < b ? a : b, e);
    e++;
  }
  Arrays.sort(edgesort);
  return edgesort;
}
Sort the edges for rendering .
34,089
/**
 * Finish computation of the mean: divide the accumulated sums by the total
 * weight.
 */
public void finalizeFirstPassE() {
  final double inv = 1. / wsum;
  for(int d = 0; d < mean.length; d++) {
    mean[d] *= inv;
  }
}
Finish computation of the mean .
34,090
/**
 * Restore (de-normalize) a single dimension by multiplying with the stored
 * mean; a single stored mean is shared across all dimensions.
 *
 * @param d Dimension index
 * @param val Normalized value
 * @return Restored value
 */
private double restore(int d, double val) {
  if(mean.length == 1) {
    return val * mean[0];
  }
  return val * mean[d];
}
Restore a single dimension .
34,091
/**
 * Main loop of OUTRES: score every object by a recursive subspace search.
 *
 * @param relation Input vector relation
 * @return Outlier result (lower scores are more anomalous, hence the
 *         inverted score meta)
 */
public OutlierResult run(Relation<? extends NumberVector> relation) {
  final DBIDs ids = relation.getDBIDs();
  WritableDoubleDataStore ranks = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmax = new DoubleMinMax();
  KernelDensityEstimator kernel = new KernelDensityEstimator(relation, eps);
  long[] subspace = BitsUtil.zero(kernel.dim);
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("OUTRES scores", ids.size(), LOG) : null;
  for(DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    BitsUtil.zeroI(subspace); // Reuse the subspace bitmask across objects.
    double score = outresScore(0, subspace, iditer, kernel, ids);
    ranks.putDouble(iditer, score);
    minmax.put(score);
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  OutlierScoreMeta meta = new InvertedOutlierScoreMeta(minmax.getMin(), minmax.getMax(), 0., 1., 1.);
  return new OutlierResult(meta, new MaterializedDoubleRelation("OUTRES", "outres-score", ranks, ids));
}
Main loop for OUTRES
34,092
/**
 * Recursive subspace search computing the OUTRES score of one object:
 * extend the current subspace by each remaining dimension, penalize the
 * score where the object's density deviates significantly from its
 * neighbors', and recurse on relevant subspaces.
 *
 * @param s First dimension index to consider for extension
 * @param subspace Current subspace bitmask (modified and restored in place)
 * @param id Object being scored
 * @param kernel Kernel density estimator
 * @param cands Candidate neighbors in the parent subspace
 * @return Accumulated score (1.0 = no anomaly evidence)
 */
public double outresScore(final int s, long[] subspace, DBIDRef id, KernelDensityEstimator kernel, DBIDs cands) {
  double score = 1.0; // Start at the maximum (inlier) score.
  final SubspaceEuclideanDistanceFunction df = new SubspaceEuclideanDistanceFunction(subspace);
  MeanVariance meanv = new MeanVariance();
  ModifiableDoubleDBIDList neighcand = DBIDUtil.newDistanceDBIDList(cands.size());
  ModifiableDoubleDBIDList nn = DBIDUtil.newDistanceDBIDList(cands.size());
  for(int i = s; i < kernel.dim; i++) {
    assert !BitsUtil.get(subspace, i);
    BitsUtil.setI(subspace, i);
    df.setSelectedDimensions(subspace);
    final double adjustedEps = kernel.adjustedEps(kernel.dim);
    // Query with a doubled radius so refinement candidates are retained.
    DoubleDBIDList neigh = initialRange(id, cands, df, adjustedEps * 2, kernel, neighcand);
    if(neigh.size() > 2) {
      if(relevantSubspace(subspace, neigh, kernel)) {
        final double density = kernel.subspaceDensity(subspace, neigh);
        // Compare the object's density with the mean density of its neighbors.
        meanv.reset();
        for(DoubleDBIDListIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) {
          subsetNeighborhoodQuery(neighcand, neighbor, df, adjustedEps, kernel, nn);
          meanv.put(kernel.subspaceDensity(subspace, nn));
        }
        final double deviation = (meanv.getMean() - density) / (2. * meanv.getSampleStddev());
        if(deviation >= 1) {
          score *= density / deviation; // Penalize significant deviation.
        }
        // Recurse into higher-dimensional subspaces with reduced candidates.
        score *= outresScore(i + 1, subspace, id, kernel, neighcand);
      }
    }
    BitsUtil.clearI(subspace, i); // Backtrack.
  }
  return score;
}
Main loop of OUTRES . Run for each object
34,093
/**
 * Initial range query: collect candidates within twice the given radius
 * (kept in n for later refinement), but return only the sorted slice of
 * objects within the radius itself.
 *
 * NOTE(review): the cutoff is eps * 2 relative to the eps parameter, and
 * the caller already passes adjustedEps * 2, so the retained candidate set
 * spans four times the adjusted radius — confirm this is intended.
 *
 * @param obj Query object
 * @param cands Candidate set to scan
 * @param df Subspace distance function
 * @param eps Query radius
 * @param kernel Kernel density estimator (supplies the relation)
 * @param n Output list for all candidates within 2*eps (cleared first)
 * @return Slice of n containing only objects within eps, sorted by distance
 */
private DoubleDBIDList initialRange(DBIDRef obj, DBIDs cands, PrimitiveDistanceFunction<? super NumberVector> df, double eps, KernelDensityEstimator kernel, ModifiableDoubleDBIDList n) {
  n.clear();
  NumberVector o = kernel.relation.get(obj);
  final double twoeps = eps * 2;
  int matches = 0;
  for(DBIDIter cand = cands.iter(); cand.valid(); cand.advance()) {
    final double dist = df.distance(o, kernel.relation.get(cand));
    if(dist <= twoeps) {
      n.add(dist, cand);
      if(dist <= eps) {
        ++matches; // Within the actual query radius.
      }
    }
  }
  n.sort(); // Ascending by distance: the first 'matches' entries are within eps.
  return n.slice(0, matches);
}
Initial range query .
34,094
/**
 * Refine the neighborhood of an object within a precomputed candidate set:
 * keep only candidates within the adjusted radius.
 *
 * @param neighc Candidate list from the coarser query
 * @param dbid Query object
 * @param df Subspace distance function
 * @param adjustedEps Query radius
 * @param kernel Kernel density estimator (supplies the relation)
 * @param n Output list (cleared first)
 * @return The refreshed list n
 */
private DoubleDBIDList subsetNeighborhoodQuery(DoubleDBIDList neighc, DBIDRef dbid, PrimitiveDistanceFunction<? super NumberVector> df, double adjustedEps, KernelDensityEstimator kernel, ModifiableDoubleDBIDList n) {
  n.clear();
  final NumberVector center = kernel.relation.get(dbid);
  for(DoubleDBIDListIter it = neighc.iter(); it.valid(); it.advance()) {
    final double d = df.distance(center, kernel.relation.get(it));
    if(d <= adjustedEps) {
      n.add(d, it);
    }
  }
  return n;
}
Refine neighbors within a subset .
34,095
/**
 * Subspace relevance test: in every selected dimension, the neighborhood's
 * projected values must deviate significantly (Kolmogorov-Smirnov-style
 * check) from a uniform distribution; a uniform dimension makes the whole
 * subspace irrelevant.
 *
 * @param subspace Subspace bitmask to test
 * @param neigh Neighborhood to project
 * @param kernel Kernel density estimator (supplies the relation)
 * @return true if all selected dimensions are non-uniform
 */
protected boolean relevantSubspace(long[] subspace, DoubleDBIDList neigh, KernelDensityEstimator kernel) {
  // Critical value at the 0.01 level, scaled by the sample size.
  final double crit = K_S_CRITICAL001 / FastMath.sqrt(neigh.size() - 2);
  double[] data = new double[neigh.size()];
  Relation<? extends NumberVector> relation = kernel.relation;
  for(int dim = BitsUtil.nextSetBit(subspace, 0); dim >= 0; dim = BitsUtil.nextSetBit(subspace, dim + 1)) {
    // Project the neighborhood onto this dimension.
    int count = 0;
    for(DBIDIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) {
      data[count++] = relation.get(neighbor).doubleValue(dim);
    }
    assert (count == neigh.size());
    Arrays.sort(data);
    final double min = data[0], norm = data[data.length - 1] - min;
    // Compare the empirical CDF with the uniform CDF on [min, max].
    boolean flag = false;
    for(int j = 1, end = data.length - 1; j < end; j++) {
      if(Math.abs(j / (data.length - 2.) - (data[j] - min) / norm) > crit) {
        flag = true; // Significant deviation found in this dimension.
        break;
      }
    }
    // No deviation in some dimension => distribution is uniform => irrelevant.
    if(!flag) {
      return false;
    }
  }
  return true;
}
Subspace relevance test .
34,096
/**
 * Compute the arithmetic mean of the given values.
 *
 * @param data Input values (an empty array yields NaN)
 * @return Mean value
 */
public static double of(double... data) {
  double total = 0.;
  for(int i = 0; i < data.length; i++) {
    total += data[i];
  }
  return total / data.length;
}
Static helper function .
34,097
/**
 * Compute the arithmetic mean with a second compensation pass for extra
 * numerical precision (Neely 1966).
 *
 * @param data Input values
 * @return Mean value
 */
@ Reference ( authors = "P. M. Neely" , title = "Comparison of Several Algorithms for Computation of Means, Standard Deviations and Correlation Coefficients" , booktitle = "Communications of the ACM 9(7), 1966" , url = "https://doi.org/10.1145/365719.365958" , bibkey = "doi:10.1145/365719.365958" )
public static double highPrecision(double... data) {
  double mean = 0.;
  for(int i = 0; i < data.length; i++) {
    mean += data[i];
  }
  mean /= data.length;
  // Second pass: accumulate the residual error around the first estimate.
  double correction = 0.;
  for(int i = 0; i < data.length; i++) {
    correction += data[i] - mean;
  }
  return mean + correction / data.length;
}
Static helper function with extra precision
34,098
/**
 * Bulk insert: lazily initialize the structure from the first entry, then
 * insert every entry individually.
 *
 * @param entries Entries to insert (an empty list is a no-op)
 */
public void insertAll(List<E> entries) {
  if(entries.isEmpty()) {
    return; // Nothing to do, and nothing to initialize from.
  }
  if(!initialized) {
    initialize(entries.get(0));
  }
  for(E entry : entries) {
    insert(entry, false);
  }
}
Bulk insert .
34,099
/**
 * Sort the entries of the given node by their minimum distance to the query
 * object (distance to the covering ball, clamped at zero).
 *
 * @param node Node whose entries to sort
 * @param q Query object id
 * @return (minDist, entry index) pairs in ascending order
 */
protected final List<DoubleIntPair> getSortedEntries(N node, DBID q) {
  final int count = node.getNumEntries();
  List<DoubleIntPair> result = new ArrayList<>(count);
  for(int i = 0; i < count; i++) {
    final E entry = node.getEntry(i);
    final double d = distance(entry.getRoutingObjectID(), q);
    final double r = entry.getCoveringRadius();
    // minDist = max(0, d - r): zero inside the covering ball.
    result.add(new DoubleIntPair(d <= r ? 0.0 : d - r, i));
  }
  Collections.sort(result);
  return result;
}
Sorts the entries of the specified node according to their minimum distance to the specified object .