idx
int64 0
41.2k
| question
stringlengths 83
4.15k
| target
stringlengths 5
715
|
|---|---|---|
34,200
|
/**
 * Write a chunk of characters, automatically terminating a previously
 * unterminated line -- unless the new chunk begins with a carriage return,
 * in which case the previous line is overwritten in place (progress-bar
 * style), blanking leftover characters if the new line is shorter.
 *
 * @param cbuf character buffer
 * @param off offset of the first character to write
 * @param len number of characters to write
 * @throws IOException on output errors from the wrapped writer
 */
public void write(char[] cbuf, int off, int len) throws IOException {
  if(len <= 0) {
    return; // Nothing to write.
  }
  if(charsSinceNewline > 0) { // Previous write left an unterminated line.
    if(cbuf[off] != CARRIAGE_RETURN) {
      // New content does not rewrite the line: terminate the old line first.
      super.write(NEWLINEC, 0, NEWLINEC.length);
      charsSinceNewline = 0;
    }
    else {
      // Carriage return: the new content overwrites the current line.
      int nonnl = countNonNewline(cbuf, off + 1, len - 1);
      if(nonnl < charsSinceNewline) {
        // New line is shorter than the old one: blank out the remainder.
        super.write(CARRIAGE_RETURN);
        while(charsSinceNewline > 0) {
          final int n = Math.min(charsSinceNewline, WHITESPACE.length());
          super.write(WHITESPACE, 0, n);
          charsSinceNewline -= n;
        }
      }
      else {
        charsSinceNewline = 0;
      }
    }
  }
  // Track the length of the trailing (unterminated) line of this chunk.
  charsSinceNewline = tailingNonNewline(cbuf, off, len);
  super.write(cbuf, off, len);
  flush();
}
|
Writer that keeps track of whether it has seen a newline yet, and will auto-insert newlines, except when lines start with a carriage return.
|
34,201
|
/**
 * Estimate the intrinsic dimensionality (ID) of every object in the set.
 *
 * @param ids the objects to process
 * @param knnQ k-nearest-neighbor query used by the estimator
 * @return per-object intrinsic dimensionality values
 */
protected DoubleDataStore computeIDs(DBIDs ids, KNNQuery<O> knnQ) {
  WritableDoubleDataStore intDims = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Intrinsic dimensionality", ids.size(), LOG) : null;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    double id = 0.;
    try {
      // k_c + 1 because the query result includes the query point itself.
      id = estimator.estimate(knnQ, iter, k_c + 1);
    }
    catch(ArithmeticException e) {
      id = 0; // Estimation failed (e.g. degenerate distances); use 0 as fallback.
    }
    intDims.putDouble(iter, id);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  return intDims;
}
|
Computes all IDs
|
34,202
|
/**
 * Compute the Intrinsic Dimensionality Outlier Score (IDOS) for each object:
 * the object's own ID estimate times the average reciprocal ID of its k_r
 * nearest neighbors.
 *
 * @param ids objects to score
 * @param knnQ k-nearest-neighbor query
 * @param intDims precomputed per-object intrinsic dimensionality
 * @param idosminmax output: min/max of the produced scores
 * @return per-object IDOS scores
 */
protected DoubleDataStore computeIDOS(DBIDs ids, KNNQuery<O> knnQ, DoubleDataStore intDims, DoubleMinMax idosminmax) {
  WritableDoubleDataStore ldms = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("ID Outlier Scores for objects", ids.size(), LOG) : null;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    final KNNList neighbors = knnQ.getKNNForDBID(iter, k_r);
    double sum = 0.;
    int cnt = 0;
    for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
      if(DBIDUtil.equal(iter, neighbor)) {
        continue; // Skip the query point itself.
      }
      final double id = intDims.doubleValue(neighbor);
      // Guard against non-positive ID estimates (would divide by zero).
      sum += id > 0 ? 1.0 / id : 0.;
      if(++cnt == k_r) {
        break; // Use at most k_r true neighbors.
      }
    }
    final double id_q = intDims.doubleValue(iter);
    final double idos = id_q > 0 ? id_q * sum / cnt : 0.;
    ldms.putDouble(iter, idos);
    idosminmax.put(idos);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  return ldms;
}
|
Computes all IDOS scores .
|
34,203
|
/**
 * Run the evolutionary subspace outlier search (Aggarwal & Yu):
 * evolve subspace genes, then score each object by the most negative
 * sparsity coefficient of any subspace that contains it.
 *
 * @param database database to operate on
 * @param relation the vector relation to process
 * @return outlier result (inverted: lower sparsity = stronger outlier)
 */
public OutlierResult run(Database database, Relation<V> relation) {
  final int dbsize = relation.size();
  ArrayList<ArrayList<DBIDs>> ranges = buildRanges(relation);
  Heap<Individuum>.UnorderedIter individuums = (new EvolutionarySearch(relation, ranges, m, rnd.getSingleThreadedRandom())).run();
  WritableDoubleDataStore outlierScore = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC);
  for(; individuums.valid(); individuums.advance()) {
    DBIDs ids = computeSubspaceForGene(individuums.get().getGene(), ranges);
    double sparsityC = sparsity(ids.size(), dbsize, k, phi);
    for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      double prev = outlierScore.doubleValue(iter);
      // Keep the minimum (most negative) sparsity coefficient per object.
      if(Double.isNaN(prev) || sparsityC < prev) {
        outlierScore.putDouble(iter, sparsityC);
      }
    }
  }
  DoubleMinMax minmax = new DoubleMinMax();
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    double val = outlierScore.doubleValue(iditer);
    if(Double.isNaN(val)) {
      // Objects never covered by any evolved subspace get a neutral 0.
      outlierScore.putDouble(iditer, val = 0.);
    }
    minmax.put(val);
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("AggarwalYuEvolutionary", "aggarwal-yu-outlier", outlierScore, relation.getDBIDs());
  OutlierScoreMeta meta = new InvertedOutlierScoreMeta(minmax.getMin(), minmax.getMax(), Double.NEGATIVE_INFINITY, 0.0);
  return new OutlierResult(meta, scoreResult);
}
|
Performs the evolutionary algorithm on the given database .
|
34,204
|
/**
 * Build a symmetric matrix of pairwise squared distances.
 * If the distance function is not already squared, each distance is squared
 * here, so the matrix always holds squared values.
 *
 * @param ids objects, in array order (matrix index = array offset)
 * @param dq distance query to evaluate
 * @return size x size matrix of squared distances (diagonal left 0)
 */
protected double[][] buildDistanceMatrix(ArrayDBIDs ids, DistanceQuery<?> dq) {
  final int size = ids.size();
  double[][] dmat = new double[size][size];
  // Only square if the distance function is not squared already.
  final boolean square = !dq.getDistanceFunction().isSquared();
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing distance matrix", (size * (size - 1)) >>> 1, LOG) : null;
  Duration timer = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".runtime.distancematrix").begin() : null;
  DBIDArrayIter ix = ids.iter(), iy = ids.iter();
  // Upper triangle only; mirror each value into the lower triangle.
  for(ix.seek(0); ix.valid(); ix.advance()) {
    double[] dmat_x = dmat[ix.getOffset()];
    for(iy.seek(ix.getOffset() + 1); iy.valid(); iy.advance()) {
      final double dist = dq.distance(ix, iy);
      dmat[iy.getOffset()][ix.getOffset()] = dmat_x[iy.getOffset()] = square ? (dist * dist) : dist;
    }
    if(prog != null) {
      int row = ix.getOffset() + 1;
      // Triangular progress: pairs completed after finishing this row.
      prog.setProcessed(row * size - ((row * (row + 1)) >>> 1), LOG);
    }
  }
  LOG.ensureCompleted(prog);
  if(timer != null) {
    LOG.statistics(timer.end());
  }
  return dmat;
}
|
Build a distance matrix of squared distances .
|
34,205
|
/**
 * Run X-means: start with k_min clusters, repeatedly try to split each
 * cluster in two (accepting splits that improve the information criterion),
 * and re-run k-means until no cluster splits or k_max is reached.
 *
 * @param database database to operate on
 * @param relation the vector relation to cluster
 * @return final clustering
 */
public Clustering<M> run(Database database, Relation<V> relation) {
  MutableProgress prog = LOG.isVerbose() ? new MutableProgress("X-means number of clusters", k_max, LOG) : null;
  // Initial clustering with the minimum number of clusters.
  innerKMeans.setK(k_min);
  LOG.statistics(new StringStatistic(KEY + ".initialization", initializer.toString()));
  splitInitializer.setInitialMeans(initializer.chooseInitialMeans(database, relation, k_min, getDistanceFunction()));
  Clustering<M> clustering = innerKMeans.run(database, relation);
  if(prog != null) {
    prog.setProcessed(k_min, LOG);
  }
  ArrayList<Cluster<M>> clusters = new ArrayList<>(clustering.getAllClusters());
  while(clusters.size() <= k_max) {
    ArrayList<Cluster<M>> nextClusters = new ArrayList<>();
    for(Cluster<M> cluster : clusters) {
      // Each split keeps the parent or replaces it with two children.
      List<Cluster<M>> childClusterList = splitCluster(cluster, database, relation);
      nextClusters.addAll(childClusterList);
      if(childClusterList.size() > 1) {
        k += childClusterList.size() - 1;
        if(prog != null) {
          if(k >= k_max) {
            prog.setTotal(k + 1); // Keep the progress bar from overflowing.
          }
          prog.setProcessed(k, LOG);
        }
      }
    }
    if(clusters.size() == nextClusters.size()) {
      break; // No cluster was split: converged.
    }
    // Re-run k-means using the accepted split centers as initialization.
    splitInitializer.setInitialClusters(nextClusters);
    innerKMeans.setK(nextClusters.size());
    clustering = innerKMeans.run(database, relation);
    clusters.clear();
    clusters.addAll(clustering.getAllClusters());
  }
  if(prog != null) {
    prog.setTotal(k);
    prog.setProcessed(k, LOG);
  }
  return new Clustering<>("X-Means Result", "X-Means", clusters);
}
|
Run the algorithm on a database and relation .
|
34,206
|
/**
 * Conditionally split a cluster into two using 2-means on its members,
 * keeping the split only if the information criterion prefers it.
 *
 * @param parentCluster cluster to consider splitting
 * @param database the full database (proxied to the cluster's members)
 * @param relation the vector relation
 * @return list with the parent cluster, or the two child clusters
 */
protected List<Cluster<M>> splitCluster(Cluster<M> parentCluster, Database database, Relation<V> relation) {
  ArrayList<Cluster<M>> parentClusterList = new ArrayList<Cluster<M>>(1);
  parentClusterList.add(parentCluster);
  if(parentCluster.size() <= 1) {
    return parentClusterList; // Singleton clusters cannot be split.
  }
  Clustering<M> parentClustering = new Clustering<>(parentCluster.getName(), parentCluster.getName(), parentClusterList);
  // Restrict the database view to the members of this cluster.
  ProxyDatabase proxyDB = new ProxyDatabase(parentCluster.getIDs(), database);
  splitInitializer.setInitialMeans(splitCentroid(parentCluster, relation));
  innerKMeans.setK(2);
  Clustering<M> childClustering = innerKMeans.run(proxyDB);
  double parentEvaluation = informationCriterion.quality(parentClustering, getDistanceFunction(), relation);
  double childrenEvaluation = informationCriterion.quality(childClustering, getDistanceFunction(), relation);
  if(LOG.isDebugging()) {
    LOG.debug("parentEvaluation: " + parentEvaluation);
    LOG.debug("childrenEvaluation: " + childrenEvaluation);
  }
  // Keep the parent unless the two-cluster model scores better.
  return informationCriterion.isBetter(parentEvaluation, childrenEvaluation) ? parentClusterList : childClustering.getAllClusters();
}
|
Conditionally splits the clusters based on the information criterion .
|
34,207
|
/**
 * Split an existing centroid into two candidate centers by moving it
 * along a random direction, scaled by a random fraction of the cluster
 * radius, in opposite directions.
 *
 * NOTE(review): this mutates the array returned by getModel().getMean()
 * in place -- if the model exposes its internal mean array, the parent
 * model is modified as a side effect; confirm against MeanModel.getMean().
 *
 * @param parentCluster cluster whose centroid is split
 * @param relation the vector relation (for radius computation)
 * @return two initial centers: { centroid - offset, centroid + offset }
 */
protected double[][] splitCentroid(Cluster<? extends MeanModel> parentCluster, Relation<V> relation) {
  double[] parentCentroid = parentCluster.getModel().getMean();
  // Radius = distance from the centroid to the farthest member.
  double radius = 0.;
  for(DBIDIter it = parentCluster.getIDs().iter(); it.valid(); it.advance()) {
    double d = getDistanceFunction().distance(relation.get(it), DoubleVector.wrap(parentCentroid));
    radius = (d > radius) ? d : radius;
  }
  Random random = rnd.getSingleThreadedRandom();
  final int dim = RelationUtil.dimensionality(relation);
  // Random unit direction, scaled to 40-90% of the radius.
  double[] randomVector = normalize(MathUtil.randomDoubleArray(dim, random));
  timesEquals(randomVector, (.4 + random.nextDouble() * .5) * radius);
  for(int d = 0; d < dim; d++) {
    double a = parentCentroid[d], b = randomVector[d];
    parentCentroid[d] = a - b;
    randomVector[d] = a + b;
  }
  return new double[][] { parentCentroid, randomVector };
}
|
Split an existing centroid into two initial centers .
|
34,208
|
/**
 * Perform one sequential scan over the Hilbert-ordered features, refining
 * upper/lower outlier-weight bounds and updating the candidate heaps.
 *
 * @param hf Hilbert feature set to scan
 * @param k0 base neighborhood width for the inner scan
 */
private void scan(HilbertFeatures hf, int k0) {
  // Cap the scan width at the data set size.
  final int mink0 = Math.min(2 * k0, capital_n - 1);
  if(LOG.isDebuggingFine()) {
    LOG.debugFine("Scanning with k0=" + k0 + " (" + mink0 + ")" + " N*=" + capital_n_star);
  }
  for(int i = 0; i < hf.pf.length; i++) {
    if(hf.pf[i].ubound < omega_star) {
      continue; // Upper bound below threshold: cannot be a top outlier.
    }
    if(hf.pf[i].lbound < hf.pf[i].ubound) {
      // Bounds not yet converged: try to tighten cheaply first.
      double omega = hf.fastUpperBound(i);
      if(omega < omega_star) {
        hf.pf[i].ubound = omega;
      }
      else {
        int maxcount;
        // Top candidates get an exhaustive inner scan; others a limited one.
        if(hf.top.contains(hf.pf[i])) {
          maxcount = capital_n - 1;
        }
        else {
          maxcount = mink0;
        }
        innerScan(hf, i, maxcount);
      }
    }
    if(hf.pf[i].ubound > 0) {
      hf.updateOUT(i);
    }
    if(hf.pf[i].lbound > 0) {
      hf.updateWLB(i);
    }
    // Raise the pruning threshold once enough lower bounds are known.
    if(hf.wlb.size() >= n) {
      omega_star = Math.max(omega_star, hf.wlb.peek().lbound);
    }
  }
}
|
Scan function performs a sequential scan over the data .
|
34,209
|
/**
 * Recount the "true" outliers: candidates whose upper bound reaches
 * omega_star and whose bounds have numerically converged. Updates n_star.
 *
 * @param h Hilbert feature set holding the candidate heap
 */
private void trueOutliers(HilbertFeatures h) {
  int confirmed = 0;
  for(ObjectHeap.UnsortedIter<HilFeature> it = h.out.unsortedIter(); it.valid(); it.advance()) {
    final HilFeature feature = it.get();
    // Converged means upper and lower bound agree to within 1E-10.
    if(feature.ubound >= omega_star && feature.ubound - feature.lbound < 1E-10) {
      ++confirmed;
    }
  }
  n_star = confirmed;
}
|
trueOutliers function updates n_star
|
34,210
|
/**
 * Load (register) all cached classes assignable to the given restriction
 * class, by linear scan over the master cache. Skips interfaces, abstract
 * and private classes, and classes that are not instantiable either via a
 * no-argument constructor or via a parameterizer.
 *
 * @param restrictionClass base class/interface to filter by
 */
public static void load(Class<?> restrictionClass) {
  if(MASTER_CACHE == null) {
    initialize(); // Lazy initialization of the cache.
  }
  if(MASTER_CACHE.isEmpty()) {
    return;
  }
  Iterator<Class<?>> iter = MASTER_CACHE.iterator();
  while(iter.hasNext()) {
    Class<?> clazz = iter.next();
    if(!restrictionClass.isAssignableFrom(clazz)) {
      continue; // Not a subtype of the restriction class.
    }
    if(Modifier.isInterface(clazz.getModifiers()) || Modifier.isAbstract(clazz.getModifiers()) || Modifier.isPrivate(clazz.getModifiers())) {
      continue; // Cannot be instantiated.
    }
    boolean instantiable = false;
    try {
      instantiable = clazz.getConstructor() != null;
    }
    catch(Exception | Error e) {
      // Best-effort probe: no public no-arg constructor.
    }
    try {
      instantiable = instantiable || ClassGenericsUtil.getParameterizer(clazz) != null;
    }
    catch(Exception | Error e) {
      // Best-effort probe: no parameterizer either.
    }
    if(!instantiable) {
      continue;
    }
    ELKIServiceRegistry.register(restrictionClass, clazz);
  }
}
|
Load classes via linear scanning .
|
34,211
|
/**
 * Order two classes by package name first, then by canonical class name.
 * Classes in the default (null) package sort before all others.
 *
 * @param o1 first class
 * @param o2 second class
 * @return comparison result (negative, zero, positive)
 */
private static int comparePackageClass(Class<?> o1, Class<?> o2) {
  final Package p1 = o1.getPackage(), p2 = o2.getPackage();
  if(p1 == p2) {
    // Same package: fall back to the canonical class name.
    return o1.getCanonicalName().compareTo(o2.getCanonicalName());
  }
  if(p1 == null) {
    return -1;
  }
  if(p2 == null) {
    return +1;
  }
  return p1.getName().compareTo(p2.getName());
}
|
Compare two classes by package name first .
|
34,212
|
/**
 * Determine the priority of a class from its {@code Priority} annotation,
 * falling back to the declaring (outer) class, then to the default.
 *
 * @param o1 class to inspect
 * @return annotated priority value, or {@code Priority.DEFAULT}
 */
private static int classPriority(Class<?> o1) {
  Priority p = o1.getAnnotation(Priority.class);
  if(p != null) {
    return p.value();
  }
  // Inner classes may inherit the priority of their enclosing class.
  final Class<?> outer = o1.getDeclaringClass();
  p = (outer != null) ? outer.getAnnotation(Priority.class) : null;
  return (p == null) ? Priority.DEFAULT : p.value();
}
|
Get the priority of a class or its outer class .
|
34,213
|
/**
 * Occupy the next unused index, growing the backing arrays if necessary.
 * The new element starts as its own component root.
 *
 * @param weight initial weight of the new element
 * @return the newly assigned index
 */
public int nextIndex(int weight) {
  if(used == parent.length) {
    // Grow both arrays by 50% when full.
    final int newSize = used + (used >> 1);
    this.weight = Arrays.copyOf(this.weight, newSize);
    this.parent = Arrays.copyOf(this.parent, newSize);
  }
  final int idx = used++;
  this.weight[idx] = weight;
  this.parent[idx] = idx; // New element is its own root.
  return idx;
}
|
Occupy the next unused index .
|
34,214
|
/**
 * Find the component root of an element, compressing the path by pointing
 * each visited node at its grandparent (path splitting).
 *
 * @param cur element to look up
 * @return root of the component containing {@code cur}
 */
public int find(int cur) {
  assert (cur >= 0 && cur < parent.length);
  int up = parent[cur];
  while(up != cur) {
    final int grand = parent[up];
    parent[cur] = grand; // Shortcut: skip one level on future lookups.
    cur = up;
    up = grand;
  }
  return cur;
}
|
Find the parent of an object .
|
34,215
|
/**
 * Join the components containing the two elements, using union-by-weight:
 * the lighter tree is attached below the heavier one.
 *
 * @param first first element
 * @param second second element
 * @return root of the merged component
 */
public int union(int first, int second) {
  final int rootA = find(first), rootB = find(second);
  if(rootA == rootB) {
    return rootA; // Already in the same component.
  }
  final int wa = weight[rootA], wb = weight[rootB];
  if(wa > wb) {
    // Attach the lighter B below A.
    parent[rootB] = rootA;
    weight[rootA] += wb;
    return rootA;
  }
  // Attach A below B (ties also go to B, matching the original).
  parent[rootA] = rootB;
  weight[rootB] += wa;
  return rootB;
}
|
Join the components of elements p and q .
|
34,216
|
/**
 * Collect the indexes of all component roots (elements that are their own
 * parent).
 *
 * @return list of root indexes
 */
public IntList getRoots() {
  final IntList result = new IntArrayList();
  int i = 0;
  while(i < used) {
    // An element that points at itself is a component root.
    if(parent[i] == i) {
      result.add(i);
    }
    ++i;
  }
  return result;
}
|
Collect all component root elements .
|
34,217
|
/**
 * Grow this supernode by (roughly) doubling its capacity. Must only be
 * called when the node is full.
 *
 * @return the previous capacity
 * @throws IllegalStateException if the node is not yet overflowing
 */
public int growSuperNode() {
  if(getNumEntries() < getCapacity()) {
    throw new IllegalStateException("This node is not yet overflowing (only " + getNumEntries() + " of " + getCapacity() + " entries)");
  }
  Entry[] old_nodes = super.entries.clone();
  assert old_nodes[old_nodes.length - 1] != null; // Full node expected.
  // New array has 2*capacity - 1 slots, preserving the runtime array type.
  super.entries = (Entry[]) java.util.Arrays.copyOfRange(old_nodes, 0, getCapacity() * 2 - 1, entries.getClass());
  assert super.entries.length == old_nodes.length * 2 - 1;
  return getCapacity();
}
|
Grows the supernode by duplicating its capacity .
|
34,218
|
/**
 * Read this supernode from the given stream: the header is read via
 * {@link #readExternal}, then the directory entries. The node registers
 * itself in the tree's supernode map; replacing an existing entry logs a
 * warning, as each supernode should only be read once.
 *
 * Fix: corrected the ungrammatical exception message
 * ("A supernode is cannot be a leaf" -> "A supernode cannot be a leaf").
 *
 * @param in input stream to read from
 * @param tree owning tree, to register the supernode with
 * @throws IOException on read errors
 * @throws ClassNotFoundException on deserialization errors
 * @throws IllegalStateException if the stream does not describe a supernode
 */
public <T extends AbstractXTree<N>> void readSuperNode(ObjectInput in, T tree) throws IOException, ClassNotFoundException {
  readExternal(in);
  if(capacity_to_be_filled <= 0 || !isSuperNode()) {
    throw new IllegalStateException("This node does not appear to be a supernode");
  }
  if(isLeaf) {
    // Supernodes are always directory nodes.
    throw new IllegalStateException("A supernode cannot be a leaf");
  }
  // The capacity was transported by readExternal; consume it now.
  entries = new Entry[capacity_to_be_filled];
  capacity_to_be_filled = 0;
  for(int i = 0; i < numEntries; i++) {
    SpatialEntry s = new SpatialDirectoryEntry();
    s.readExternal(in);
    entries[i] = s;
  }
  // Register with the tree; a non-null previous mapping means a double read.
  N n = tree.getSupernodes().put((long) getPageID(), (N) this);
  if(n != null) {
    Logging.getLogger(this.getClass()).fine("Warning: this supernode should only be read once. Now a node of size " + entries.length + " has replaced a node of size " + n.entries.length + " for id " + getPageID());
  }
}
|
Reads the id of this supernode , the numEntries , and the entries array from the specified stream .
|
34,219
|
/**
 * Compare two DBIDs, delegating to the configured DBID factory.
 *
 * @param id1 first ID
 * @param id2 second ID
 * @return comparison result (negative, zero, or positive)
 */
public static int compare(DBIDRef id1, DBIDRef id2) {
  return DBIDFactory.FACTORY.compare(id1, id2);
}
|
Compare two DBIDs .
|
34,220
|
/**
 * Test two DBIDs for equality, delegating to the configured DBID factory.
 *
 * @param id1 first ID
 * @param id2 second ID
 * @return true if both references denote the same DBID
 */
public static boolean equal(DBIDRef id1, DBIDRef id2) {
  return DBIDFactory.FACTORY.equal(id1, id2);
}
|
Test two DBIDs for equality .
|
34,221
|
/**
 * Dereference a DBID reference into a materialized DBID object.
 *
 * @param ref reference to dereference
 * @return the referenced DBID (reused if already materialized)
 */
public static DBID deref(DBIDRef ref) {
  if(ref instanceof DBID) {
    return (DBID) ref; // Already a materialized DBID.
  }
  return importInteger(ref.internalGetIndex());
}
|
Dereference a DBID reference .
|
34,222
|
/**
 * Compute the union of two DBID collections as a new modifiable hash set.
 *
 * @param ids1 first collection
 * @param ids2 second collection
 * @return new set containing all IDs from both inputs
 */
public static ModifiableDBIDs union(DBIDs ids1, DBIDs ids2) {
  // Presize with the larger input; the union is at least that big.
  final int capacity = Math.max(ids1.size(), ids2.size());
  final ModifiableDBIDs union = DBIDUtil.newHashSet(capacity);
  union.addDBIDs(ids1);
  union.addDBIDs(ids2);
  return union;
}
|
Returns the union of the two specified collection of IDs .
|
34,223
|
/**
 * Compute the set difference ids1 \ ids2 as a new modifiable hash set.
 *
 * @param ids1 collection to subtract from
 * @param ids2 collection to subtract
 * @return new set with all IDs of ids1 not present in ids2
 */
public static ModifiableDBIDs difference(DBIDs ids1, DBIDs ids2) {
  // Copy the first collection, then remove the second.
  final ModifiableDBIDs diff = DBIDUtil.newHashSet(ids1);
  diff.removeDBIDs(ids2);
  return diff;
}
|
Returns the difference of the two specified collection of IDs .
|
34,224
|
/**
 * Ensure that the given DBIDs are array-indexable, copying only when needed.
 *
 * @param ids collection to check
 * @return the same object if already array-backed, otherwise a new array copy
 */
public static ArrayDBIDs ensureArray(DBIDs ids) {
  if(ids instanceof ArrayDBIDs) {
    return (ArrayDBIDs) ids; // Already indexable -- no copy needed.
  }
  return newArray(ids);
}
|
Ensure that the given DBIDs are array - indexable .
|
34,225
|
/**
 * Ensure that the given DBIDs support fast contains operations, copying
 * into a hash set only when needed.
 *
 * @param ids collection to check
 * @return the same object if already set-backed, otherwise a new hash set
 */
public static SetDBIDs ensureSet(DBIDs ids) {
  if(ids instanceof SetDBIDs) {
    return (SetDBIDs) ids; // Already supports fast contains.
  }
  return newHashSet(ids);
}
|
Ensure that the given DBIDs support fast contains operations .
|
34,226
|
/**
 * Ensure the given DBIDs are modifiable, copying only when needed.
 * Hash-set inputs are copied into hash sets to preserve set semantics;
 * everything else becomes a modifiable array.
 *
 * @param ids collection to check
 * @return a modifiable collection (the input itself, if already modifiable)
 */
public static ModifiableDBIDs ensureModifiable(DBIDs ids) {
  if(ids instanceof ModifiableDBIDs) {
    return (ModifiableDBIDs) ids;
  }
  // Preserve the container flavor of the input when copying.
  return (ids instanceof HashSetDBIDs) ? newHashSet(ids) : newArray(ids);
}
|
Ensure modifiable .
|
34,227
|
/**
 * Make a DBID pair, delegating to the configured DBID factory.
 *
 * @param id1 first ID
 * @param id2 second ID
 * @return new pair
 */
public static DBIDPair newPair(DBIDRef id1, DBIDRef id2) {
  return DBIDFactory.FACTORY.newPair(id1, id2);
}
|
Make a DBID pair .
|
34,228
|
/**
 * Make a double-DBID pair, delegating to the configured DBID factory.
 *
 * @param val double value (e.g. a distance or score)
 * @param id associated ID
 * @return new pair
 */
public static DoubleDBIDPair newPair(double val, DBIDRef id) {
  return DBIDFactory.FACTORY.newPair(val, id);
}
|
Make a DoubleDBIDPair .
|
34,229
|
/**
 * Sort the full integer-ID array using the given DBID comparator.
 *
 * @param data array of integer DBIDs to sort in place
 * @param comp comparator to order by
 */
public static void sort(int[] data, Comparator<? super DBIDRef> comp) {
  sort(data, 0, data.length, comp);
}
|
Sort the full array using the given comparator .
|
34,230
|
/**
 * Compare two array positions by pointing the two scratch DBID variables at
 * them and applying the comparator. The variables are mutated as a side
 * effect (they are reusable scratch objects, avoiding allocations).
 *
 * @param i1 first scratch variable
 * @param p1 integer ID to load into i1
 * @param i2 second scratch variable
 * @param p2 integer ID to load into i2
 * @param comp comparator to apply
 * @return comparison result
 */
private static int compare(IntegerDBIDVar i1, int p1, IntegerDBIDVar i2, int p2, Comparator<? super DBIDRef> comp) {
  i1.internalSetIndex(p1);
  i2.internalSetIndex(p2);
  return comp.compare(i1, i2);
}
|
Compare two elements .
|
34,231
|
/**
 * Compute the height of this tree by descending first-child pointers from
 * the root to a leaf (the tree is balanced, so any path has equal length).
 *
 * @return tree height (1 for a root-only tree)
 */
protected int computeHeight() {
  int height = 1;
  N current = getRoot();
  while(!current.isLeaf() && current.getNumEntries() != 0) {
    // Follow the first entry; all leaves are at the same depth.
    current = getNode(current.getEntry(0));
    ++height;
  }
  return height;
}
|
Computes the height of this XTree . Is called by the constructor , and should be overwritten by subclasses if necessary .
|
34,232
|
/**
 * Write all supernodes to the end of the persistent page file. Intended
 * only for a final save: adding further pages afterwards will overwrite
 * the supernode region.
 *
 * @return number of bytes written for the supernodes
 * @throws IOException on write errors
 * @throws IllegalStateException if the backing page file is not persistent,
 *         or a supernode does not fit its reserved pages
 */
public long commit() throws IOException {
  final PageFile<N> file = super.getFile();
  if(!(file instanceof PersistentPageFile)) {
    throw new IllegalStateException("Trying to commit a non-persistent XTree");
  }
  long npid = file.getNextPageID();
  XTreeHeader ph = (XTreeHeader) ((PersistentPageFile<?>) file).getHeader();
  // Supernodes start right after the reserved pages plus regular pages.
  long offset = (ph.getReservedPages() + npid) * ph.getPageSize();
  ph.setSupernode_offset(npid * ph.getPageSize());
  ph.setNumberOfElements(num_elements);
  RandomAccessFile ra_file = ((PersistentPageFile<?>) file).getFile();
  ph.writeHeader(ra_file);
  ra_file.seek(offset);
  long nBytes = 0;
  for(Iterator<N> iterator = supernodes.values().iterator(); iterator.hasNext();) {
    N supernode = iterator.next();
    // Serialize the supernode into a byte buffer first.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(baos);
    supernode.writeSuperNode(oos);
    oos.close();
    baos.close();
    byte[] array = baos.toByteArray();
    // Each supernode occupies a whole number of pages.
    byte[] sn_array = new byte[getPageSize() * (int) Math.ceil((double) supernode.getCapacity() / dirCapacity)];
    if(array.length > sn_array.length) {
      throw new IllegalStateException("Supernode is too large for fitting in " + ((int) Math.ceil((double) supernode.getCapacity() / dirCapacity)) + " pages of total size " + sn_array.length);
    }
    System.arraycopy(array, 0, sn_array, 0, array.length);
    ra_file.write(sn_array);
    nBytes += sn_array.length;
  }
  return nBytes;
}
|
Writes all supernodes to the end of the file . This is only supposed to be used for a final saving of an XTree . If another page is added to this tree the supernodes written to file by this operation are over - written .
|
34,233
|
/**
 * Record that the node of {@code entry1} has been expanded with the node
 * of {@code entry2}.
 *
 * @param entry1 entry whose node was expanded
 * @param entry2 entry of the expansion partner
 */
public void setExpanded(SpatialEntry entry1, SpatialEntry entry2) {
  final int id1 = getPageID(entry1);
  IntSet partners = expanded.get(id1);
  if(partners == null) {
    // First expansion recorded for this node: create its partner set.
    partners = new IntOpenHashSet();
    expanded.put(id1, partners);
  }
  partners.add(getPageID(entry2));
}
|
Marks the nodes with the specified ids as expanded .
|
34,234
|
/**
 * Look up the set of node IDs the given entry's node has already been
 * expanded with.
 *
 * @param entry entry to look up
 * @return recorded partner IDs, or the shared empty set if none
 */
public IntSet getExpanded(SpatialEntry entry) {
  final IntSet partners = expanded.get(getPageID(entry));
  if(partners == null) {
    return IntSets.EMPTY_SET; // Never expanded yet.
  }
  return partners;
}
|
Returns the nodes which are already expanded with the specified node .
|
34,235
|
/**
 * Increment the value of the bin containing the given coordinate,
 * dynamically extending the histogram on either side as needed.
 *
 * @param coord coordinate to locate the bin for
 * @param val value to add to that bin
 */
public void increment(double coord, double val) {
  int bin = getBinNr(coord);
  if(bin < 0) {
    // Coordinate below the current range: shift existing bins right by -bin.
    if(size - bin > data.length) {
      // Does not fit: allocate a larger array and copy shifted.
      double[] tmpdata = new double[growSize(data.length, size - bin)];
      System.arraycopy(data, 0, tmpdata, -bin, size);
      data = tmpdata;
    }
    else {
      // Fits in place: shift within the array, then zero the gap.
      System.arraycopy(data, 0, data, -bin, size);
      Arrays.fill(data, 0, -bin, (double) 0);
    }
    // New bin is at position 0; the region was just zeroed, so '=' suffices.
    data[0] = val;
    assert (data.length >= size - bin);
    offset -= bin;
    size -= bin;
  }
  else if(bin >= data.length) {
    // Coordinate above the allocated range: grow the array to the right.
    double[] tmpdata = new double[growSize(data.length, bin + 1)];
    System.arraycopy(data, 0, tmpdata, 0, size);
    tmpdata[bin] = val;
    data = tmpdata;
    size = bin + 1;
    // NOTE(review): presumably invalidates a cached upper bound -- confirm
    // against the use of 'max' elsewhere in this class.
    max = Double.MAX_VALUE;
  }
  else {
    // Bin is within the allocated array; may still extend the logical size.
    if(bin >= size) {
      size = bin + 1;
    }
    data[bin] += val;
  }
}
|
Increment the value of a bin .
|
34,236
|
/**
 * Get the histogram value at the given coordinate.
 *
 * @param coord coordinate to look up
 * @return bin value, or 0 for coordinates outside the populated range
 */
public double get(double coord) {
  final int bin = getBinNr(coord);
  // Coordinates outside the populated range have an implicit value of zero.
  if(bin < 0 || bin >= size) {
    return 0;
  }
  return data[bin];
}
|
Get the value at a particular position .
|
34,237
|
/**
 * Add a new border to the existing borders: sort and deduplicate the
 * current border array by core, then either collapse to a single border,
 * shrink, or append the new border.
 *
 * @param border border to merge in
 * @return the updated assignment; a single {@code Border} if only one
 *         distinct core remains, otherwise {@code this}
 */
public Assignment update(Border border) {
  Arrays.sort(cs);
  // Compact duplicates (same core) in place; j = number of distinct cores.
  int j = 1;
  boolean found = (cs[0].core == border.core);
  for(int i = 1; i < cs.length; i++) {
    if(cs[i].core != cs[i - 1].core) {
      cs[j++] = cs[i];
    }
    found |= (cs[i].core == border.core);
  }
  if(found) {
    // The new border's core is already present.
    if(j == 1) {
      // Only one distinct core remains: degrade to a plain border.
      Border r = cs[0];
      cs = null;
      return r;
    }
    if(j < cs.length) {
      cs = Arrays.copyOf(cs, j); // Trim the compacted array.
    }
    return this;
  }
  // New core: make room for one extra slot and append.
  if(j + 1 != cs.length) {
    cs = Arrays.copyOf(cs, j + 1);
  }
  cs[j] = border;
  return this;
}
|
Add a new border to the existing borders .
|
34,238
|
/**
 * Get the core this object is assigned to: among all assigned borders,
 * the core with the largest {@code num} (later entries win ties).
 *
 * @return the dominating core
 */
public Core getCore() {
  Core best = cs[0].core;
  for(int i = 1; i < cs.length; i++) {
    final Core candidate = cs[i].core;
    // Ties go to the later candidate, matching the original semantics.
    if(best.num <= candidate.num) {
      best = candidate;
    }
  }
  return best;
}
|
Get the core this is assigned to .
|
34,239
|
/**
 * Find the index of the cluster currently containing the given object,
 * by linear scan over the k clusters.
 *
 * @param clusters list of candidate clusters
 * @param id object to locate
 * @return cluster index, or -1 if unassigned
 */
protected int currentCluster(List<? extends ModifiableDBIDs> clusters, DBIDRef id) {
  for(int c = 0; c < k; c++) {
    if(clusters.get(c).contains(id)) {
      return c; // Found the containing cluster.
    }
  }
  return -1; // Not assigned to any of the k clusters.
}
|
Find the current cluster assignment .
|
34,240
|
/**
 * Compute the final INFLO scores over the reverse k-nearest-neighbor
 * influence space. Pruned objects and objects with zero kNN distance get
 * the neutral score 1.
 *
 * @param relation the data relation
 * @param pruned objects pruned in a previous phase (scored 1)
 * @param knnq k-nearest-neighbor query
 * @param rNNminuskNNs per-object reverse-kNN sets minus the kNN sets
 * @param inflos output: per-object INFLO scores
 * @param inflominmax output: min/max of the produced scores
 */
protected void computeINFLO(Relation<O> relation, ModifiableDBIDs pruned, KNNQuery<O> knnq, WritableDataStore<ModifiableDBIDs> rNNminuskNNs, WritableDoubleDataStore inflos, DoubleMinMax inflominmax) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing INFLOs", relation.size(), LOG) : null;
  // Scratch set, reused across objects to avoid reallocation.
  HashSetModifiableDBIDs set = DBIDUtil.newHashSet();
  for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) {
    if(pruned.contains(iter)) {
      // Pruned previously: neutral score.
      inflos.putDouble(iter, 1.);
      inflominmax.put(1.);
      LOG.incrementProcessed(prog);
      continue;
    }
    final KNNList knn = knnq.getKNNForDBID(iter, kplus1);
    if(knn.getKNNDistance() == 0.) {
      // Duplicate points: density ratio is undefined, use neutral score.
      inflos.putDouble(iter, 1.);
      inflominmax.put(1.);
      LOG.incrementProcessed(prog);
      continue;
    }
    // Influence space = kNN union (RkNN \ kNN).
    set.clear();
    set.addDBIDs(knn);
    set.addDBIDs(rNNminuskNNs.get(iter));
    double sum = 0.;
    int c = 0;
    for(DBIDIter niter = set.iter(); niter.valid(); niter.advance()) {
      if(DBIDUtil.equal(iter, niter)) {
        continue; // Skip the object itself.
      }
      final double kdist = knnq.getKNNForDBID(niter, kplus1).getKNNDistance();
      if(kdist <= 0) {
        // A neighbor with zero kNN distance has infinite density.
        sum = Double.POSITIVE_INFINITY;
        c++;
        break;
      }
      sum += 1. / kdist;
      c++;
    }
    // INFLO = (average neighbor density) / (own density).
    sum *= knn.getKNNDistance();
    final double inflo = sum == 0 ? 1. : sum / c;
    inflos.putDouble(iter, inflo);
    inflominmax.put(inflo);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
|
Compute the final INFLO scores .
|
34,241
|
/**
 * Run the ODIN algorithm: score each object by its in-degree in the
 * k-nearest-neighbor graph; low in-degree indicates an outlier
 * (hence the inverted score meta).
 *
 * @param database database to operate on
 * @param relation the relation to process
 * @return outlier result with in-degree scores
 */
public OutlierResult run(Database database, Relation<O> relation) {
  DistanceQuery<O> dq = database.getDistanceQuery(relation, getDistanceFunction());
  KNNQuery<O> knnq = database.getKNNQuery(dq, k);
  DBIDs ids = relation.getDBIDs();
  // In-degree counters, initialized to 0.
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_DB, 0.);
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    KNNList neighbors = knnq.getKNNForDBID(iter, k);
    for(DBIDIter nei = neighbors.iter(); nei.valid(); nei.advance()) {
      if(DBIDUtil.equal(iter, nei)) {
        continue; // Do not count the object as its own neighbor.
      }
      scores.put(nei, scores.doubleValue(nei) + 1);
    }
  }
  // Determine the observed score range.
  double min = Double.POSITIVE_INFINITY, max = 0.0;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    min = Math.min(min, scores.doubleValue(iter));
    max = Math.max(max, scores.doubleValue(iter));
  }
  OutlierScoreMeta meta = new InvertedOutlierScoreMeta(min, max, 0., ids.size() - 1, k);
  DoubleRelation rel = new MaterializedDoubleRelation("ODIN In-Degree", "odin", scores, ids);
  return new OutlierResult(meta, rel);
}
|
Run the ODIN algorithm
|
34,242
|
/**
 * Get a stock icon by name, using a soft-reference cache over the
 * classpath resource {@code name + ".png"}.
 *
 * @param name icon base name (without extension)
 * @return the icon, or {@code null} (with a warning) if the resource is missing
 */
public static Icon getStockIcon(String name) {
  // Fast path: soft-reference cache hit that has not been collected.
  final SoftReference<Icon> cached = iconcache.get(name);
  if(cached != null) {
    final Icon hit = cached.get();
    if(hit != null) {
      return hit;
    }
  }
  // Miss (or collected): load the resource from the classpath.
  final java.net.URL imgURL = StockIcon.class.getResource(name + ".png");
  if(imgURL == null) {
    LoggingUtil.warning("Could not find stock icon: " + name);
    return null;
  }
  final Icon icon = new ImageIcon(imgURL);
  iconcache.put(name, new SoftReference<>(icon));
  return icon;
}
|
Get a particular stock icon .
|
34,243
|
/**
 * Initialize the flat R-tree from an existing persistent file: every page
 * except page 0 becomes a direct child of a freshly created root.
 *
 * @param header tree index header read from the file
 * @param file the persistent page file
 */
public void initializeFromFile(TreeIndexHeader header, PageFile<FlatRStarTreeNode> file) {
  super.initializeFromFile(header, file);
  int nextPageID = file.getNextPageID();
  // Flat tree: the root must be able to hold every leaf node.
  dirCapacity = nextPageID;
  root = createNewDirectoryNode();
  // Page 0 is skipped; all remaining pages become root children.
  for(int i = 1; i < nextPageID; i++) {
    FlatRStarTreeNode node = getNode(i);
    root.addDirectoryEntry(createNewDirectoryEntry(node));
  }
  if(LOG.isDebugging()) {
    LOG.debugFine("root: " + root + " with " + nextPageID + " leafNodes.");
  }
}
|
Initializes the flat RTree from an existing persistent file .
|
34,244
|
/**
 * Bulk-load a cover tree node: choose a scale covering the given elements,
 * partition them into cover sets around candidate routing objects, and
 * recurse. Falls back to a leaf node when the scale bottoms out or too few
 * elements remain.
 *
 * @param cur routing object for the node under construction
 * @param maxScale maximum scale allowed for this node
 * @param parentDist distance of {@code cur} to its parent routing object
 * @param elems remaining elements to distribute (consumed/reused as scratch)
 * @return the constructed subtree
 */
protected Node bulkConstruct(DBIDRef cur, int maxScale, double parentDist, ModifiableDoubleDBIDList elems) {
  assert (!elems.contains(cur));
  final double max = maxDistance(elems);
  final int scale = Math.min(distToScale(max) - 1, maxScale);
  final int nextScale = scale - 1;
  // Leaf condition: all duplicates, minimum scale reached, or truncation size.
  if(max <= 0 || scale <= scaleBottom || elems.size() < truncate) {
    return new Node(cur, max, parentDist, elems);
  }
  ModifiableDoubleDBIDList candidates = DBIDUtil.newDistanceDBIDList();
  // Move elements beyond the cover radius into the candidate list.
  excludeNotCovered(elems, scaleToDist(scale), candidates);
  if(candidates.size() == 0) {
    // Everything was covered: retry with a smaller scale.
    LOG.warning("Scale not chosen appropriately? " + max + " " + scaleToDist(scale));
    return bulkConstruct(cur, nextScale, parentDist, elems);
  }
  Node node = new Node(cur, max, parentDist);
  final boolean curSingleton = elems.size() == 0;
  if(!curSingleton) {
    // Self-child: the routing object keeps its own covered elements.
    node.children.add(bulkConstruct(cur, nextScale, 0, elems));
  }
  final double fmax = scaleToDist(nextScale);
  // Consume candidates front-to-back; removeSwap(0) advances the iteration.
  for(DoubleDBIDListIter it = candidates.iter(); it.valid();) {
    assert (it.getOffset() == 0);
    DBID t = DBIDUtil.deref(it);
    elems.clear();
    // Gather the elements covered by this candidate at the next scale.
    collectByCover(it, candidates, fmax, elems);
    assert (DBIDUtil.equal(t, it)) : "First element in candidates must not change!";
    if(elems.size() == 0) {
      node.singletons.add(it.doubleValue(), it); // Covers nothing: store inline.
    }
    else {
      node.children.add(bulkConstruct(it, nextScale, it.doubleValue(), elems));
    }
    candidates.removeSwap(0);
  }
  assert (candidates.size() == 0);
  if(curSingleton) {
    if(node.isLeaf()) {
      node.children = null; // Leaf nodes carry no child list.
    }
    else {
      // Keep the routing object itself reachable as a singleton.
      node.singletons.add(parentDist, cur);
    }
  }
  return node;
}
|
Bulk - load the cover tree .
|
34,245
|
/**
 * Evaluate the quadratic weight function 1 - 0.9 * (distance / max)^2.
 * The standard deviation parameter is ignored by this weight.
 *
 * @param distance distance value
 * @param max maximum distance (non-positive yields full weight 1.0)
 * @param stddev ignored
 * @return weight in (0.1, 1.0]
 */
public double getWeight(double distance, double max, double stddev) {
  if(max <= 0) {
    return 1.0; // Degenerate maximum: assign full weight.
  }
  final double rel = distance / max;
  return 1.0 - 0.9 * rel * rel;
}
|
Evaluate quadratic weight . stddev is ignored .
|
34,246
|
/**
 * Maximum Euclidean distance between two minimum bounding rectangles.
 * Dimensions present in only one of the boxes are measured against the
 * origin (coordinate 0).
 *
 * @param mbr1 first bounding box
 * @param mbr2 second bounding box
 * @return maximum distance between any pair of contained points
 */
public double maxDist(SpatialComparable mbr1, SpatialComparable mbr2) {
  final int dim1 = mbr1.getDimensionality(), dim2 = mbr2.getDimensionality();
  final int shared = dim1 < dim2 ? dim1 : dim2;
  double sum = 0.;
  for(int d = 0; d < shared; d++) {
    // Farthest-corner separation in this dimension.
    final double a = mbr1.getMax(d) - mbr2.getMin(d);
    final double b = mbr2.getMax(d) - mbr1.getMin(d);
    final double delta = a > b ? a : b;
    sum += delta * delta;
  }
  // Excess dimensions of the first box, measured from zero.
  for(int d = shared; d < dim1; d++) {
    final double lo = Math.abs(mbr1.getMin(d)), hi = Math.abs(mbr1.getMax(d));
    final double delta = lo > hi ? lo : hi;
    sum += delta * delta;
  }
  // Excess dimensions of the second box, measured from zero.
  for(int d = shared; d < dim2; d++) {
    final double lo = Math.abs(mbr2.getMin(d)), hi = Math.abs(mbr2.getMax(d));
    final double delta = lo > hi ? lo : hi;
    sum += delta * delta;
  }
  return FastMath.sqrt(sum);
}
|
Maximum distance of two objects .
|
34,247
|
/**
 * Rebuild the parameter table from the options collected by an ELKI
 * parameter-tracking context: derive the display value, status bits
 * (optional / default / incomplete / invalid), and nesting depth for
 * each tracked parameter.
 *
 * @param track tracked parameters to import
 */
public synchronized void updateFromTrackParameters(TrackParameters track) {
  parameters.clear();
  for(TrackedParameter p : track.getAllParameters()) {
    Parameter<?> option = p.getParameter();
    // Derive the display value.
    String value = null;
    if(option.isDefined()) {
      if(option.tookDefaultValue()) {
        // Mark values that came from the default, not the user.
        value = DynamicParameters.STRING_USE_DEFAULT + option.getDefaultValueAsString();
      }
      else {
        value = option.getValueAsString();
      }
    }
    if(value == null) {
      value = (option instanceof Flag) ? Flag.NOT_SET : "";
    }
    // Derive the status bits.
    int bits = 0;
    if(option.isOptional()) {
      bits |= BIT_OPTIONAL;
    }
    if(option.hasDefaultValue() && option.tookDefaultValue()) {
      bits |= BIT_DEFAULT_VALUE;
    }
    if(value.length() <= 0) {
      // Empty value without a default on a required parameter: incomplete.
      if((bits & BIT_DEFAULT_VALUE) == 0 && (bits & BIT_OPTIONAL) == 0) {
        bits |= BIT_INCOMPLETE;
      }
    }
    else {
      try {
        // Re-validate user-supplied values only.
        if(!option.tookDefaultValue() && !option.isValid(value)) {
          bits |= BIT_INVALID;
        }
      }
      catch(ParameterException e) {
        bits |= BIT_INVALID;
      }
    }
    // Derive the nesting depth by walking the parent chain (capped at 10).
    int depth = 0;
    {
      Object pos = track.getParent(option);
      while(pos != null) {
        pos = track.getParent(pos);
        depth += 1;
        if(depth > 10) {
          break; // Guard against cycles in the parent chain.
        }
      }
    }
    parameters.add(new Node(option, value, bits, depth));
  }
}
|
Update the Parameter list from the collected options of an ELKI context
|
34,248
|
/**
 * Add a single parameter entry to the table.
 *
 * @param option the parameter object
 * @param value its display value
 * @param bits status bits (optional/default/incomplete/invalid)
 * @param depth nesting depth for display indentation
 */
public synchronized void addParameter(Parameter<?> option, String value, int bits, int depth) {
  parameters.add(new Node(option, value, bits, depth));
}
|
Add a single parameter to the list
|
34,249
|
/**
 * Collect all clustering results reachable from a result.
 *
 * @param r starting result
 * @return clusterings found; empty list (never null) when none
 */
public static List<Clustering<? extends Model>> getClusteringResults(Result r) {
  if(r instanceof Clustering<?>) {
    List<Clustering<?>> single = new ArrayList<>(1);
    single.add((Clustering<?>) r);
    return single;
  }
  if(!(r instanceof HierarchicalResult)) {
    return Collections.emptyList();
  }
  return ResultUtil.filterResults(((HierarchicalResult) r).getHierarchy(), r, Clustering.class);
}
|
Collect all clustering results from a Result
|
34,250
|
/**
 * Draw a random (latitude, longitude) pair.
 *
 * NOTE(review): the latitude term is squared, so it is always in [0, 90] —
 * southern latitudes are never produced; confirm whether this is intended.
 *
 * @param r random generator
 * @return array { latitude, longitude }
 */
private static double[] randomLatitudeLongitude(Random r) {
  final double u = 1. - r.nextDouble() * 2.;
  final double latitude = Math.pow(u, 2) / 2. * 180;
  final double longitude = (.5 - r.nextDouble()) * 360.;
  return new double[] { latitude, longitude };
}
|
Generate random coordinates .
|
34,251
|
/**
 * Absolute Pearson correlation distance: 1 - |corr(v1, v2)|.
 *
 * @param v1 first feature vector
 * @param v2 second feature vector
 * @return distance value
 */
public double distance(NumberVector v1, NumberVector v2) {
  final double corr = PearsonCorrelation.coefficient(v1, v2);
  return 1 - Math.abs(corr);
}
|
Computes the absolute Pearson correlation distance for two given feature vectors .
|
34,252
|
/**
 * Invert a radical-inverse value with respect to the configured base:
 * recover the integer whose radical inverse is (approximately) current.
 *
 * @param current radical-inverse value in [0, 1)
 * @return corresponding integer index
 */
private long inverse(double current) {
  final short[] digit = new short[maxi];
  int pos = 0;
  // Peel off up to maxi base-ary digits of the fraction.
  while(pos < maxi) {
    current *= base;
    digit[pos] = (short) current;
    current -= digit[pos];
    pos++;
    if(current <= 1e-10) {
      break; // remaining fraction is numerical noise
    }
  }
  // Reassemble the digits in reversed significance.
  long result = 0;
  for(int j = maxi - 1; j >= 0; j--) {
    result = result * base + digit[j];
  }
  return result;
}
|
Compute the inverse with respect to the given base .
|
34,253
|
/**
 * Radical inverse of i in the configured base: mirror the base-ary digits
 * of i around the radix point.
 *
 * @param i index
 * @return radical inverse in [0, 1)
 */
private double radicalInverse(long i) {
  final double scale = 1.0 / (double) base;
  double weight = scale;
  double result = 0.0;
  for(long rest = i; rest > 0; rest /= base) {
    result += weight * (double) (rest % base);
    weight *= scale;
  }
  return result;
}
|
Compute the radical inverse of i .
|
34,254
|
// Incrementally advance the radical-inverse sequence. Every MAXFAST steps
// the value is recomputed from scratch via radicalInverse() to limit
// floating-point drift; otherwise the next value is current + invbase,
// with an explicit carry-propagation branch when the sum would reach
// ALMOST_ONE.
// NOTE(review): mutates fields counter, inverse, current; the carry loop's
// exact update order is significant — do not reorder statements.
private double nextRadicalInverse ( ) { counter ++ ; if ( counter >= MAXFAST ) { counter = 0 ; inverse += MAXFAST ; current = radicalInverse ( inverse ) ; return current ; } double nextInverse = current + invbase ; if ( nextInverse < ALMOST_ONE ) { current = nextInverse ; return current ; } else { double digit1 = invbase , digit2 = invbase * invbase ; while ( current + digit2 >= ALMOST_ONE ) { digit1 = digit2 ; digit2 *= invbase ; } current += ( digit1 - 1.0 ) + digit2 ; return current ; } }
|
Compute the next radical inverse .
|
34,255
|
/**
 * Format the set dimensions as "[d1&lt;sep&gt;d2&lt;sep&gt;...]" using
 * 1-based dimension numbers. (The method name keeps its historical
 * misspelling for API compatibility.)
 *
 * @param sep separator between dimension numbers
 * @return bracketed dimension list
 */
public String dimensonsToString(String sep) {
  StringBuilder buf = new StringBuilder(100).append('[');
  for(int d = BitsUtil.nextSetBit(dimensions, 0); d >= 0; d = BitsUtil.nextSetBit(dimensions, d + 1)) {
    buf.append(d + 1).append(sep);
  }
  // Strip the trailing separator, if any dimension was appended.
  if(buf.length() > sep.length()) {
    buf.setLength(buf.length() - sep.length());
  }
  return buf.append(']').toString();
}
|
Returns a string representation of the dimensions of this subspace .
|
34,256
|
/**
 * Test whether this subspace is contained in the given subspace, i.e. every
 * dimension of this subspace is also set in the other one.
 *
 * @param subspace candidate superspace
 * @return true if this is a subspace of the argument
 */
public boolean isSubspace(Subspace subspace) {
  if(this.dimensionality > subspace.dimensionality) {
    return false;
  }
  return BitsUtil.intersectionSize(dimensions, subspace.dimensions) == dimensionality;
}
|
Returns true if this subspace is a subspace of the specified subspace i . e . if the set of dimensions building this subspace are contained in the set of dimensions building the specified subspace .
|
34,257
|
/**
 * Compute the overlap of the two bounding boxes induced by a bit
 * assignment of entries to two sides of a split.
 *
 * @param entries entry collection
 * @param getter adapter for accessing the entries
 * @param assign bitmask: set bit = first side, clear bit = second side
 * @return overlap volume of the two MBRs
 * @throws AbortException when either side received no entries
 */
protected <E extends SpatialComparable, A> double computeOverlap(A entries, ArrayAdapter<E, A> getter, long[] assign) {
  ModifiableHyperBoundingBox first = null, second = null;
  final int size = getter.size(entries);
  for(int i = 0; i < size; i++) {
    final E entry = getter.get(entries, i);
    if(BitsUtil.get(assign, i)) {
      if(first != null) {
        first.extend(entry);
      } else {
        first = new ModifiableHyperBoundingBox(entry);
      }
    } else {
      if(second != null) {
        second.extend(entry);
      } else {
        second = new ModifiableHyperBoundingBox(entry);
      }
    }
  }
  if(first == null || second == null) {
    throw new AbortException("Invalid state in split: one of the sets is empty.");
  }
  return SpatialUtil.overlap(first, second);
}
|
Compute overlap of assignment
|
34,258
|
// Recursively arrange objects by binary median splits: place the median of
// [start, end) in the current dimension via QuickSelect (in place), then
// recurse on both halves with the next dimension. A non-null dims array
// gives an explicit dimension order, else dimensions cycle 0..numdim-1.
// NOTE(review): the guards (start < mid - 1, mid + 2 < end) skip
// partitions of fewer than two elements; intervals appear to be half-open
// [start, end) — confirm against QuickSelect's contract.
private void binarySplitSort ( List < ? extends SpatialComparable > objs , final int start , final int end , int depth , final int numdim , int [ ] dims , Sorter comp ) { final int mid = start + ( ( end - start ) >>> 1 ) ; comp . setDimension ( dims != null ? dims [ depth ] : depth ) ; QuickSelect . quickSelect ( objs , comp , start , end , mid ) ; final int nextdim = ( depth + 1 ) % numdim ; if ( start < mid - 1 ) { binarySplitSort ( objs , start , mid , nextdim , numdim , dims , comp ) ; } if ( mid + 2 < end ) { binarySplitSort ( objs , mid + 1 , end , nextdim , numdim , dims , comp ) ; } }
|
Sort the array using a binary split in dimension curdim then recurse with the next dimension .
|
34,259
|
/**
 * Create a new element in the SVG namespace of the given document.
 *
 * @param document owner document
 * @param name SVG tag name
 * @return newly created element
 */
public static Element svgElement(Document document, String name) {
  return document.createElementNS(SVGConstants.SVG_NAMESPACE_URI, name);
}
|
Create a SVG element in appropriate namespace
|
34,260
|
/**
 * Set the inline SVG style attribute of an element.
 *
 * @param el element to modify
 * @param d style string to set
 */
public static void setStyle(Element el, String d) {
  el.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, d);
}
|
Set a SVG style attribute
|
34,261
|
/**
 * Add a CSS class to an element, unless it is already present.
 *
 * @param e element to modify
 * @param cssclass class name to add
 */
public static void addCSSClass(Element e, String cssclass) {
  final String current = e.getAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE);
  if(current == null || current.isEmpty()) {
    setAtt(e, SVGConstants.SVG_CLASS_ATTRIBUTE, cssclass);
    return;
  }
  for(String existing : current.split(" ")) {
    if(existing.equals(cssclass)) {
      return; // already present, nothing to do
    }
  }
  setAtt(e, SVGConstants.SVG_CLASS_ATTRIBUTE, current + " " + cssclass);
}
|
Add a CSS class to an Element .
|
34,262
|
// Remove one CSS class from an element's class attribute.
// Special-cases 1 and 2 existing classes to avoid string joining: with a
// single matching class the attribute is removed entirely; with two
// classes the surviving one is kept (and the attribute is removed when
// both equal the target). For 3+ classes the non-matching names are
// re-joined with single spaces.
public static void removeCSSClass ( Element e , String cssclass ) { String oldval = e . getAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE ) ; if ( oldval == null ) { return ; } String [ ] classes = oldval . split ( " " ) ; if ( classes . length == 1 ) { if ( cssclass . equals ( classes [ 0 ] ) ) { e . removeAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE ) ; } } else if ( classes . length == 2 ) { if ( cssclass . equals ( classes [ 0 ] ) ) { if ( cssclass . equals ( classes [ 1 ] ) ) { e . removeAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE ) ; } else { e . setAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE , classes [ 1 ] ) ; } } else if ( cssclass . equals ( classes [ 1 ] ) ) { e . setAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE , classes [ 0 ] ) ; } } else { StringBuilder joined = new StringBuilder ( ) ; for ( String c : classes ) { if ( ! c . equals ( cssclass ) ) { if ( joined . length ( ) > 0 ) { joined . append ( ' ' ) ; } joined . append ( c ) ; } } e . setAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE , joined . toString ( ) ) ; } }
|
Remove a CSS class from an Element .
|
34,263
|
/**
 * Create a CSS &lt;style&gt; element for the given document.
 *
 * @param document owner document
 * @return new style element with text/css type
 */
public static Element makeStyleElement(Document document) {
  final Element style = SVGUtil.svgElement(document, SVGConstants.SVG_STYLE_TAG);
  style.setAttribute(SVGConstants.SVG_TYPE_ATTRIBUTE, SVGConstants.CSS_MIME_TYPE);
  return style;
}
|
Make a new CSS style element for the given Document .
|
34,264
|
/**
 * Create an SVG rectangle element.
 *
 * @param document owner document
 * @param x left coordinate
 * @param y top coordinate
 * @param w width
 * @param h height
 * @return new rect element
 */
public static Element svgRect(Document document, double x, double y, double w, double h) {
  final Element rect = SVGUtil.svgElement(document, SVGConstants.SVG_RECT_TAG);
  SVGUtil.setAtt(rect, SVGConstants.SVG_X_ATTRIBUTE, x);
  SVGUtil.setAtt(rect, SVGConstants.SVG_Y_ATTRIBUTE, y);
  SVGUtil.setAtt(rect, SVGConstants.SVG_WIDTH_ATTRIBUTE, w);
  SVGUtil.setAtt(rect, SVGConstants.SVG_HEIGHT_ATTRIBUTE, h);
  return rect;
}
|
Create a SVG rectangle element .
|
34,265
|
/**
 * Create an SVG circle element.
 *
 * @param document owner document
 * @param cx center x
 * @param cy center y
 * @param r radius
 * @return new circle element
 */
public static Element svgCircle(Document document, double cx, double cy, double r) {
  final Element circle = SVGUtil.svgElement(document, SVGConstants.SVG_CIRCLE_TAG);
  SVGUtil.setAtt(circle, SVGConstants.SVG_CX_ATTRIBUTE, cx);
  SVGUtil.setAtt(circle, SVGConstants.SVG_CY_ATTRIBUTE, cy);
  SVGUtil.setAtt(circle, SVGConstants.SVG_R_ATTRIBUTE, r);
  return circle;
}
|
Create a SVG circle element .
|
34,266
|
/**
 * Create an SVG line element (not to be confused with a path element).
 *
 * @param document owner document
 * @param x1 start x
 * @param y1 start y
 * @param x2 end x
 * @param y2 end y
 * @return new line element
 */
public static Element svgLine(Document document, double x1, double y1, double x2, double y2) {
  final Element line = SVGUtil.svgElement(document, SVGConstants.SVG_LINE_TAG);
  SVGUtil.setAtt(line, SVGConstants.SVG_X1_ATTRIBUTE, x1);
  SVGUtil.setAtt(line, SVGConstants.SVG_Y1_ATTRIBUTE, y1);
  SVGUtil.setAtt(line, SVGConstants.SVG_X2_ATTRIBUTE, x2);
  SVGUtil.setAtt(line, SVGConstants.SVG_Y2_ATTRIBUTE, y2);
  return line;
}
|
Create a SVG line element . Do not confuse this with path elements .
|
34,267
|
/**
 * Convert an SVG color specification to an AWT color: first via the named
 * color table, then via the stylesheet color parser.
 *
 * @param str color name or specification
 * @return AWT color object
 */
public static Color stringToColor(String str) {
  final int named = SVG_COLOR_NAMES.getInt(str.toLowerCase());
  return (named != NO_VALUE) ? new Color(named, false) : colorLookupStylesheet.stringToColor(str);
}
|
Convert a color name from SVG syntax to an AWT color object .
|
34,268
|
/**
 * Format an integer RGB value as a lowercase CSS hex color "#rrggbb".
 * Bits above the low 24 are ignored.
 *
 * @param col RGB color value
 * @return CSS hex color string
 */
public static String colorToString(int col) {
  final char[] out = new char[] { '#', 'X', 'X', 'X', 'X', 'X', 'X' };
  // Fill hex digits from least significant nibble, right to left.
  for(int pos = 6; pos > 0; pos--) {
    final int nibble = col & 0xF;
    out[pos] = (char) (nibble < 10 ? '0' + nibble : 'a' + nibble - 10);
    col >>>= 4;
  }
  return new String(out);
}
|
Convert a color name from an integer RGB color to CSS syntax
|
34,269
|
/**
 * Translate a DOM mouse event's screen position into the local coordinate
 * system of the given SVG element.
 *
 * @param doc SVG document
 * @param tag element whose coordinate system to use
 * @param evt mouse event
 * @return point in element coordinates, or null on failure
 */
public static SVGPoint elementCoordinatesFromEvent(Document doc, Element tag, Event evt) {
  try {
    final DOMMouseEvent mouse = (DOMMouseEvent) evt;
    final SVGMatrix inverse = ((SVGLocatable) tag).getScreenCTM().inverse();
    final SVGPoint point = ((SVGDocument) doc).getRootElement().createSVGPoint();
    point.setX(mouse.getClientX());
    point.setY(mouse.getClientY());
    return point.matrixTransform(inverse);
  }
  catch(Exception e) {
    // Deliberate best-effort: log and return null rather than propagate.
    LoggingUtil.warning("Error getting coordinates from SVG event.", e);
    return null;
  }
}
|
Convert the coordinates of an DOM Event from screen into element coordinates .
|
34,270
|
/**
 * Remove the last child of an element, if it has one.
 *
 * @param tag element to trim
 */
public static void removeLastChild(Element tag) {
  final Node tail = tag.getLastChild();
  if(tail == null) {
    return;
  }
  tag.removeChild(tail);
}
|
Remove last child of an element when present
|
34,271
|
/**
 * Detach an element from its parent, when both are defined.
 *
 * @param elem element to detach; may be null
 */
public static void removeFromParent(Element elem) {
  if(elem == null) {
    return;
  }
  final Node parent = elem.getParentNode();
  if(parent != null) {
    parent.removeChild(elem);
  }
}
|
Remove an element from its parent if defined .
|
34,272
|
// Build an annular (donut) segment path: from the inner start point, line
// out to the outer radius, arc along the outer circle by angleDelta, line
// back to the inner radius, and — when innerRadius > 0 — arc back along
// the inner circle to close the ring. Angles are measured from the
// 12 o'clock position (x uses sin, y uses -cos); largeArc is set for
// sweeps of at least pi.
public static Element svgCircleSegment ( SVGPlot svgp , double centerx , double centery , double angleStart , double angleDelta , double innerRadius , double outerRadius ) { final DoubleWrapper tmp = new DoubleWrapper ( ) ; double sin1st = FastMath . sinAndCos ( angleStart , tmp ) ; double cos1st = tmp . value ; double sin2nd = FastMath . sinAndCos ( angleStart + angleDelta , tmp ) ; double cos2nd = tmp . value ; double inner1stx = centerx + ( innerRadius * sin1st ) ; double inner1sty = centery - ( innerRadius * cos1st ) ; double outer1stx = centerx + ( outerRadius * sin1st ) ; double outer1sty = centery - ( outerRadius * cos1st ) ; double inner2ndx = centerx + ( innerRadius * sin2nd ) ; double inner2ndy = centery - ( innerRadius * cos2nd ) ; double outer2ndx = centerx + ( outerRadius * sin2nd ) ; double outer2ndy = centery - ( outerRadius * cos2nd ) ; double largeArc = angleDelta >= Math . PI ? 1 : 0 ; SVGPath path = new SVGPath ( inner1stx , inner1sty ) . lineTo ( outer1stx , outer1sty ) . ellipticalArc ( outerRadius , outerRadius , 0 , largeArc , 1 , outer2ndx , outer2ndy ) . lineTo ( inner2ndx , inner2ndy ) ; if ( innerRadius > 0 ) { path . ellipticalArc ( innerRadius , innerRadius , 0 , largeArc , 0 , inner1stx , inner1sty ) ; } return path . makeElement ( svgp ) ; }
|
Create a circle segment .
|
34,273
|
/**
 * Compute the core distance (minPts-nearest-neighbor distance) for every
 * object.
 *
 * @param ids objects to process
 * @param knnQ kNN query to use
 * @param minPts number of neighbors defining the core distance
 * @return store containing each object's core distance
 */
protected WritableDoubleDataStore computeCoreDists(DBIDs ids, KNNQuery<O> knnQ, int minPts) {
  final Logging log = getLogger();
  final WritableDoubleDataStore store = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  final FiniteProgress prog = log.isVerbose() ? new FiniteProgress("Computing core sizes", ids.size(), log) : null;
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    store.put(it, knnQ.getKNNForDBID(it, minPts).getKNNDistance());
    log.incrementProcessed(prog);
  }
  log.ensureCompleted(prog);
  return store;
}
|
Compute the core distances for all objects .
|
34,274
|
// Convert a minimum spanning tree (edges held in a DoubleLongHeap, each
// value packing two 31-bit object offsets) into the pointer hierarchy
// representation: pi (parent) and lambda (merge distance).
// Edges are consumed in ascending distance order; the two while-loops
// chase pi-links to find each endpoint's current representative before the
// smaller id is linked under the larger. A final pass re-points parents
// whose lambda is already exceeded, flattening the representation.
// NOTE(review): the operation order (peek before poll, the
// representative-chasing loops) is essential — do not restructure.
protected void convertToPointerRepresentation ( ArrayDBIDs ids , DoubleLongHeap heap , WritableDBIDDataStore pi , WritableDoubleDataStore lambda ) { final Logging LOG = getLogger ( ) ; for ( DBIDArrayIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { pi . put ( iter , iter ) ; } DBIDVar p = DBIDUtil . newVar ( ) , q = DBIDUtil . newVar ( ) , n = DBIDUtil . newVar ( ) ; FiniteProgress pprog = LOG . isVerbose ( ) ? new FiniteProgress ( "Converting MST to pointer representation" , heap . size ( ) , LOG ) : null ; while ( ! heap . isEmpty ( ) ) { final double dist = heap . peekKey ( ) ; final long pair = heap . peekValue ( ) ; final int i = ( int ) ( pair >>> 31 ) , j = ( int ) ( pair & 0x7FFFFFFFL ) ; ids . assignVar ( i , p ) ; while ( ! DBIDUtil . equal ( p , pi . assignVar ( p , n ) ) ) { p . set ( n ) ; } ids . assignVar ( j , q ) ; while ( ! DBIDUtil . equal ( q , pi . assignVar ( q , n ) ) ) { q . set ( n ) ; } int c = DBIDUtil . compare ( p , q ) ; if ( c < 0 ) { pi . put ( p , q ) ; lambda . put ( p , dist ) ; } else { assert ( c != 0 ) : "This should never happen!" ; pi . put ( q , p ) ; lambda . put ( q , dist ) ; } heap . poll ( ) ; LOG . incrementProcessed ( pprog ) ; } LOG . ensureCompleted ( pprog ) ; for ( DBIDArrayIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double d = lambda . doubleValue ( iter ) ; pi . assignVar ( iter , p ) ; q . set ( p ) ; while ( d >= lambda . doubleValue ( q ) && ! DBIDUtil . equal ( q , pi . assignVar ( q , n ) ) ) { q . set ( n ) ; } if ( ! DBIDUtil . equal ( p , q ) ) { if ( LOG . isDebuggingFinest ( ) ) { LOG . finest ( "Correcting parent: " + p + " -> " + q ) ; } pi . put ( iter , q ) ; } } }
|
Convert spanning tree to a pointer representation .
|
34,275
|
// Replace the heap top with the new (distance, id) pair and maintain tie
// bookkeeping: when the k-distance strictly decreased, previously recorded
// ties are obsolete and cleared; otherwise the displaced id is recorded as
// a tie candidate.
// NOTE(review): assumes heap is a bounded max-heap over the k nearest
// candidates — confirm against the enclosing class.
private void updateHeap ( final double distance , final int iid ) { final double prevdist = kdist ; final int previd = heap . peekValue ( ) ; heap . replaceTopElement ( distance , iid ) ; kdist = heap . peekKey ( ) ; if ( kdist < prevdist ) { numties = 0 ; } else { addToTies ( previd ) ; } }
|
Do a full update for the heap .
|
34,276
|
/**
 * Append an id to the ties list, growing the backing array when it is full.
 *
 * @param id object id to record as tied
 */
private void addToTies(int id) {
  if(numties == ties.length) {
    // Grow roughly 2x (+1 handles an initial zero-length array).
    ties = Arrays.copyOf(ties, (ties.length << 1) + 1);
  }
  ties[numties++] = id;
}
|
Append an id to the ties list, growing the backing array when needed.
|
34,277
|
/**
 * Number of free parameters of the clustering model:
 * (m - 1) mixing weights + m * dim means + m further per-cluster
 * parameters (presumably one variance each — confirm with caller).
 *
 * @param relation data relation (supplies the dimensionality)
 * @param clustering clustering with mean models
 * @return number of free parameters
 */
public static int numberOfFreeParameters(Relation<? extends NumberVector> relation, Clustering<? extends MeanModel> clustering) {
  final int m = clustering.getAllClusters().size();
  final int dim = RelationUtil.dimensionality(relation);
  return m - 1 + m * dim + m;
}
|
Compute the number of free parameters .
|
34,278
|
// Write one clustering as a single text line: one integer cluster number
// per object, in DBID-range order, space separated, followed by a label.
// A DBIDRange is required for a stable column order; it is searched first
// among the clustering's parent relations, then in any database of the
// hierarchy; without one the dump is skipped with a warning.
// The trailing label is forceLabel when configured (omitted when empty),
// otherwise the clustering's long name.
protected void dumpClusteringOutput ( PrintStream writer , ResultHierarchy hierarchy , Clustering < ? > c ) { DBIDRange ids = null ; for ( It < Relation < ? > > iter = hierarchy . iterParents ( c ) . filter ( Relation . class ) ; iter . valid ( ) ; iter . advance ( ) ) { DBIDs pids = iter . get ( ) . getDBIDs ( ) ; if ( pids instanceof DBIDRange ) { ids = ( DBIDRange ) pids ; break ; } LOG . warning ( "Parent result " + iter . get ( ) . getLongName ( ) + " has DBID type " + pids . getClass ( ) ) ; } if ( ids == null ) { for ( It < Database > iter = hierarchy . iterAll ( ) . filter ( Database . class ) ; iter . valid ( ) ; iter . advance ( ) ) { DBIDs pids = iter . get ( ) . getRelation ( TypeUtil . ANY ) . getDBIDs ( ) ; if ( pids instanceof DBIDRange ) { ids = ( DBIDRange ) pids ; break ; } LOG . warning ( "Parent result " + iter . get ( ) . getLongName ( ) + " has DBID type " + pids . getClass ( ) ) ; } } if ( ids == null ) { LOG . warning ( "Cannot dump cluster assignment, as I do not have a well-defined DBIDRange to use for a unique column assignment. DBIDs must be a continuous range." ) ; return ; } WritableIntegerDataStore map = DataStoreUtil . makeIntegerStorage ( ids , DataStoreFactory . HINT_TEMP ) ; int cnum = 0 ; for ( Cluster < ? > clu : c . getAllClusters ( ) ) { for ( DBIDIter iter = clu . getIDs ( ) . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { map . putInt ( iter , cnum ) ; } ++ cnum ; } for ( DBIDArrayIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { if ( iter . getOffset ( ) > 0 ) { writer . append ( ' ' ) ; } writer . append ( Integer . toString ( map . intValue ( iter ) ) ) ; } if ( forceLabel != null ) { if ( forceLabel . length ( ) > 0 ) { writer . append ( ' ' ) . append ( forceLabel ) ; } } else { writer . append ( ' ' ) . append ( c . getLongName ( ) ) ; } writer . append ( '\n' ) ; }
|
Dump a single clustering result .
|
34,279
|
/**
 * Materialize the accumulated mean as a vector of the relation's type.
 *
 * @param relation relation providing the vector factory
 * @param <F> vector type
 * @return mean as a number vector
 */
public <F extends NumberVector> F getMeanVector(Relation<? extends F> relation) {
  return RelationUtil.getNumberVectorFactory(relation) //
      .newNumberVector(mean);
}
|
Get the mean as vector .
|
34,280
|
/**
 * Reset the accumulator: zero the mean buffers and the covariance
 * elements, lazily allocating the element matrix on first use.
 */
public void reset() {
  Arrays.fill(mean, 0.);
  Arrays.fill(nmea, 0.);
  if(elements == null) {
    elements = new double[mean.length][mean.length];
  } else {
    for(double[] row : elements) {
      Arrays.fill(row, 0.);
    }
  }
  wsum = 0.;
}
|
Reset the covariance matrix .
|
34,281
|
// Build a covariance matrix over a full relation in two passes: first
// accumulate and normalize the mean, then accumulate centered outer
// products into the upper triangle and mirror it to the lower triangle.
// c.nmea is reused as the per-object centered-vector buffer; wsum is set
// to the object count (elements remain unnormalized sums here).
// An empty relation yields the all-zero matrix.
public static CovarianceMatrix make ( Relation < ? extends NumberVector > relation ) { int dim = RelationUtil . dimensionality ( relation ) ; CovarianceMatrix c = new CovarianceMatrix ( dim ) ; double [ ] mean = c . mean ; int count = 0 ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { NumberVector vec = relation . get ( iditer ) ; for ( int i = 0 ; i < dim ; i ++ ) { mean [ i ] += vec . doubleValue ( i ) ; } count ++ ; } if ( count == 0 ) { return c ; } for ( int i = 0 ; i < dim ; i ++ ) { mean [ i ] /= count ; } double [ ] tmp = c . nmea ; double [ ] [ ] elems = c . elements ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { NumberVector vec = relation . get ( iditer ) ; for ( int i = 0 ; i < dim ; i ++ ) { tmp [ i ] = vec . doubleValue ( i ) - mean [ i ] ; } for ( int i = 0 ; i < dim ; i ++ ) { for ( int j = i ; j < dim ; j ++ ) { elems [ i ] [ j ] += tmp [ i ] * tmp [ j ] ; } } } for ( int i = 0 ; i < dim ; i ++ ) { for ( int j = i + 1 ; j < dim ; j ++ ) { elems [ j ] [ i ] = elems [ i ] [ j ] ; } } c . wsum = count ; return c ; }
|
Static Constructor from a full relation .
|
34,282
|
// Render this progress as "task:   <processed> [ 42%]", with the processed
// count right-aligned to totalLength digits and the percentage padded to
// three columns, followed by a remaining-time estimate ("N min remaining"
// above 300 seconds, else seconds) when a rate is known and work remains.
// NOTE(review): the "+ .2" in the ETA rounding biases the estimate
// slightly upward — presumably intentional; confirm before changing.
public StringBuilder appendToBuffer ( StringBuilder buf ) { String processedString = Integer . toString ( getProcessed ( ) ) ; int percentage = ( int ) ( getProcessed ( ) * 100.0 / total ) ; buf . append ( getTask ( ) ) ; buf . append ( ": " ) ; for ( int i = 0 ; i < totalLength - processedString . length ( ) ; i ++ ) { buf . append ( ' ' ) ; } buf . append ( getProcessed ( ) ) ; buf . append ( " [" ) ; if ( percentage < 100 ) { buf . append ( ' ' ) ; } if ( percentage < 10 ) { buf . append ( ' ' ) ; } buf . append ( percentage ) ; buf . append ( "%]" ) ; if ( ratems > 0. && getProcessed ( ) < total ) { buf . append ( ' ' ) ; int secs = ( int ) Math . round ( ( total - getProcessed ( ) ) / ratems / 1000. + .2 ) ; if ( secs > 300 ) { buf . append ( secs / 60 ) ; buf . append ( " min remaining" ) ; } else { buf . append ( secs ) ; buf . append ( " sec remaining" ) ; } } return buf ; }
|
Append a string representation of the progress to the given string buffer .
|
34,283
|
/**
 * Force the progress to its completed state (so progress bars disappear),
 * logging a warning when it had not completed on its own.
 *
 * @param logger logger to report through
 */
public void ensureCompleted(Logging logger) {
  if(isComplete()) {
    return;
  }
  logger.warning("Progress had not completed automatically as expected: " + getProcessed() + "/" + total, new Throwable());
  setProcessed(getTotal());
  logger.progress(this);
}
|
Ensure that the progress was completed to make progress bars disappear
|
34,284
|
// Density-based clustering via neighborhood expansion: every unlabeled
// object seeds a new cluster, which grows through range queries (radius =
// the per-object value in radii) over an expanding chain of members.
// Unlabeled neighbors join the cluster; neighbors that belong to a
// different cluster cause that whole cluster to be merged in (members
// relabeled, the old cluster emptied).
// NOTE(review): iterating nChain while appending to it is intentional —
// the iterator must observe elements added during the traversal.
private void clusterData ( DBIDs ids , RangeQuery < O > rnnQuery , WritableDoubleDataStore radii , WritableDataStore < ModifiableDBIDs > labels ) { FiniteProgress clustProg = LOG . isVerbose ( ) ? new FiniteProgress ( "Density-Based Clustering" , ids . size ( ) , LOG ) : null ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { if ( labels . get ( iter ) != null ) { continue ; } ModifiableDBIDs newCluster = DBIDUtil . newArray ( ) ; newCluster . add ( iter ) ; labels . put ( iter , newCluster ) ; LOG . incrementProcessed ( clustProg ) ; ModifiableDBIDs nChain = DBIDUtil . newArray ( ) ; nChain . add ( iter ) ; for ( DBIDIter toGetNeighbors = nChain . iter ( ) ; toGetNeighbors . valid ( ) ; toGetNeighbors . advance ( ) ) { double range = radii . doubleValue ( toGetNeighbors ) ; DoubleDBIDList nNeighbors = rnnQuery . getRangeForDBID ( toGetNeighbors , range ) ; for ( DoubleDBIDListIter iter2 = nNeighbors . iter ( ) ; iter2 . valid ( ) ; iter2 . advance ( ) ) { if ( DBIDUtil . equal ( toGetNeighbors , iter2 ) ) { continue ; } if ( labels . get ( iter2 ) == null ) { newCluster . add ( iter2 ) ; labels . put ( iter2 , newCluster ) ; nChain . add ( iter2 ) ; LOG . incrementProcessed ( clustProg ) ; } else if ( labels . get ( iter2 ) != newCluster ) { ModifiableDBIDs toBeDeleted = labels . get ( iter2 ) ; newCluster . addDBIDs ( toBeDeleted ) ; for ( DBIDIter iter3 = toBeDeleted . iter ( ) ; iter3 . valid ( ) ; iter3 . advance ( ) ) { labels . put ( iter3 , newCluster ) ; } toBeDeleted . clear ( ) ; } } } } LOG . ensureCompleted ( clustProg ) ; }
|
This method applies a density-based clustering algorithm.
|
34,285
|
/**
 * Record each object's current cluster size and count singletons.
 *
 * @param ids objects to process
 * @param labels per-object cluster assignment
 * @param newSizes output store for per-object cluster sizes
 * @return number of objects still in singleton clusters
 */
private int updateSizes(DBIDs ids, WritableDataStore<ModifiableDBIDs> labels, WritableIntegerDataStore newSizes) {
  int unmerged = 0;
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    final int size = labels.get(it).size();
    newSizes.putInt(it, size);
    unmerged += (size == 1) ? 1 : 0;
  }
  return unmerged;
}
|
This method updates each object's cluster size after the clustering step.
|
34,286
|
// SLINK single-linkage clustering: objects are inserted one at a time
// while maintaining the pointer representation (pi: parent, lambda: merge
// distance, m: per-step distance scratch).
// When the distance function is primitive, the distance-query indirection
// is skipped via step2primitive; otherwise a database distance query is
// used. The scratch store m is destroyed before returning; lambda defaults
// to +infinity for the final root.
public PointerHierarchyRepresentationResult run ( Database database , Relation < O > relation ) { DBIDs ids = relation . getDBIDs ( ) ; WritableDBIDDataStore pi = DataStoreUtil . makeDBIDStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_STATIC ) ; WritableDoubleDataStore lambda = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_STATIC , Double . POSITIVE_INFINITY ) ; WritableDoubleDataStore m = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP ) ; final Logging log = getLogger ( ) ; FiniteProgress progress = log . isVerbose ( ) ? new FiniteProgress ( "Running SLINK" , ids . size ( ) , log ) : null ; ArrayDBIDs aids = DBIDUtil . ensureArray ( ids ) ; DBIDArrayIter id = aids . iter ( ) , it = aids . iter ( ) ; for ( ; id . valid ( ) ; id . advance ( ) ) { pi . put ( id , id ) ; } log . incrementProcessed ( progress ) ; if ( getDistanceFunction ( ) instanceof PrimitiveDistanceFunction ) { PrimitiveDistanceFunction < ? super O > distf = ( PrimitiveDistanceFunction < ? super O > ) getDistanceFunction ( ) ; for ( id . seek ( 1 ) ; id . valid ( ) ; id . advance ( ) ) { step2primitive ( id , it , id . getOffset ( ) , relation , distf , m ) ; process ( id , aids , it , id . getOffset ( ) , pi , lambda , m ) ; log . incrementProcessed ( progress ) ; } } else { DistanceQuery < O > distQ = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; for ( id . seek ( 1 ) ; id . valid ( ) ; id . advance ( ) ) { step2 ( id , it , id . getOffset ( ) , distQ , m ) ; process ( id , aids , it , id . getOffset ( ) , pi , lambda , m ) ; log . incrementProcessed ( progress ) ; } } log . ensureCompleted ( progress ) ; m . destroy ( ) ; m = null ; return new PointerHierarchyRepresentationResult ( ids , pi , lambda , getDistanceFunction ( ) . isSquared ( ) ) ; }
|
Performs the SLINK algorithm on the given database .
|
34,287
|
/**
 * One SLINK main-loop iteration: run algorithm steps 3 and 4 for the newly
 * inserted object.
 *
 * @param id newly inserted object
 * @param ids all objects in array order
 * @param it reusable iterator over ids
 * @param n number of previously processed objects
 * @param pi parent pointer store
 * @param lambda merge distance store
 * @param m temporary distance store
 */
protected void process(DBIDRef id, ArrayDBIDs ids, DBIDArrayIter it, int n, WritableDBIDDataStore pi, WritableDoubleDataStore lambda, WritableDoubleDataStore m) {
  slinkstep3(id, it, n, pi, lambda, m);
  slinkstep4(id, it, n, pi, lambda);
}
|
SLINK main loop .
|
34,288
|
/**
 * Append an object to the cluster order.
 *
 * @param id object to add
 * @param reach reachability distance
 * @param pre predecessor; may be null or an unset variable (e.g. seeds)
 */
public void add(DBIDRef id, double reach, DBIDRef pre) {
  ids.add(id);
  reachability.putDouble(id, reach);
  // Skip predecessor bookkeeping for missing or unset predecessors.
  final boolean unset = pre == null || (pre instanceof DBIDVar && !((DBIDVar) pre).isSet());
  if(!unset) {
    predecessor.putDBID(id, pre);
  }
}
|
Add an object to the cluster order .
|
34,289
|
/**
 * Sort the given ids by this cluster order.
 *
 * @param ids subset of objects to arrange
 * @return the ids, in cluster order
 */
public ArrayModifiableDBIDs order(DBIDs ids) {
  final ArrayModifiableDBIDs sorted = DBIDUtil.newArray(ids.size());
  for(DBIDIter it = this.ids.iter(); it.valid(); it.advance()) {
    if(ids.contains(it)) {
      sorted.add(it);
    }
  }
  return sorted;
}
|
Use the cluster order to sort the given collection ids .
|
34,290
|
/**
 * Retrieve the predecessor of an object into the output variable; the
 * variable is unset when no predecessor information is stored.
 *
 * @param id object to query
 * @param out output variable receiving the predecessor
 */
public void getPredecessor(DBIDRef id, DBIDVar out) {
  if(predecessor != null) {
    predecessor.assignVar(id, out);
  } else {
    out.unset();
  }
}
|
Get the predecessor .
|
34,291
|
// Connectivity-based Outlier Factor (COF) in three steps:
// (1) materialize kNN neighborhoods, (2) compute each object's average
// chaining distance, (3) score each object relative to its neighbors'
// chaining distances. Scores are wrapped with quotient outlier score
// metadata (baseline 1.0, range [0, +inf)).
public OutlierResult run ( Database database , Relation < O > relation ) { StepProgress stepprog = LOG . isVerbose ( ) ? new StepProgress ( "COF" , 3 ) : null ; DistanceQuery < O > dq = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; LOG . beginStep ( stepprog , 1 , "Materializing COF neighborhoods." ) ; KNNQuery < O > knnq = DatabaseUtil . precomputedKNNQuery ( database , relation , dq , k ) ; DBIDs ids = relation . getDBIDs ( ) ; LOG . beginStep ( stepprog , 2 , "Computing Average Chaining Distances." ) ; WritableDoubleDataStore acds = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP ) ; computeAverageChainingDistances ( knnq , dq , ids , acds ) ; LOG . beginStep ( stepprog , 3 , "Computing Connectivity-based Outlier Factors." ) ; WritableDoubleDataStore cofs = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_DB ) ; DoubleMinMax cofminmax = new DoubleMinMax ( ) ; computeCOFScores ( knnq , ids , acds , cofs , cofminmax ) ; LOG . setCompleted ( stepprog ) ; DoubleRelation scoreResult = new MaterializedDoubleRelation ( "Connectivity-Based Outlier Factor" , "cof-outlier" , cofs , ids ) ; OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta ( cofminmax . getMin ( ) , cofminmax . getMax ( ) , 0.0 , Double . POSITIVE_INFINITY , 1.0 ) ; return new OutlierResult ( scoreMeta , scoreResult ) ; }
|
Runs the COF algorithm on the given database .
|
34,292
|
// COF score per object: acds(o) * k / sum of the neighbors' average
// chaining distances, skipping the query object itself.
// Degenerate cases: when the neighbor sum is zero the score is +infinity
// if the object's own chaining distance is positive, else 1.
// Scores are stored and fed into the min/max tracker for normalization.
private void computeCOFScores ( KNNQuery < O > knnq , DBIDs ids , DoubleDataStore acds , WritableDoubleDataStore cofs , DoubleMinMax cofminmax ) { FiniteProgress progressCOFs = LOG . isVerbose ( ) ? new FiniteProgress ( "COF for objects" , ids . size ( ) , LOG ) : null ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { final KNNList neighbors = knnq . getKNNForDBID ( iter , k ) ; double sum = 0. ; for ( DBIDIter neighbor = neighbors . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { if ( DBIDUtil . equal ( neighbor , iter ) ) { continue ; } sum += acds . doubleValue ( neighbor ) ; } final double cof = ( sum > 0. ) ? ( acds . doubleValue ( iter ) * k / sum ) : ( acds . doubleValue ( iter ) > 0. ? Double . POSITIVE_INFINITY : 1. ) ; cofs . putDouble ( iter , cof ) ; cofminmax . put ( cof ) ; LOG . incrementProcessed ( progressCOFs ) ; } LOG . ensureCompleted ( progressCOFs ) ; }
|
Compute Connectivity outlier factors .
|
34,293
|
/**
 * Enqueue an update to run at the next appropriate time. Without an
 * attached synchronizer the queue is drained immediately; otherwise the
 * synchronizer is notified.
 *
 * @param r task to run
 */
public void invokeLater(Runnable r) {
  queue.add(r);
  synchronized(this) {
    if(synchronizer != null) {
      synchronizer.activate();
    } else {
      runQueue();
    }
  }
}
|
Add a new update to run at any appropriate time .
|
34,294
|
/**
 * Drain and execute the pending update queue now; normally invoked via the
 * UpdateSynchronizer. Exceptions thrown by tasks are logged, not
 * propagated, so one failing task cannot stall the queue.
 */
public void runQueue() {
  synchronized(sync) {
    while(!queue.isEmpty()) {
      final Runnable task = queue.poll();
      if(task == null) {
        LoggingUtil.warning("Tried to run a 'null' Object.");
        continue;
      }
      try {
        task.run();
      }
      catch(Exception e) {
        LoggingUtil.exception(e);
      }
    }
  }
}
|
Run the processing queue now . This should usually be only invoked by the UpdateSynchronizer
|
34,295
|
/**
 * Attach an update synchronizer. Attaching the same synchronizer twice or
 * a second, different synchronizer is rejected with a warning.
 *
 * @param newsync synchronizer to attach
 */
public synchronized void synchronizeWith(UpdateSynchronizer newsync) {
  if(synchronizer == newsync) {
    LoggingUtil.warning("Double-synced to the same plot!", new Throwable());
    return;
  }
  if(synchronizer != null) {
    LoggingUtil.warning("Attempting to synchronize to more than one synchronizer.");
    return;
  }
  synchronizer = newsync;
  newsync.addUpdateRunner(this);
}
|
Set a new update synchronizer .
|
34,296
|
/**
 * Detach the given update synchronizer and drain any pending updates.
 * Mismatched or missing synchronizers are reported and ignored.
 *
 * @param oldsync synchronizer expected to be attached
 */
public synchronized void unsynchronizeWith(UpdateSynchronizer oldsync) {
  if(synchronizer == null) {
    LoggingUtil.warning("Warning: was not synchronized.");
    return;
  }
  if(synchronizer != oldsync) {
    LoggingUtil.warning("Warning: was synchronized differently!");
    return;
  }
  synchronizer = null;
  runQueue();
}
|
Remove an update synchronizer
|
34,297
|
/**
 * Estimate an initial precision (beta) from one row of pairwise distances,
 * ignoring infinite entries (NaN entries are also skipped).
 *
 * @param dist_i distances from point i to the other points
 * @param perplexity target perplexity
 * @return initial beta estimate; 1 when the distances are degenerate
 */
protected static double estimateInitialBeta(double[] dist_i, double perplexity) {
  double sumsq = 0.;
  for(double dist : dist_i) {
    final double sq = dist * dist;
    if(sq < Double.POSITIVE_INFINITY) {
      sumsq += sq;
    }
  }
  if(sumsq > 0 && sumsq < Double.POSITIVE_INFINITY) {
    return .5 / sumsq * perplexity * (dist_i.length - 1.);
  }
  return 1.;
}
|
Estimate beta from the distances in a row .
|
34,298
|
/**
 * Collect all relation (annotation) results reachable from a result.
 *
 * @param r starting result
 * @return relations found; empty list (never null) when none
 */
public static List<Relation<?>> getRelations(Result r) {
  if(r instanceof Relation<?>) {
    List<Relation<?>> single = new ArrayList<>(1);
    single.add((Relation<?>) r);
    return single;
  }
  if(!(r instanceof HierarchicalResult)) {
    return Collections.emptyList();
  }
  return filterResults(((HierarchicalResult) r).getHierarchy(), r, Relation.class);
}
|
Collect all Annotation results from a Result
|
34,299
|
/**
 * Collect all ordering results reachable from a result.
 *
 * @param r starting result
 * @return ordering results found; empty list (never null) when none
 */
public static List<OrderingResult> getOrderingResults(Result r) {
  if(r instanceof OrderingResult) {
    List<OrderingResult> single = new ArrayList<>(1);
    single.add((OrderingResult) r);
    return single;
  }
  if(!(r instanceof HierarchicalResult)) {
    return Collections.emptyList();
  }
  return filterResults(((HierarchicalResult) r).getHierarchy(), r, OrderingResult.class);
}
|
Collect all ordering results from a Result
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.