idx
int64
0
41.2k
question
stringlengths
83
4.15k
target
stringlengths
5
715
34,300
/**
 * Collect all collection results reachable from a result.
 *
 * @param r starting result
 * @return all {@link CollectionResult}s found (possibly empty)
 */
public static List<CollectionResult<?>> getCollectionResults(Result r) {
  if(r instanceof CollectionResult<?>) {
    // The result itself already is a collection result.
    List<CollectionResult<?>> single = new ArrayList<>(1);
    single.add((CollectionResult<?>) r);
    return single;
  }
  if(r instanceof HierarchicalResult) {
    // Search the hierarchy below r.
    return filterResults(((HierarchicalResult) r).getHierarchy(), r, CollectionResult.class);
  }
  return Collections.emptyList();
}
Collect all collection results from a Result
34,301
/**
 * Collect all iterable results reachable from a result.
 *
 * @param r starting result
 * @return all {@link IterableResult}s found (possibly empty)
 */
public static List<IterableResult<?>> getIterableResults(Result r) {
  if(r instanceof IterableResult<?>) {
    // The result itself already is an iterable result.
    List<IterableResult<?>> single = new ArrayList<>(1);
    single.add((IterableResult<?>) r);
    return single;
  }
  if(r instanceof HierarchicalResult) {
    // Search the hierarchy below r.
    return filterResults(((HierarchicalResult) r).getHierarchy(), r, IterableResult.class);
  }
  return Collections.emptyList();
}
Return all Iterable results
34,302
/**
 * Return only results of the given restriction class, from the subtree
 * rooted at (and including) the given result.
 *
 * @param hier result hierarchy to search
 * @param r root of the subtree
 * @param restrictionClass class filter
 * @return matching results, in iteration order
 */
public static <C extends Result> ArrayList<C> filterResults(ResultHierarchy hier, Result r, Class<? super C> restrictionClass) {
  ArrayList<C> matches = new ArrayList<>();
  // Iterate r and all of its descendants, keeping only instances of the filter class.
  for(It<C> iter = hier.iterDescendantsSelf(r).filter(restrictionClass); iter.valid(); iter.advance()) {
    matches.add(iter.get());
  }
  return matches;
}
Return only results of the given restriction class
34,303
/**
 * Add a child result below a parent in the shared result hierarchy.
 *
 * @param parent parent result
 * @param child result to attach
 */
public static void addChildResult(HierarchicalResult parent, Result child) {
  parent.getHierarchy().add(parent, child);
}
Add a child result .
34,304
/**
 * Find the first database result in the tree.
 *
 * @param hier result hierarchy
 * @param baseResult root to search from
 * @return first {@link Database} found, or {@code null} if there is none
 */
public static Database findDatabase(ResultHierarchy hier, Result baseResult) {
  List<Database> candidates = filterResults(hier, baseResult, Database.class);
  return candidates.isEmpty() ? null : candidates.get(0);
}
Find the first database result in the tree .
34,305
/**
 * Recursively remove a result and its children from the hierarchy.
 *
 * @param hierarchy hierarchy to modify
 * @param child result to remove (together with its subtree)
 */
public static void removeRecursive(ResultHierarchy hierarchy, Result child) {
  // Detach the result from every parent first.
  for(It<Result> it = hierarchy.iterParents(child); it.valid(); it.advance()) {
    hierarchy.remove(it.get(), child);
  }
  // Then recurse into the (former) children.
  for(It<Result> it = hierarchy.iterChildren(child); it.valid(); it.advance()) {
    removeRecursive(hierarchy, it.get());
  }
}
Recursively remove a result and its children .
34,306
/**
 * Find the first {@code tdim} eigenvectors and eigenvalues using power
 * iteration with deflation.
 *
 * For each dimension: start from a random unit vector, repeatedly multiply
 * by the matrix (at most 100 rounds) until the squared change of the
 * eigenvector estimate drops below 1e-10, record the eigenvalue estimate,
 * and then deflate the matrix via updateMatrix before finding the next one.
 *
 * NOTE(review): {@code imat} is modified in place by the deflation step;
 * callers must not reuse it afterwards.
 *
 * @param imat input matrix (square, size x size; mutated!)
 * @param evs output: evs[d] receives the d-th eigenvector, d &lt; tdim
 * @param lambda output: eigenvalue estimates, length &gt;= tdim
 */
protected void findEigenVectors ( double [ ] [ ] imat , double [ ] [ ] evs , double [ ] lambda ) { final int size = imat . length ; Random rnd = random . getSingleThreadedRandom ( ) ; double [ ] tmp = new double [ size ] ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Learning projections" , tdim , LOG ) : null ; for ( int d = 0 ; d < tdim ; ) { final double [ ] cur = evs [ d ] ; randomInitialization ( cur , rnd ) ; double l = multiply ( imat , cur , tmp ) ; for ( int iter = 0 ; iter < 100 ; iter ++ ) { double delta = updateEigenvector ( tmp , cur , l ) ; if ( delta < 1e-10 ) { break ; } l = multiply ( imat , cur , tmp ) ; } lambda [ d ++ ] = l = estimateEigenvalue ( imat , cur ) ; LOG . incrementProcessed ( prog ) ; if ( d == tdim ) { break ; } updateMatrix ( imat , cur , l ) ; } LOG . ensureCompleted ( prog ) ; }
Find the first eigenvectors and eigenvalues using power iterations .
34,307
/**
 * Fill the output array with a random vector of unit Euclidean norm, for
 * starting power iterations.
 *
 * @param out vector to initialize (overwritten)
 * @param rnd random generator to draw from
 */
protected void randomInitialization(double[] out, Random rnd) {
  double sqlen = 0.;
  // Retry until the vector has nonzero length; an all-zero draw is
  // astronomically unlikely but would break the normalization below.
  while(!(sqlen > 0)) {
    for(int i = 0; i < out.length; i++) {
      double v = rnd.nextDouble();
      out[i] = v;
      sqlen += v * v;
    }
  }
  // Rescale to unit norm.
  final double inv = 1. / FastMath.sqrt(sqlen);
  for(int i = 0; i < out.length; i++) {
    out[i] *= inv;
  }
}
Choose a random vector of unit norm for power iterations .
34,308
/**
 * Normalize the new eigenvector estimate (dividing by |lambda| and fixing
 * the sign so the first component is positive), copy it into {@code out},
 * and return the squared change from the previous estimate.
 *
 * @param in new (unnormalized) estimate; normalized in place
 * @param out previous estimate; overwritten with the normalized new one
 * @param l current eigenvalue estimate (a zero estimate scales by 1)
 * @return sum of squared component differences (convergence measure)
 */
protected double updateEigenvector(double[] in, double[] out, double l) {
  // Divide by |lambda|, guarding against a zero estimate.
  double scale = 1. / (l > 0. ? l : l < 0. ? -l : 1.);
  // Sign convention: make the first component positive.
  scale = (in[0] > 0.) ? scale : -scale;
  double sqchange = 0.;
  for(int i = 0; i < in.length; i++) {
    in[i] *= scale;
    final double delta = in[i] - out[i];
    sqchange += delta * delta;
    out[i] = in[i];
  }
  return sqchange;
}
Compute the change in the eigenvector and normalize the output vector while doing so .
34,309
/**
 * Deflate the matrix by removing the effect of a known eigenvector:
 * subtract eval * evec * evec^T in place.
 *
 * @param mat square matrix, modified in place
 * @param evec eigenvector to remove
 * @param eval corresponding eigenvalue
 */
protected void updateMatrix(double[][] mat, final double[] evec, double eval) {
  final int n = mat.length;
  for(int i = 0; i < n; i++) {
    final double[] row = mat[i];
    final double ei = evec[i];
    for(int j = 0; j < n; j++) {
      row[j] -= eval * ei * evec[j];
    }
  }
}
Update matrix by removing the effects of a known Eigenvector .
34,310
/**
 * Probability density function of the generalized extreme value (GEV)
 * distribution.
 *
 * @param x evaluation point
 * @param mu location parameter
 * @param sigma scale parameter
 * @param k shape parameter (k = 0 is the Gumbel case)
 * @return density at x
 */
public static double pdf(double x, double mu, double sigma, double k) {
  if(x == Double.POSITIVE_INFINITY || x == Double.NEGATIVE_INFINITY) {
    return 0.; // Density vanishes at the infinities.
  }
  final double xs = (x - mu) / sigma; // Standardize.
  if(!(k > 0) && !(k < 0)) {
    // Gumbel case (shape zero, or NaN shape as in the original code path).
    return FastMath.exp(-xs - FastMath.exp(-xs)) / sigma;
  }
  if(k * xs > 1) {
    return 0.; // Outside the support.
  }
  final double t = FastMath.log(1 - k * xs);
  return t == Double.NEGATIVE_INFINITY ? 1. / sigma //
      : t == Double.POSITIVE_INFINITY ? 0. //
          : FastMath.exp((1 - k) * t / k - FastMath.exp(t / k)) / sigma;
}
PDF of GEV distribution
34,311
/**
 * Cumulative distribution function of the generalized extreme value (GEV)
 * distribution.
 *
 * @param val evaluation point
 * @param mu location parameter
 * @param sigma scale parameter
 * @param k shape parameter (k = 0 is the Gumbel case)
 * @return cumulative probability at val
 */
public static double cdf(double val, double mu, double sigma, double k) {
  final double x = (val - mu) / sigma;
  if(!(k > 0) && !(k < 0)) {
    // Gumbel case.
    return FastMath.exp(-FastMath.exp(-x));
  }
  if(k * x > 1) {
    // Outside the support: the CDF saturates at 1 (upper tail)
    // respectively 0 (lower tail).
    return k > 0 ? 1 : 0;
  }
  return FastMath.exp(-FastMath.exp(FastMath.log(1 - k * x) / k));
}
CDF of GEV distribution
34,312
/**
 * Quantile function of the generalized extreme value (GEV) distribution.
 *
 * @param val probability in [0, 1]
 * @param mu location parameter
 * @param sigma scale parameter
 * @param k shape parameter (k = 0 is the Gumbel case)
 * @return quantile, or NaN when val is not a probability
 */
public static double quantile(double val, double mu, double sigma, double k) {
  if(val < 0.0 || val > 1.0) {
    return Double.NaN; // Not a probability.
  }
  if(k < 0) {
    // Clamp at the finite endpoint of the support.
    return mu + sigma * Math.max((1. - FastMath.pow(-FastMath.log(val), k)) / k, 1. / k);
  }
  if(k > 0) {
    return mu + sigma * Math.min((1. - FastMath.pow(-FastMath.log(val), k)) / k, 1. / k);
  }
  // Gumbel case.
  return mu + sigma * FastMath.log(1. / FastMath.log(1. / val));
}
Quantile function of GEV distribution
34,313
/**
 * Cumulative distribution function of the Rayleigh distribution.
 *
 * @param x evaluation point
 * @param sigma scale parameter
 * @return cumulative probability at x (0 for non-positive x)
 */
public static double cdf(double x, double sigma) {
  if(x <= 0.) {
    return 0.; // Left of the support.
  }
  final double xs = x / sigma;
  return 1. - FastMath.exp(-.5 * xs * xs);
}
CDF of Rayleigh distribution
34,314
/**
 * Quantile function of the Rayleigh distribution.
 *
 * @param val probability in [0, 1]
 * @param sigma scale parameter
 * @return quantile, NaN for invalid input, +infinity at val = 1
 */
public static double quantile(double val, double sigma) {
  if(!(val >= 0.) || !(val <= 1.)) {
    return Double.NaN; // NaN input or outside [0, 1].
  }
  if(val == 0.) {
    return 0.;
  }
  if(val == 1.) {
    return Double.POSITIVE_INFINITY;
  }
  // Invert F(x) = 1 - exp(-x^2 / (2 sigma^2)).
  return sigma * FastMath.sqrt(-2. * FastMath.log(1. - val));
}
Quantile function of Rayleigh distribution
34,315
/**
 * Run ABOD (Angle-Based Outlier Detection) on the data set: compute the
 * angle-based outlier factor of every object from the kernel matrix and
 * package the scores as an outlier result. Low ABOF values indicate
 * outliers, hence the inverted score meta.
 *
 * @param db database, used to obtain the similarity query
 * @param relation relation to process
 * @return outlier result with one ABOF score per object
 */
public OutlierResult run ( Database db , Relation < V > relation ) { ArrayDBIDs ids = DBIDUtil . ensureArray ( relation . getDBIDs ( ) ) ; SimilarityQuery < V > sq = db . getSimilarityQuery ( relation , kernelFunction ) ; KernelMatrix kernelMatrix = new KernelMatrix ( sq , relation , ids ) ; WritableDoubleDataStore abodvalues = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_STATIC ) ; DoubleMinMax minmaxabod = new DoubleMinMax ( ) ; MeanVariance s = new MeanVariance ( ) ; DBIDArrayIter pA = ids . iter ( ) , pB = ids . iter ( ) , pC = ids . iter ( ) ; for ( ; pA . valid ( ) ; pA . advance ( ) ) { final double abof = computeABOF ( kernelMatrix , pA , pB , pC , s ) ; minmaxabod . put ( abof ) ; abodvalues . putDouble ( pA , abof ) ; } DoubleRelation scoreResult = new MaterializedDoubleRelation ( "Angle-Based Outlier Degree" , "abod-outlier" , abodvalues , relation . getDBIDs ( ) ) ; OutlierScoreMeta scoreMeta = new InvertedOutlierScoreMeta ( minmaxabod . getMin ( ) , minmaxabod . getMax ( ) , 0.0 , Double . POSITIVE_INFINITY ) ; return new OutlierResult ( scoreMeta , scoreResult ) ; }
Run ABOD on the data set .
34,316
/**
 * Compute the exact ABOF value of object pA: the weighted variance of the
 * angle spectrum over all unordered pairs (pB, pC), using kernel-space
 * similarities only.
 *
 * Squared distances come from the kernel via k(a,a) + k(b,b) - 2 k(a,b);
 * pairs coinciding with pA (non-positive squared distance) are skipped.
 * Each angle term is weighted by 1 / (|AB|^2 * |AC|^2), passed as the
 * second argument to s.put().
 *
 * @param kernelMatrix precomputed kernel matrix
 * @param pA query object
 * @param pB scratch iterator, rewound to offset 0 here
 * @param pC scratch iterator, started after pB to visit each pair once
 * @param s scratch mean/variance accumulator; reset on entry
 * @return naive (biased) weighted variance of the angle spectrum
 */
protected double computeABOF ( KernelMatrix kernelMatrix , DBIDRef pA , DBIDArrayIter pB , DBIDArrayIter pC , MeanVariance s ) { s . reset ( ) ; double simAA = kernelMatrix . getSimilarity ( pA , pA ) ; for ( pB . seek ( 0 ) ; pB . valid ( ) ; pB . advance ( ) ) { if ( DBIDUtil . equal ( pB , pA ) ) { continue ; } double simBB = kernelMatrix . getSimilarity ( pB , pB ) ; double simAB = kernelMatrix . getSimilarity ( pA , pB ) ; double sqdAB = simAA + simBB - simAB - simAB ; if ( ! ( sqdAB > 0. ) ) { continue ; } for ( pC . seek ( pB . getOffset ( ) + 1 ) ; pC . valid ( ) ; pC . advance ( ) ) { if ( DBIDUtil . equal ( pC , pA ) ) { continue ; } double simCC = kernelMatrix . getSimilarity ( pC , pC ) ; double simAC = kernelMatrix . getSimilarity ( pA , pC ) ; double sqdAC = simAA + simCC - simAC - simAC ; if ( ! ( sqdAC > 0. ) ) { continue ; } double simBC = kernelMatrix . getSimilarity ( pB , pC ) ; double numerator = simBC - simAB - simAC + simAA ; double div = 1. / ( sqdAB * sqdAC ) ; s . put ( numerator * div , FastMath . sqrt ( div ) ) ; } } return s . getNaiveVariance ( ) ; }
Compute the exact ABOF value .
34,317
/**
 * Run the parallel kNN-weight outlier detector: a processor pipeline
 * computes the k nearest neighbors (k+1 including the query point), the
 * KNNWeightProcessor reduces each list to the kNN weight (score), which is
 * written to the data store and folded into a min/max tracker; the
 * pipeline is executed in parallel over all ids.
 *
 * @param database database used to build the distance and kNN queries
 * @param relation relation to process
 * @return outlier result with one kNN-weight score per object
 */
public OutlierResult run ( Database database , Relation < O > relation ) { DBIDs ids = relation . getDBIDs ( ) ; WritableDoubleDataStore store = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_DB ) ; DistanceQuery < O > distq = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; KNNQuery < O > knnq = database . getKNNQuery ( distq , k + 1 ) ; KNNProcessor < O > knnm = new KNNProcessor < > ( k + 1 , knnq ) ; SharedObject < KNNList > knnv = new SharedObject < > ( ) ; knnm . connectKNNOutput ( knnv ) ; KNNWeightProcessor kdistm = new KNNWeightProcessor ( k + 1 ) ; SharedDouble kdistv = new SharedDouble ( ) ; kdistm . connectKNNInput ( knnv ) ; kdistm . connectOutput ( kdistv ) ; WriteDoubleDataStoreProcessor storem = new WriteDoubleDataStoreProcessor ( store ) ; storem . connectInput ( kdistv ) ; DoubleMinMaxProcessor mmm = new DoubleMinMaxProcessor ( ) ; mmm . connectInput ( kdistv ) ; ParallelExecutor . run ( ids , knnm , kdistm , storem , mmm ) ; DoubleMinMax minmax = mmm . getMinMax ( ) ; DoubleRelation scoreres = new MaterializedDoubleRelation ( "kNN weight Outlier Score" , "knnw-outlier" , store , ids ) ; OutlierScoreMeta meta = new BasicOutlierScoreMeta ( minmax . getMin ( ) , minmax . getMax ( ) , 0. , Double . POSITIVE_INFINITY , 0. ) ; return new OutlierResult ( meta , scoreres ) ; }
Run the parallel kNN weight outlier detector .
34,318
/**
 * Run UK-means clustering on discrete uncertain objects.
 *
 * Initialization draws k random objects and uses their centers of mass as
 * initial means. The main loop alternates nearest-cluster assignment and
 * mean recomputation until no assignment changes or maxiter is reached
 * (maxiter &lt;= 0 means no iteration limit). Empty clusters are omitted
 * from the final result.
 *
 * @param database database (not used directly here)
 * @param relation relation of discrete uncertain objects
 * @return clustering with one cluster per non-empty partition
 */
public Clustering < ? > run ( final Database database , final Relation < DiscreteUncertainObject > relation ) { if ( relation . size ( ) <= 0 ) { return new Clustering < > ( "Uk-Means Clustering" , "ukmeans-clustering" ) ; } DBIDs sampleids = DBIDUtil . randomSample ( relation . getDBIDs ( ) , k , rnd ) ; List < double [ ] > means = new ArrayList < > ( k ) ; for ( DBIDIter iter = sampleids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { means . add ( ArrayLikeUtil . toPrimitiveDoubleArray ( relation . get ( iter ) . getCenterOfMass ( ) ) ) ; } List < ModifiableDBIDs > clusters = new ArrayList < > ( ) ; for ( int i = 0 ; i < k ; i ++ ) { clusters . add ( DBIDUtil . newHashSet ( ( int ) ( relation . size ( ) * 2. / k ) ) ) ; } WritableIntegerDataStore assignment = DataStoreUtil . makeIntegerStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_TEMP | DataStoreFactory . HINT_HOT , - 1 ) ; double [ ] varsum = new double [ k ] ; IndefiniteProgress prog = LOG . isVerbose ( ) ? new IndefiniteProgress ( "UK-Means iteration" , LOG ) : null ; DoubleStatistic varstat = LOG . isStatistics ( ) ? new DoubleStatistic ( this . getClass ( ) . getName ( ) + ".variance-sum" ) : null ; int iteration = 0 ; for ( ; maxiter <= 0 || iteration < maxiter ; iteration ++ ) { LOG . incrementProcessed ( prog ) ; boolean changed = assignToNearestCluster ( relation , means , clusters , assignment , varsum ) ; logVarstat ( varstat , varsum ) ; if ( ! changed ) { break ; } means = means ( clusters , means , relation ) ; } LOG . setCompleted ( prog ) ; if ( LOG . isStatistics ( ) ) { LOG . statistics ( new LongStatistic ( KEY + ".iterations" , iteration ) ) ; } Clustering < KMeansModel > result = new Clustering < > ( "Uk-Means Clustering" , "ukmeans-clustering" ) ; for ( int i = 0 ; i < clusters . size ( ) ; i ++ ) { DBIDs ids = clusters . get ( i ) ; if ( ids . isEmpty ( ) ) { continue ; } result . addToplevelCluster ( new Cluster < > ( ids , new KMeansModel ( means . 
get ( i ) , varsum [ i ] ) ) ) ; } return result ; }
Run the clustering .
34,319
/**
 * Update the cluster assignment of a single object.
 *
 * @param iditer object to (re)assign
 * @param clusters cluster member sets, updated in place
 * @param assignment assignment store, updated in place
 * @param newA index of the new cluster
 * @return true if the assignment changed
 */
protected boolean updateAssignment(DBIDIter iditer, List<? extends ModifiableDBIDs> clusters, WritableIntegerDataStore assignment, int newA) {
  final int prev = assignment.intValue(iditer);
  if(prev == newA) {
    return false; // Nothing to do.
  }
  // Add to the new cluster, then record the new assignment.
  clusters.get(newA).add(iditer);
  assignment.putInt(iditer, newA);
  // Remove from the previous cluster, if the object had one (-1 = unassigned).
  if(prev >= 0) {
    clusters.get(prev).remove(iditer);
  }
  return true;
}
Update the cluster assignment .
34,320
/**
 * Get the expected (average) squared Euclidean distance between a vector
 * and the samples of an uncertain object.
 *
 * Fix: the original kept a separate {@code counter} that merely mirrored
 * {@code uo.getNumberSamples()}; it is removed. Behavior is unchanged
 * (including NaN for an object with zero samples).
 *
 * @param rep representative vector
 * @param uo uncertain object with discrete samples
 * @return mean squared Euclidean distance over all samples
 */
protected double getExpectedRepDistance(NumberVector rep, DiscreteUncertainObject uo) {
  SquaredEuclideanDistanceFunction euclidean = SquaredEuclideanDistanceFunction.STATIC;
  final int num = uo.getNumberSamples();
  double sum = 0.0;
  for(int i = 0; i < num; i++) {
    sum += euclidean.distance(rep, uo.getSample(i));
  }
  return sum / num;
}
Get expected distance between a Vector and an uncertain object
34,321
/**
 * Log statistics on the variance sum, if statistics logging is enabled.
 *
 * @param varstat statistic object, or null when logging is disabled
 * @param varsum per-cluster variance contributions to sum up
 */
protected void logVarstat(DoubleStatistic varstat, double[] varsum) {
  if(varstat == null) {
    return; // Statistics logging disabled.
  }
  getLogger().statistics(varstat.setDouble(sum(varsum)));
}
Log statistics on the variance sum .
34,322
/**
 * Save the current settings store to the configured file.
 *
 * File format: a comment header, then one record per setting consisting of
 * a title line, the parameter lines, and a terminating blank line.
 *
 * Fix: the original leaked the PrintStream when a write failed; the stream
 * is now managed with try-with-resources.
 *
 * @throws FileNotFoundException if the file cannot be opened for writing
 */
public void save() throws FileNotFoundException {
  try (PrintStream p = new PrintStream(file)) {
    p.println(COMMENT_PREFIX + "Saved ELKI settings. First line is title, remaining lines are parameters.");
    for(Pair<String, ArrayList<String>> settings : store) {
      p.println(settings.first);
      for(String str : settings.second) {
        p.println(str);
      }
      p.println(); // Blank line terminates each record.
    }
  }
}
Save the current data to the given file .
34,323
/**
 * Read the settings file into the in-memory store.
 *
 * Records are separated by blank lines; the first line of each record is
 * the title, the rest are parameter strings. Comment lines are skipped.
 *
 * Fixes: the reader was leaked when an IOException occurred mid-read (now
 * try-with-resources), and readLine() could in principle return null even
 * after ready() (now guarded).
 *
 * @throws FileNotFoundException if the file does not exist
 * @throws IOException on read errors
 */
public void load() throws FileNotFoundException, IOException {
  try (BufferedReader is = new BufferedReader(new InputStreamReader(new FileInputStream(file)))) {
    ArrayList<String> buf = new ArrayList<>();
    while(is.ready()) {
      String line = is.readLine();
      if(line == null) {
        break; // Defensive: end of stream despite ready().
      }
      if(line.startsWith(COMMENT_PREFIX)) {
        continue;
      }
      if(line.length() == 0 && !buf.isEmpty()) {
        // Blank line: finish the current record.
        String title = buf.remove(0);
        store.add(new Pair<>(title, buf));
        buf = new ArrayList<>();
      }
      else {
        buf.add(line);
      }
    }
    if(!buf.isEmpty()) {
      // Final record without trailing blank line.
      String title = buf.remove(0);
      store.add(new Pair<>(title, buf));
    }
  }
}
Read the current file
34,324
/**
 * Remove the first setting with the given key from the store.
 *
 * @param key title of the record to remove
 */
public void remove(String key) {
  for(Iterator<Pair<String, ArrayList<String>>> it = store.iterator(); it.hasNext();) {
    if(key.equals(it.next().first)) {
      it.remove();
      return; // Only the first match is removed.
    }
  }
}
Remove a given key from the file .
34,325
/**
 * Find a saved setting by key.
 *
 * @param key title of the record to find
 * @return the stored parameter lines, or null if the key is unknown
 */
public ArrayList<String> get(String key) {
  for(Pair<String, ArrayList<String>> entry : store) {
    if(key.equals(entry.first)) {
      return entry.second;
    }
  }
  return null; // No record with that key.
}
Find a saved setting by key .
34,326
/**
 * Perform the ORCLUS algorithm on the given database.
 *
 * Start with k_c = min(size, k_i * k) seeds in full dimensionality dim_c,
 * then repeatedly: assign points, recompute each non-empty cluster's
 * basis, shrink k_c by factor alpha towards k and dim_c by factor beta
 * towards l, and merge clusters down to the new k_c. Beta is chosen so
 * both targets are reached in the same number of rounds.
 *
 * @param database database (not used directly)
 * @param relation relation to cluster
 * @return flat clustering of the final clusters
 * @throws IllegalStateException if the data dimensionality is below l
 */
public Clustering < Model > run ( Database database , Relation < V > relation ) { int dim_c = RelationUtil . dimensionality ( relation ) ; if ( dim_c < l ) { throw new IllegalStateException ( "Dimensionality of data < parameter l! " + "(" + dim_c + " < " + l + ")" ) ; } int k_c = Math . min ( relation . size ( ) , k_i * k ) ; List < ORCLUSCluster > clusters = initialSeeds ( relation , k_c ) ; double beta = FastMath . exp ( - FastMath . log ( dim_c / ( double ) l ) * FastMath . log ( 1 / alpha ) / FastMath . log ( k_c / ( double ) k ) ) ; IndefiniteProgress cprogress = LOG . isVerbose ( ) ? new IndefiniteProgress ( "Current number of clusters:" , LOG ) : null ; while ( k_c > k ) { assign ( relation , clusters ) ; for ( ORCLUSCluster cluster : clusters ) { if ( cluster . objectIDs . size ( ) > 0 ) { cluster . basis = findBasis ( relation , cluster , dim_c ) ; } } k_c = ( int ) Math . max ( k , k_c * alpha ) ; dim_c = ( int ) Math . max ( l , dim_c * beta ) ; merge ( relation , clusters , k_c , dim_c , cprogress ) ; if ( cprogress != null ) { cprogress . setProcessed ( clusters . size ( ) , LOG ) ; } } assign ( relation , clusters ) ; LOG . setCompleted ( cprogress ) ; Clustering < Model > r = new Clustering < > ( "ORCLUS clustering" , "orclus-clustering" ) ; for ( ORCLUSCluster c : clusters ) { r . addToplevelCluster ( new Cluster < Model > ( c . objectIDs , ClusterModel . CLUSTER ) ) ; } return r ; }
Performs the ORCLUS algorithm on the given database .
34,327
/**
 * Initialize the list of seeds with a random sample of size k.
 *
 * @param database relation holding the vectors
 * @param k number of seeds to draw
 * @return list of k seed clusters, one per sampled object
 */
private List<ORCLUSCluster> initialSeeds(Relation<V> database, int k) {
  DBIDs sample = DBIDUtil.randomSample(database.getDBIDs(), k, rnd);
  List<ORCLUSCluster> seeds = new ArrayList<>(k);
  for(DBIDIter iter = sample.iter(); iter.valid(); iter.advance()) {
    seeds.add(new ORCLUSCluster(database.get(iter).toArray(), iter));
  }
  return seeds;
}
Initializes the list of seeds with a random sample of size k .
34,328
/**
 * Create a partitioning of the database by assigning each object to its
 * closest seed, where distances are measured in each cluster's own
 * projected subspace: both the object and the cluster centroid are
 * projected with that cluster's basis before the (squared Euclidean)
 * distance is taken. Afterwards, the centroid of every non-empty cluster
 * is recomputed from its members.
 *
 * NOTE(review): comparing distances computed in different subspaces is
 * the intended ORCLUS assignment rule, not an oversight.
 *
 * @param database relation holding the vectors
 * @param clusters current clusters; member sets and centroids are updated
 */
private void assign ( Relation < V > database , List < ORCLUSCluster > clusters ) { NumberVectorDistanceFunction < ? super V > distFunc = SquaredEuclideanDistanceFunction . STATIC ; for ( ORCLUSCluster cluster : clusters ) { cluster . objectIDs . clear ( ) ; } List < NumberVector > projectedCentroids = new ArrayList < > ( clusters . size ( ) ) ; for ( ORCLUSCluster c : clusters ) { projectedCentroids . add ( DoubleVector . wrap ( project ( c , c . centroid ) ) ) ; } for ( DBIDIter it = database . iterDBIDs ( ) ; it . valid ( ) ; it . advance ( ) ) { double [ ] o = database . get ( it ) . toArray ( ) ; double minDist = Double . POSITIVE_INFINITY ; ORCLUSCluster minCluster = null ; for ( int i = 0 ; i < clusters . size ( ) ; i ++ ) { ORCLUSCluster c = clusters . get ( i ) ; NumberVector o_proj = DoubleVector . wrap ( project ( c , o ) ) ; double dist = distFunc . distance ( o_proj , projectedCentroids . get ( i ) ) ; if ( dist < minDist ) { minDist = dist ; minCluster = c ; } } minCluster . objectIDs . add ( it ) ; } for ( ORCLUSCluster cluster : clusters ) { if ( cluster . objectIDs . size ( ) > 0 ) { cluster . centroid = Centroid . make ( database , cluster . objectIDs ) . toArray ( ) ; } } }
Creates a partitioning of the database by assigning each object to its closest seed .
34,329
/**
 * Greedily reduce the number of clusters to k_new by repeatedly merging
 * the pair of clusters with the smallest projected energy (computed in
 * d_new dimensions).
 *
 * The projectedEnergies list caches the pairwise energies, keyed by the
 * clusters' list indices (i &lt; j). After each merge: the merged cluster
 * replaces index i, index j is removed from the list, cached entries that
 * touch i or j are discarded, cached indices above j are shifted down by
 * one, and energies of all remaining clusters against the merged cluster
 * are recomputed.
 *
 * NOTE(review): the index bookkeeping is highly order-sensitive; do not
 * reorder these steps.
 *
 * @param relation data relation
 * @param clusters cluster list, modified in place
 * @param k_new target number of clusters
 * @param d_new dimensionality used for the projected energy
 * @param cprogress progress logger, may be null
 */
private void merge ( Relation < V > relation , List < ORCLUSCluster > clusters , int k_new , int d_new , IndefiniteProgress cprogress ) { ArrayList < ProjectedEnergy > projectedEnergies = new ArrayList < > ( ( clusters . size ( ) * ( clusters . size ( ) - 1 ) ) >>> 1 ) ; for ( int i = 0 ; i < clusters . size ( ) ; i ++ ) { for ( int j = i + 1 ; j < clusters . size ( ) ; j ++ ) { ORCLUSCluster c_i = clusters . get ( i ) ; ORCLUSCluster c_j = clusters . get ( j ) ; projectedEnergies . add ( projectedEnergy ( relation , c_i , c_j , i , j , d_new ) ) ; } } while ( clusters . size ( ) > k_new ) { if ( cprogress != null ) { cprogress . setProcessed ( clusters . size ( ) , LOG ) ; } ProjectedEnergy minPE = Collections . min ( projectedEnergies ) ; for ( int c = 0 ; c < clusters . size ( ) ; c ++ ) { if ( c == minPE . i ) { clusters . remove ( c ) ; clusters . add ( c , minPE . cluster ) ; } if ( c == minPE . j ) { clusters . remove ( c ) ; } } int i = minPE . i , j = minPE . j ; for ( Iterator < ProjectedEnergy > it = projectedEnergies . iterator ( ) ; it . hasNext ( ) ; ) { ProjectedEnergy pe = it . next ( ) ; if ( pe . i == i || pe . i == j || pe . j == i || pe . j == j ) { it . remove ( ) ; } else { if ( pe . i > j ) { pe . i -= 1 ; } if ( pe . j > j ) { pe . j -= 1 ; } } } ORCLUSCluster c_ij = minPE . cluster ; for ( int c = 0 ; c < clusters . size ( ) ; c ++ ) { if ( c < i ) { projectedEnergies . add ( projectedEnergy ( relation , clusters . get ( c ) , c_ij , c , i , d_new ) ) ; } else if ( c > i ) { projectedEnergies . add ( projectedEnergy ( relation , clusters . get ( c ) , c_ij , i , c , d_new ) ) ; } } } }
Reduces the number of seeds to k_new
34,330
/**
 * Compute the projected energy of merging two clusters: the mean squared
 * distance of the union's members to the union's centroid, with all
 * points projected into the union's subspace.
 *
 * @param relation data relation
 * @param c_i first cluster
 * @param c_j second cluster
 * @param i list index of the first cluster
 * @param j list index of the second cluster
 * @param dim dimensionality of the union's subspace
 * @return projected energy record for the pair (i, j)
 */
private ProjectedEnergy projectedEnergy(Relation<V> relation, ORCLUSCluster c_i, ORCLUSCluster c_j, int i, int j, int dim) {
  NumberVectorDistanceFunction<? super V> distFunc = SquaredEuclideanDistanceFunction.STATIC;
  // Tentatively merge the two clusters and project onto the merged subspace.
  ORCLUSCluster merged = union(relation, c_i, c_j, dim);
  NumberVector projCentroid = DoubleVector.wrap(project(merged, merged.centroid));
  double energy = 0.;
  for(DBIDIter iter = merged.objectIDs.iter(); iter.valid(); iter.advance()) {
    NumberVector projObj = DoubleVector.wrap(project(merged, relation.get(iter).toArray()));
    energy += distFunc.distance(projObj, projCentroid);
  }
  // Mean squared distance to the projected centroid.
  return new ProjectedEnergy(i, j, merged, energy / merged.objectIDs.size());
}
Computes the projected energy of the specified clusters . The projected energy is given by the mean square distance of the points to the centroid of the union cluster c when all points in c are projected to the subspace of c .
34,331
/**
 * Build the union of two clusters: combined member set, fresh centroid and
 * basis. An empty union falls back to the midpoint of the two centroids
 * and an axis-parallel basis.
 *
 * @param relation data relation
 * @param c1 first cluster
 * @param c2 second cluster
 * @param dim dimensionality of the new basis
 * @return merged cluster
 */
private ORCLUSCluster union(Relation<V> relation, ORCLUSCluster c1, ORCLUSCluster c2, int dim) {
  ORCLUSCluster merged = new ORCLUSCluster();
  // Combine the member sets via a hash set to deduplicate, then store as array.
  merged.objectIDs = DBIDUtil.newHashSet(c1.objectIDs);
  merged.objectIDs.addDBIDs(c2.objectIDs);
  merged.objectIDs = DBIDUtil.newArray(merged.objectIDs);
  if(merged.objectIDs.size() > 0) {
    merged.centroid = Centroid.make(relation, merged.objectIDs).getArrayRef();
    merged.basis = findBasis(relation, merged, dim);
  }
  else {
    // No members: midpoint of the two centroids, axis-parallel basis.
    merged.centroid = timesEquals(plusEquals(c1.centroid, c2.centroid), .5);
    merged.basis = identity(dim, merged.centroid.length);
  }
  return merged;
}
Returns the union of the two specified clusters .
34,332
/**
 * Initialize the nearest-neighbor cache from a distance matrix stored as a
 * packed lower triangle in {@code scratch}: position p enumerates pairs
 * (x, y) with y &lt; x row by row (the assert cross-checks p against the
 * triangle size). For every element, bestd[i] receives the smallest
 * distance seen so far and besti[i] the index of the neighbor attaining
 * it (-1 if the element has no pairs, i.e. element 0 before any row).
 *
 * @param scratch packed lower-triangular distance matrix
 * @param bestd output: best (smallest) distance per element
 * @param besti output: neighbor index attaining bestd
 */
private static void initializeNNCache ( double [ ] scratch , double [ ] bestd , int [ ] besti ) { final int size = bestd . length ; Arrays . fill ( bestd , Double . POSITIVE_INFINITY ) ; Arrays . fill ( besti , - 1 ) ; for ( int x = 0 , p = 0 ; x < size ; x ++ ) { assert ( p == MatrixParadigm . triangleSize ( x ) ) ; double bestdx = Double . POSITIVE_INFINITY ; int bestix = - 1 ; for ( int y = 0 ; y < x ; y ++ , p ++ ) { final double v = scratch [ p ] ; if ( v < bestd [ y ] ) { bestd [ y ] = v ; besti [ y ] = x ; } if ( v < bestdx ) { bestdx = v ; bestix = y ; } } bestd [ x ] = bestdx ; besti [ x ] = bestix ; } }
Initialize the NN cache .
34,333
/**
 * Perform the next merge step: scan the nearest-neighbor cache for the
 * pair with the smallest pending distance and merge it.
 *
 * @param size working set size
 * @param mat matrix paradigm with distances and iterators
 * @param bestd nearest-neighbor distance cache
 * @param besti nearest-neighbor index cache (-1 = already merged away)
 * @param builder pointer hierarchy builder
 * @return index of the cluster that was merged away (the larger index)
 */
protected int findMerge(int size, MatrixParadigm mat, double[] bestd, int[] besti, PointerHierarchyRepresentationBuilder builder) {
  double best = Double.POSITIVE_INFINITY;
  int bx = -1, by = -1;
  for(int cx = 0; cx < size; cx++) {
    final int cy = besti[cx];
    if(cy < 0) {
      continue; // Entry was already merged away.
    }
    if(bestd[cx] <= best) {
      best = bestd[cx];
      bx = cx;
      by = cy;
    }
  }
  assert (bx >= 0 && by >= 0);
  assert (by < bx);
  merge(size, mat, bestd, besti, builder, best, bx, by);
  return bx;
}
Perform the next merge step .
34,334
/**
 * Execute the merge of clusters x and y (y &lt; x): record the merge in
 * the hierarchy builder (restoring the linkage-transformed distance),
 * accumulate the two cluster sizes into y, invalidate x in the
 * nearest-neighbor cache, update the affected distance matrix entries,
 * and re-scan y's nearest neighbor if it pointed at the vanished x.
 *
 * @param size working set size
 * @param mat matrix paradigm with distances and DBID iterators
 * @param bestd nearest-neighbor distance cache
 * @param besti nearest-neighbor index cache
 * @param builder pointer hierarchy builder
 * @param mindist merge distance as stored in the matrix
 * @param x larger-index cluster (absorbed; cache entry set to -1)
 * @param y smaller-index cluster (survives the merge)
 */
protected void merge ( int size , MatrixParadigm mat , double [ ] bestd , int [ ] besti , PointerHierarchyRepresentationBuilder builder , double mindist , int x , int y ) { final DBIDArrayIter ix = mat . ix . seek ( x ) , iy = mat . iy . seek ( y ) ; if ( LOG . isDebuggingFine ( ) ) { LOG . debugFine ( "Merging: " + DBIDUtil . toString ( ix ) + " -> " + DBIDUtil . toString ( iy ) + " " + mindist ) ; } assert ( y < x ) ; builder . add ( ix , linkage . restore ( mindist , getDistanceFunction ( ) . isSquared ( ) ) , iy ) ; final int sizex = builder . getSize ( ix ) , sizey = builder . getSize ( iy ) ; builder . setSize ( iy , sizex + sizey ) ; besti [ x ] = - 1 ; updateMatrix ( size , mat . matrix , iy , bestd , besti , builder , mindist , x , y , sizex , sizey ) ; if ( besti [ y ] == x ) { findBest ( size , mat . matrix , bestd , besti , y ) ; } }
Execute the cluster merge .
34,335
/**
 * Update the nearest-neighbor cache entry of element j after clusters x
 * and y were merged (with the merged cluster stored at y).
 *
 * @param size working set size
 * @param scratch packed distance matrix
 * @param bestd nearest-neighbor distance cache
 * @param besti nearest-neighbor index cache
 * @param x absorbed cluster index
 * @param y surviving (merged) cluster index
 * @param j element whose cache entry is updated
 * @param d new distance between j and the merged cluster
 */
private void updateCache(int size, double[] scratch, double[] bestd, int[] besti, int x, int y, int j, double d) {
  // The merged cluster is at least as close as the cached best: record it.
  if(d <= bestd[j]) {
    bestd[j] = d;
    besti[j] = y;
    return;
  }
  // The cached best pointed at one of the merged clusters; rescan row j.
  if(besti[j] == x || besti[j] == y) {
    findBest(size, scratch, bestd, besti, j);
  }
}
Update the cache .
34,336
/**
 * Make a new visualization context. Before constructing it, attach a
 * random sample to every relation that exceeds the configured sample size
 * and does not yet have one (unless sampling is disabled, samplesize == 0).
 *
 * @param hier result hierarchy
 * @param start starting result
 * @return new visualizer context
 */
public VisualizerContext newContext(ResultHierarchy hier, Result start) {
  for(Relation<?> rel : ResultUtil.filterResults(hier, Relation.class)) {
    if(samplesize == 0) {
      continue; // Sampling disabled.
    }
    if(!ResultUtil.filterResults(hier, rel, SamplingResult.class).isEmpty()) {
      continue; // A sample is already attached.
    }
    if(rel.size() > samplesize) {
      SamplingResult sample = new SamplingResult(rel);
      sample.setSample(DBIDUtil.randomSample(sample.getSample(), samplesize, rnd));
      ResultUtil.addChildResult(rel, sample);
    }
  }
  return new VisualizerContext(hier, start, stylelib, factories);
}
Make a new visualization context
34,337
/**
 * Try to automatically generate a title for a result by scanning its
 * tracked parameter settings for the algorithm, distance function and
 * input file options, then joining the shortened names in the form
 * "Algorithm using Distance on dataset". Only the options found
 * contribute; if none are present, null is returned.
 *
 * @param db database (currently unused)
 * @param result result whose settings are inspected
 * @return generated title, or null if no relevant setting was found
 */
public static String getTitle ( Database db , Result result ) { List < TrackedParameter > settings = new ArrayList < > ( ) ; for ( SettingsResult sr : SettingsResult . getSettingsResults ( result ) ) { settings . addAll ( sr . getSettings ( ) ) ; } String algorithm = null ; String distance = null ; String dataset = null ; for ( TrackedParameter setting : settings ) { Parameter < ? > param = setting . getParameter ( ) ; OptionID option = param . getOptionID ( ) ; String value = param . isDefined ( ) ? param . getValueAsString ( ) : null ; if ( option . equals ( AlgorithmStep . Parameterizer . ALGORITHM_ID ) ) { algorithm = value ; } if ( option . equals ( DistanceBasedAlgorithm . DISTANCE_FUNCTION_ID ) ) { distance = value ; } if ( option . equals ( FileBasedDatabaseConnection . Parameterizer . INPUT_ID ) ) { dataset = value ; } } StringBuilder buf = new StringBuilder ( ) ; if ( algorithm != null ) { buf . append ( shortenClassname ( algorithm . split ( "," ) [ 0 ] , '.' ) ) ; } if ( distance != null ) { if ( buf . length ( ) > 0 ) { buf . append ( " using " ) ; } buf . append ( shortenClassname ( distance , '.' ) ) ; } if ( dataset != null ) { if ( buf . length ( ) > 0 ) { buf . append ( " on " ) ; } buf . append ( shortenClassname ( dataset , File . separatorChar ) ) ; } if ( buf . length ( ) > 0 ) { return buf . toString ( ) ; } return null ; }
Try to automatically generate a title for this result .
34,338
/**
 * Shorten a class (or path) name: keep only the part after the last
 * occurrence of the separator character, if any.
 *
 * @param nam name to shorten
 * @param c separator character (e.g. '.' or a path separator)
 * @return shortened name; unchanged if the separator does not occur
 */
protected static String shortenClassname(String nam, char c) {
  final int pos = nam.lastIndexOf(c);
  return pos < 0 ? nam : nam.substring(pos + 1);
}
Shorten the class name .
34,339
/**
 * Get the widest restriction class used for an option: starting from the
 * restriction class of {@code firstopt}, replace the candidate whenever a
 * ClassParameter or ClassListParameter registered for the same option has
 * a restriction class that is a proper superclass (isAssignableFrom) of
 * the current candidate.
 *
 * @param oid option id to look up in the byopt index
 * @param firstopt parameter supplying the initial candidate class
 * @param byopt map from option id to the (parameter, owner class) pairs
 * @return the most general restriction class found
 */
private static Class < ? > getRestrictionClass ( OptionID oid , final Parameter < ? > firstopt , Map < OptionID , List < Pair < Parameter < ? > , Class < ? > > > > byopt ) { Class < ? > superclass = getRestrictionClass ( firstopt ) ; for ( Pair < Parameter < ? > , Class < ? > > clinst : byopt . get ( oid ) ) { if ( clinst . getFirst ( ) instanceof ClassParameter ) { ClassParameter < ? > cls = ( ClassParameter < ? > ) clinst . getFirst ( ) ; if ( ! cls . getRestrictionClass ( ) . equals ( superclass ) && cls . getRestrictionClass ( ) . isAssignableFrom ( superclass ) ) { superclass = cls . getRestrictionClass ( ) ; } } if ( clinst . getFirst ( ) instanceof ClassListParameter ) { ClassListParameter < ? > cls = ( ClassListParameter < ? > ) clinst . getFirst ( ) ; if ( ! cls . getRestrictionClass ( ) . equals ( superclass ) && cls . getRestrictionClass ( ) . isAssignableFrom ( superclass ) ) { superclass = cls . getRestrictionClass ( ) ; } } } return superclass ; }
Get the restriction class of an option .
34,340
/**
 * Return a sorted copy of a collection; the input is never modified.
 *
 * @param cls collection to copy
 * @param c comparator to sort by
 * @return new sorted ArrayList
 */
private static <T> ArrayList<T> sorted(Collection<T> cls, Comparator<? super T> c) {
  // Copy first so the caller's collection stays untouched.
  final ArrayList<T> copy = new ArrayList<>(cls);
  copy.sort(c);
  return copy;
}
Sort a collection of classes .
34,341
/**
 * Handle hover events: the tooltip element is expected to be the next
 * sibling of the event target; toggle it according to the event type.
 *
 * @param evt DOM event to handle
 */
protected void handleHoverEvent(Event evt) {
  if(!(evt.getTarget() instanceof Element)) {
    LoggingUtil.warning("Got event for non-Element?!?");
    return;
  }
  Node sibling = ((Element) evt.getTarget()).getNextSibling();
  if(sibling instanceof Element) {
    toggleTooltip((Element) sibling, evt.getType());
  }
  else {
    LoggingUtil.warning("Tooltip sibling not found.");
  }
}
Handle the hover events .
34,342
/**
 * Toggle the tooltip state of an element via its CSS class: mouseover
 * reveals a hidden tooltip, mouseout hides a visible (non-sticky) one,
 * and click toggles the sticky state.
 *
 * @param elem tooltip element
 * @param type SVG event type (mouseover, mouseout, click)
 */
protected void toggleTooltip(Element elem, String type) {
  final String csscls = elem.getAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE);
  if(SVGConstants.SVG_MOUSEOVER_EVENT_TYPE.equals(type)) {
    // Hover reveals hidden tooltips; sticky ones are left alone.
    if(TOOLTIP_HIDDEN.equals(csscls)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_VISIBLE);
    }
  }
  else if(SVGConstants.SVG_MOUSEOUT_EVENT_TYPE.equals(type)) {
    // Leaving hides the tooltip again, unless it is sticky.
    if(TOOLTIP_VISIBLE.equals(csscls)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_HIDDEN);
    }
  }
  else if(SVGConstants.SVG_CLICK_EVENT_TYPE.equals(type)) {
    // Click toggles stickiness (both tests use the class read before any write).
    if(TOOLTIP_STICKY.equals(csscls)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_HIDDEN);
    }
    if(TOOLTIP_HIDDEN.equals(csscls) || TOOLTIP_VISIBLE.equals(csscls)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_STICKY);
    }
  }
}
Toggle the Tooltip of an element .
34,343
/**
 * Perform an (approximate) reverse k-nearest-neighbor query for the given
 * object id. The tree is traversed with a priority queue; a subtree is
 * followed (respectively an object reported) when the minimum distance to
 * the query is within the entry's approximated k-distance — a polynomial
 * approximation, exponentiated when stored in log space, clamped to 0
 * when negative.
 *
 * Fix: the accompanying contract states the result is in ascending
 * distance order, but the list was never sorted; sort before returning.
 *
 * @param id query object id
 * @param k number of neighbors for the k-distance approximation
 * @return reverse-kNN candidates, sorted by ascending distance
 */
public DoubleDBIDList reverseKNNQuery(DBIDRef id, int k) {
  ModifiableDoubleDBIDList result = DBIDUtil.newDistanceDBIDList();
  final Heap<MTreeSearchCandidate> pq = new UpdatableHeap<>();
  // Start the traversal at the root, with minimum distance 0.
  pq.add(new MTreeSearchCandidate(0., getRootID(), null, Double.NaN));
  while(!pq.isEmpty()) {
    MTreeSearchCandidate pqNode = pq.poll();
    MkAppTreeNode<O> node = getNode(pqNode.nodeID);
    if(!node.isLeaf()) {
      for(int i = 0; i < node.getNumEntries(); i++) {
        MkAppEntry entry = node.getEntry(i);
        double distance = distance(entry.getRoutingObjectID(), id);
        // Minimum possible distance to any object in the subtree.
        double minDist = (entry.getCoveringRadius() > distance) ? 0. : distance - entry.getCoveringRadius();
        double approxValue = settings.log ? FastMath.exp(entry.approximatedValueAt(k)) : entry.approximatedValueAt(k);
        if(approxValue < 0) {
          approxValue = 0; // Clamp: a k-distance cannot be negative.
        }
        if(minDist <= approxValue) {
          pq.add(new MTreeSearchCandidate(minDist, getPageID(entry), entry.getRoutingObjectID(), Double.NaN));
        }
      }
    }
    else {
      for(int i = 0; i < node.getNumEntries(); i++) {
        MkAppLeafEntry entry = (MkAppLeafEntry) node.getEntry(i);
        double distance = distance(entry.getRoutingObjectID(), id);
        double approxValue = settings.log ? FastMath.exp(entry.approximatedValueAt(k)) : entry.approximatedValueAt(k);
        if(approxValue < 0) {
          approxValue = 0; // Clamp: a k-distance cannot be negative.
        }
        if(distance <= approxValue) {
          result.add(distance, entry.getRoutingObjectID());
        }
      }
    }
  }
  // Deliver the documented ascending distance order.
  result.sort();
  return result;
}
Performs a reverse k - nearest neighbor query for the given object ID . The query result is in ascending order to the distance to the query object .
34,344
/**
 * Determine the ids of all leaf entries stored in the given subtree, by
 * recursive traversal.
 *
 * @param node subtree root
 * @param result output set, leaf DBIDs are added here
 */
private void leafEntryIDs(MkAppTreeNode<O> node, ModifiableDBIDs result) {
  if(node.isLeaf()) {
    // Collect the object ids stored in this leaf.
    for(int i = 0; i < node.getNumEntries(); i++) {
      result.add(((LeafEntry) node.getEntry(i)).getDBID());
    }
    return;
  }
  // Inner node: recurse into every child.
  for(int i = 0; i < node.getNumEntries(); i++) {
    leafEntryIDs(getNode(node.getEntry(i)), result);
  }
}
Determines the ids of the leaf entries stored in the specified subtree .
34,345
/**
 * Computes a polynomial approximation of the given kNN distances, optionally
 * in log-log space (when settings.log is set).
 *
 * Leading zero distances are skipped in log mode (log of 0 is undefined).
 * NOTE(review): in log mode with k_0 == 0, x[0] = log(0) = -infinity —
 * confirm whether the index should be offset by one.
 *
 * @param knnDistances kNN distances, index i holding the (i+1)-NN distance
 * @return polynomial approximation of degree settings.p
 */
private PolynomialApproximation approximateKnnDistances(double[] knnDistances) {
  StringBuilder msg = new StringBuilder();
  // Count leading zero distances (only relevant in log mode).
  int k_0 = 0;
  if(settings.log) {
    for(int i = 0; i < settings.kmax; i++) {
      double dist = knnDistances[i];
      if(dist == 0) {
        k_0++;
      }
      else {
        break;
      }
    }
  }
  double[] x = new double[settings.kmax - k_0];
  double[] y = new double[settings.kmax - k_0];
  for(int k = 0; k < settings.kmax - k_0; k++) {
    if(settings.log) {
      x[k] = FastMath.log(k + k_0);
      y[k] = FastMath.log(knnDistances[k + k_0]);
    }
    else {
      x[k] = k + k_0;
      y[k] = knnDistances[k + k_0];
    }
  }
  // Fit a degree-p polynomial to the (x, y) pairs.
  PolynomialRegression regression = new PolynomialRegression(y, x, settings.p);
  PolynomialApproximation approximation = new PolynomialApproximation(regression.getEstimatedCoefficients());
  if(LOG.isDebugging()) {
    msg.append("approximation ").append(approximation);
    LOG.debugFine(msg.toString());
  }
  return approximation;
}
Computes the polynomial approximation of the specified knn - distances .
34,346
/**
 * Compare two points by their angular position around the origin o
 * (counterclockwise test via the cross product of the relative vectors).
 * Collinear points are ordered by Manhattan distance from the origin.
 * Assumes getRX / getRY are pure relative-coordinate accessors.
 *
 * @param a first point
 * @param b second point
 * @param o origin
 * @return comparison result (sign of the cross product, or distance order)
 */
protected final int isLeft(double[] a, double[] b, double[] o) {
  final double ax = getRX(a, o), ay = getRY(a, o);
  final double bx = getRX(b, o), by = getRY(b, o);
  final double cross = ax * by - ay * bx;
  if(cross == 0) {
    // Collinear: break the tie by Manhattan distance from the origin.
    final double da = Math.abs(ax) + Math.abs(ay);
    final double db = Math.abs(bx) + Math.abs(by);
    return Double.compare(da, db);
  }
  return Double.compare(cross, 0);
}
Test whether a point is left of the other wrt . the origin .
34,347
/**
 * Manhattan (L1) distance between two 2d points.
 *
 * @param a first point
 * @param b second point
 * @return sum of absolute coordinate differences
 */
private double mdist(double[] a, double[] b) {
  final double dx = a[0] - b[0], dy = a[1] - b[1];
  return Math.abs(dx) + Math.abs(dy);
}
Manhattan distance .
34,348
/**
 * Simple convexity test on the corner a-b-c, based on the signed doubled
 * triangle area (sign-adjusted by the scan direction factor). For
 * numerically collinear points, the corner counts as convex only when a
 * lies between b and c (triangle inequality on Manhattan distances).
 *
 * @param a previous point
 * @param b corner point
 * @param c next point
 * @return true if the corner is convex
 */
private boolean isConvex(double[] a, double[] b, double[] c) {
  final double area = (b[0] - a[0]) * factor * (c[1] - a[1]) - (c[0] - a[0]) * factor * (b[1] - a[1]);
  if(area <= -1e-13 || area >= 1e-13) {
    return area < 0;
  }
  // Degenerate (collinear) case: convex only if a is between b and c.
  return mdist(b, c) > mdist(a, b) + mdist(a, c);
}
Simple convexity test .
34,349
/**
 * The main Graham scan loop: sweep the (already angularly sorted) point list,
 * maintaining a stack of hull candidates and popping points that would make
 * the hull non-convex. The surviving stack replaces the point list.
 */
private void grahamScan() {
  if(points.size() < 3) {
    return; // trivially its own hull
  }
  Iterator<double[]> iter = points.iterator();
  Stack<double[]> stack = new Stack<>();
  // Seed the stack with the first point and the next distinct point.
  final double[] first = iter.next();
  stack.add(first);
  while(iter.hasNext()) {
    double[] n = iter.next();
    if(mdist(first, n) > 0) {
      stack.add(n);
      break;
    }
  }
  while(iter.hasNext()) {
    double[] next = iter.next();
    double[] curr = stack.pop();
    double[] prev = stack.peek();
    // Backtrack while the top of the stack duplicates next, or would form a
    // reflex (non-convex) corner with it.
    while((stack.size() > 1) && (mdist(curr, next) == 0 || !isConvex(prev, curr, next))) {
      curr = stack.pop();
      prev = stack.peek();
    }
    stack.add(curr);
    stack.add(next);
  }
  points = stack; // the remaining stack is the convex hull
}
The actual graham scan main loop .
34,350
/**
 * Compute the convex hull (lazily, if stale) and return it as a polygon
 * together with the bounding box of the input points.
 *
 * @return convex hull polygon
 */
public Polygon getHull() {
  if(!ok) {
    // Hull is stale; recompute before returning.
    computeConvexHull();
  }
  final double minx = minmaxX.getMin(), maxx = minmaxX.getMax();
  final double miny = minmaxY.getMin(), maxy = minmaxY.getMax();
  return new Polygon(points, minx, maxx, miny, maxy);
}
Compute the convex hull and return the resulting polygon .
34,351
/**
 * Find the cover radius of the partition containing point i: the maximum
 * distance from i to any other point assigned to the same partition.
 *
 * @param matrix pairwise distance matrix
 * @param idx partition assignment per point
 * @param i point whose partition radius is computed
 * @return maximum in-partition distance from i (0 if alone)
 */
private static double coverRadius(double[][] matrix, int[] idx, int i) {
  final int part = idx[i];
  final double[] dists = matrix[i];
  double radius = 0;
  for(int j = 0; j < dists.length; j++) {
    // Only compare against other members of the same partition.
    if(j != i && idx[j] == part) {
      radius = dists[j] > radius ? dists[j] : radius;
    }
  }
  return radius;
}
Find the cover radius of a partition .
34,352
/**
 * Partition the data via the minimum spanning tree: try omitting each
 * sufficiently long MST edge, and keep the split whose smallest resulting
 * component is largest (ties broken in favor of the longer omitted edge).
 *
 * @param matrix pairwise distance matrix
 * @return best partition assignment (component representative per point)
 */
private static int[] mstPartition(double[][] matrix) {
  final int n = matrix.length;
  int[] edges = PrimsMinimumSpanningTree.processDense(matrix);
  // Edges shorter than this threshold are never omitted.
  double meanlength = thresholdLength(matrix, edges);
  int[] idx = new int[n], best = new int[n], sizes = new int[n];
  int bestsize = -1;
  double bestlen = 0;
  for(int omit = n - 2; omit > 0; --omit) {
    final double len = edgelength(matrix, edges, omit);
    if(len < meanlength) {
      continue; // too short to be a separating edge
    }
    omitEdge(edges, idx, sizes, omit);
    // Flatten the union-find forest and find the smallest component size.
    int minsize = n;
    for(int i = 0; i < n; i++) {
      int j = idx[i] = follow(i, idx);
      if(j == i && sizes[i] < minsize) {
        minsize = sizes[i];
      }
    }
    // Prefer balanced splits; on ties, prefer removing the longer edge.
    if(minsize > bestsize || (minsize == bestsize && len > bestlen)) {
      bestsize = minsize;
      bestlen = len;
      System.arraycopy(idx, 0, best, 0, n);
    }
  }
  return best;
}
Partition the data using the minimum spanning tree.
34,353
/**
 * Choose the threshold length of edges to consider omitting: the (upper)
 * median of the MST edge lengths.
 *
 * @param matrix pairwise distance matrix
 * @param edges MST edge list (flat pairs of point indices)
 * @return median edge length
 */
private static double thresholdLength(double[][] matrix, int[] edges) {
  final int numedges = edges.length >> 1;
  double[] lengths = new double[numedges];
  for(int e = 0; e < numedges; e++) {
    final int pos = e << 1;
    lengths[e] = matrix[edges[pos]][edges[pos + 1]];
  }
  Arrays.sort(lengths);
  // Upper median for even counts.
  return lengths[numedges >> 1];
}
Choose the threshold length of edges to consider omitting.
34,354
/**
 * Look up the length of edge i in the flat edge list.
 *
 * @param matrix pairwise distance matrix
 * @param edges MST edge list (flat pairs of point indices)
 * @param i edge number
 * @return length of edge i
 */
private static double edgelength(double[][] matrix, int[] edges, int i) {
  final int pos = i << 1; // two array slots per edge
  return matrix[edges[pos]][edges[pos + 1]];
}
Length of edge i .
34,355
/**
 * Build the partition obtained by omitting one MST edge: union-find over all
 * remaining edges.
 *
 * @param edges MST edge list (flat pairs of point indices)
 * @param idx union-find parent array (output, reset here)
 * @param sizes component sizes, indexed by component root (output, reset here)
 * @param omit index of the edge to leave out
 */
private static void omitEdge(int[] edges, int[] idx, int[] sizes, int omit) {
  // Reset: every point is its own singleton component.
  for(int i = 0; i < idx.length; i++) {
    idx[i] = i;
  }
  Arrays.fill(sizes, 1);
  for(int i = 0, j = 0, e = edges.length - 1; j < e; i++, j += 2) {
    if(i == omit) {
      continue; // skip the omitted edge
    }
    // Normalize the edge so that ea <= eb.
    int ea = edges[j + 1], eb = edges[j];
    if(eb < ea) {
      int tmp = eb;
      eb = ea;
      ea = tmp;
    }
    final int pa = follow(ea, idx), pb = follow(eb, idx);
    assert (pa != pb) : "Must be disjoint - MST inconsistent.";
    // Merge the component of eb into the component of ea.
    sizes[idx[pa]] += sizes[idx[pb]];
    idx[pb] = idx[pa];
  }
}
Partition the data by omitting one edge .
34,356
/**
 * Union-find root lookup with path halving: every visited node is relinked
 * to its grandparent while walking up, flattening the tree.
 *
 * @param cur starting element
 * @param partitions union-find parent array (modified in place)
 * @return root (representative) of the component containing cur
 */
private static int follow(int cur, int[] partitions) {
  int parent = partitions[cur];
  while(cur != parent) {
    final int grand = partitions[parent];
    partitions[cur] = grand; // path halving: skip one level
    cur = parent;
    parent = grand;
  }
  return cur;
}
Union - find with simple path compression .
34,357
/**
 * Recompute the centroid of the given set of objects into the provided
 * buffer (overwritten, not accumulated).
 *
 * @param centroid output buffer, length = dimensionality
 * @param relation data relation
 * @param ids ids of the set members
 */
private static void computeCentroid(double[] centroid, Relation<? extends NumberVector> relation, DBIDs ids) {
  final int dim = centroid.length;
  Arrays.fill(centroid, 0);
  // Sum all member vectors componentwise.
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    final NumberVector vec = relation.get(it);
    for(int d = 0; d < dim; d++) {
      centroid[d] += vec.doubleValue(d);
    }
  }
  // Divide by the set size to get the mean.
  timesEquals(centroid, 1. / ids.size());
}
Recompute the centroid of a set .
34,358
/**
 * Get a distance query for the given distance function, automatically
 * choosing a suitable relation from the database.
 *
 * @param database database to query
 * @param distanceFunction distance function to use
 * @param hints optimizer hints
 * @return distance query over the matching relation
 */
public static <O> DistanceQuery<O> getDistanceQuery(Database database, DistanceFunction<? super O> distanceFunction, Object... hints) {
  // Pick the relation matching the distance function's input type.
  return database.getDistanceQuery(database.getRelation(distanceFunction.getInputTypeRestriction(), hints), distanceFunction, hints);
}
Get a distance query for a given distance function automatically choosing a relation .
34,359
/**
 * Get a similarity query for the given similarity function, automatically
 * choosing a suitable relation from the database.
 *
 * @param database database to query
 * @param similarityFunction similarity function to use
 * @param hints optimizer hints
 * @return similarity query over the matching relation
 */
public static <O> SimilarityQuery<O> getSimilarityQuery(Database database, SimilarityFunction<? super O> similarityFunction, Object... hints) {
  // Pick the relation matching the similarity function's input type.
  return database.getSimilarityQuery(database.getRelation(similarityFunction.getInputTypeRestriction(), hints), similarityFunction, hints);
}
Get a similarity query automatically choosing a relation .
34,360
/**
 * Get a reverse-kNN query object for the given distance function on the
 * given relation.
 *
 * @param relation relation to query
 * @param distanceFunction distance function to use
 * @param hints optimizer hints
 * @return reverse-kNN query
 */
public static <O> RKNNQuery<O> getRKNNQuery(Relation<O> relation, DistanceFunction<? super O> distanceFunction, Object... hints) {
  return relation.getRKNNQuery(relation.getDistanceQuery(distanceFunction, hints), hints);
}
Get a rKNN query object for the given distance function .
34,361
/**
 * Get a linear-scan range query for the given similarity query, using the
 * specialized primitive variant when the similarity is primitive.
 *
 * @param simQuery similarity query to wrap
 * @return linear scan range query
 */
public static <O> RangeQuery<O> getLinearScanSimilarityRangeQuery(SimilarityQuery<O> simQuery) {
  return simQuery instanceof PrimitiveSimilarityQuery //
      ? new LinearScanPrimitiveSimilarityRangeQuery<>((PrimitiveSimilarityQuery<O>) simQuery) //
      : new LinearScanSimilarityRangeQuery<>(simQuery);
}
Get a linear scan query for the given similarity query .
34,362
/**
 * Register a class name with the registry under the given parent interface,
 * creating the registry entry on first use.
 *
 * @param parent parent interface or base class
 * @param cname class name to register
 */
protected static void register(Class<?> parent, String cname) {
  Entry entry = data.get(parent);
  if(entry == null) {
    // First registration for this parent: create the entry.
    entry = new Entry();
    data.put(parent, entry);
  }
  entry.addName(cname);
}
Register a class with the registry .
34,363
/**
 * Register a loaded class with the registry under the given parent,
 * including any names declared via its @Alias annotation.
 *
 * @param parent parent interface or base class
 * @param clazz implementation class to register
 */
protected static void register(Class<?> parent, Class<?> clazz) {
  Entry entry = data.get(parent);
  if(entry == null) {
    // First registration for this parent: create the entry.
    entry = new Entry();
    data.put(parent, entry);
  }
  final String cname = clazz.getCanonicalName();
  entry.addHit(cname, clazz);
  // Also register any @Alias names, pointing them at the canonical name.
  Alias aliases = clazz.getAnnotation(Alias.class);
  if(aliases != null) {
    for(String alias : aliases.value()) {
      entry.addAlias(alias, cname);
    }
  }
}
Register a class in the registry .
34,364
/**
 * Register a class alias with the registry. The parent must already have a
 * registry entry.
 *
 * @param parent parent interface or base class
 * @param alias alias name
 * @param cname canonical class name the alias resolves to
 */
protected static void registerAlias(Class<?> parent, String alias, String cname) {
  final Entry entry = data.get(parent);
  assert (entry != null);
  entry.addAlias(alias, cname);
}
Register a class alias with the registry .
34,365
/**
 * Attempt to load a class by name via the registry class loader.
 *
 * @param classname fully qualified class name
 * @return the class, or null if it could not be found
 */
private static Class<?> tryLoadClass(String classname) {
  try {
    return CLASSLOADER.loadClass(classname);
  }
  catch(ClassNotFoundException ignored) {
    // Expected for speculative lookups; the caller tries alternatives.
    return null;
  }
}
Attempt to load a class
34,366
/**
 * Find all registered implementations of the given interface, lazily
 * triggering service loading and classpath scanning on first use.
 *
 * Class names that fail to load are cached with the FAILED_LOAD sentinel so
 * the expensive load attempt is not repeated; a warning is logged once.
 *
 * @param restrictionClass interface or base class; may be null
 * @return distinct loadable implementation classes, in registration order
 */
public static List<Class<?>> findAllImplementations(Class<?> restrictionClass) {
  if(restrictionClass == null) {
    return Collections.emptyList();
  }
  if(!contains(restrictionClass)) {
    // First request for this interface: populate the registry.
    ELKIServiceLoader.load(restrictionClass);
    ELKIServiceScanner.load(restrictionClass);
  }
  Entry e = data.get(restrictionClass);
  if(e == null) {
    return Collections.emptyList();
  }
  ArrayList<Class<?>> ret = new ArrayList<>(e.len);
  for(int pos = 0; pos < e.len; pos++) {
    Class<?> c = e.clazzes[pos];
    if(c == null) {
      // Not yet resolved: try loading by name, and cache the outcome.
      c = tryLoadClass(e.names[pos]);
      if(c == null) {
        LOG.warning("Failed to load class " + e.names[pos] + " for interface " + restrictionClass.getName());
        c = FAILED_LOAD;
      }
      e.clazzes[pos] = c;
    }
    if(c == FAILED_LOAD) {
      continue; // previously failed; skip silently
    }
    if(!ret.contains(c)) {
      ret.add(c); // avoid duplicates, keep order
    }
  }
  return ret;
}
Find all implementations of a particular interface .
34,367
/**
 * Find all implementations of the given class on the classpath, optionally
 * also including classes not present in the service index.
 *
 * @param c base class or interface; may be null
 * @param everything also include abstract / interface / private classes
 * @param parameterizable require a default constructor or a Parameterizer
 * @return known implementations, possibly extended by scan results
 */
public static List<Class<?>> findAllImplementations(Class<?> c, boolean everything, boolean parameterizable) {
  if(c == null) {
    return Collections.emptyList();
  }
  if(!everything && parameterizable) {
    // Fast path: the service registry alone is sufficient.
    return findAllImplementations(c);
  }
  List<Class<?>> known = findAllImplementations(c);
  HashSet<Class<?>> dupes = new HashSet<>(known);
  // Additionally consider classes not covered by the service index.
  for(Iterator<Class<?>> iter = ELKIServiceScanner.nonindexedClasses(); iter.hasNext();) {
    Class<?> cls = iter.next();
    if(dupes.contains(cls)) {
      continue;
    }
    // Skip non-instantiable classes unless "everything" was requested.
    if(!everything && (Modifier.isInterface(cls.getModifiers()) || Modifier.isAbstract(cls.getModifiers()) || Modifier.isPrivate(cls.getModifiers()))) {
      continue;
    }
    if(!c.isAssignableFrom(cls)) {
      continue;
    }
    if(parameterizable) {
      // Accept classes with a public no-args constructor or a Parameterizer.
      boolean instantiable = false;
      try {
        instantiable = cls.getConstructor() != null;
      }
      catch(Exception | Error e) {
        // Intentionally ignored: no accessible default constructor.
      }
      try {
        instantiable = instantiable || ClassGenericsUtil.getParameterizer(cls) != null;
      }
      catch(Exception | Error e) {
        // Intentionally ignored: no usable parameterizer.
      }
      if(!instantiable) {
        continue;
      }
    }
    known.add(cls);
    dupes.add(cls);
  }
  return known;
}
Find all implementations of a given class in the classpath .
34,368
/**
 * Try to resolve a class by alternate names, in order: the value with the
 * factory postfix, the value as given, the value prefixed by the restriction
 * class package (with, then without, the postfix), and finally via
 * registered aliases.
 *
 * @param restrictionClass context class whose package is used for
 *        package-relative lookups
 * @param value user-supplied class name
 * @param e registry entry holding aliases; may be null
 * @return resolved class, or null if nothing matched
 */
private static <C> Class<?> tryAlternateNames(Class<? super C> restrictionClass, String value, Entry e) {
  StringBuilder buf = new StringBuilder(value.length() + 100);
  // 1. value + factory postfix
  Class<?> clazz = tryLoadClass(buf.append(value).append(FACTORY_POSTFIX).toString());
  if(clazz != null) {
    return clazz;
  }
  // 2. value as given
  clazz = tryLoadClass(value);
  if(clazz != null) {
    return clazz;
  }
  // 3. restriction class package + value + factory postfix
  buf.setLength(0);
  clazz = tryLoadClass(buf.append(restrictionClass.getPackage().getName()).append('.').append(value).append(FACTORY_POSTFIX).toString());
  if(clazz != null) {
    return clazz;
  }
  // 4. restriction class package + value (strip the postfix again)
  buf.setLength(buf.length() - FACTORY_POSTFIX.length());
  String value2 = buf.toString();
  clazz = tryLoadClass(value2);
  if(clazz != null) {
    return clazz;
  }
  // 5. registered aliases (stored as flat alias/target pairs)
  if(e != null && e.aliaslen > 0) {
    for(int i = 0; i < e.aliaslen; i += 2) {
      if(e.aliases[i].equalsIgnoreCase(value) || e.aliases[i].equalsIgnoreCase(value2)) {
        return findImplementation(restrictionClass, e.aliases[++i]);
      }
    }
  }
  return null;
}
Try loading alternative names .
34,369
/**
 * Set up the drawing canvas for this visualization, using the margin size
 * from the style library.
 *
 * @return the canvas layer element (also stored in this.layer)
 */
protected Element setupCanvas() {
  final double margin = context.getStyleLibrary().getSize(StyleLibrary.MARGIN);
  layer = setupCanvas(svgp, proj, margin, getWidth(), getHeight());
  return layer;
}
Setup our canvas .
34,370
/**
 * Get the output type from the input type after conversion: a vector field
 * of the target dimensionality tdim, using the given vector factory.
 *
 * @param in input type information (not used here)
 * @param factory vector factory for the output vectors
 * @return output vector field type
 */
protected SimpleTypeInformation<?> convertedType(SimpleTypeInformation<?> in, NumberVector.Factory<V> factory) {
  return new VectorFieldTypeInformation<>(factory, tdim);
}
Get the output type from the input type after conversion .
34,371
/**
 * Partition the bundle based on the class label: map each distinct label to
 * the list of row indexes carrying that label.
 *
 * @param classcolumn column of class labels, one per row
 * @return map from label to the row indexes with that label
 */
protected <O> Map<O, IntList> partition(List<? extends O> classcolumn) {
  Map<O, IntList> classes = new HashMap<>();
  // computeIfAbsent replaces the verbose get/null-check/put pattern;
  // enhanced for with a manual counter replaces the explicit iterator.
  int row = 0;
  for(O lbl : classcolumn) {
    classes.computeIfAbsent(lbl, k -> new IntArrayList()).add(row++);
  }
  return classes;
}
Partition the bundle based on the class label .
34,372
/**
 * Create and register a new curve; its index is the current curve count.
 *
 * @return the newly created curve
 */
public Curve makeCurve() {
  final Curve curve = new Curve(curves.size());
  curves.add(curve);
  return curve;
}
Make a new curve .
34,373
/**
 * Print a message as if it were logged, without going through the full
 * logging infrastructure.
 *
 * @param message message text
 * @param level log level to render the message at
 * @throws RuntimeException if the styled document rejects the insertion
 */
public void publish(String message, Level level) {
  try {
    publish(new LogRecord(level, message));
  }
  catch(BadLocationException e) {
    throw new RuntimeException("Error writing a log-like message.", e);
  }
}
Print a message as if it were logged without going through the full logger .
34,374
/**
 * Publish a log record to the logging pane: choose formatter and text style
 * by severity, then append to the styled document. Progress records
 * overwrite the current unterminated line instead of appending a new one.
 *
 * Synchronized because lastNewlinePos and the document are shared state.
 *
 * @param record log record to render
 * @throws BadLocationException on invalid document offsets
 */
protected synchronized void publish(LogRecord record) throws BadLocationException {
  // Choose formatter and visual style based on severity.
  final Formatter fmt;
  final Style style;
  if(record.getLevel().intValue() >= Level.WARNING.intValue()) {
    fmt = errformat;
    style = errStyle;
  }
  else if(record.getLevel().intValue() <= Level.FINE.intValue()) {
    fmt = debugformat;
    style = dbgStyle;
  }
  else {
    fmt = msgformat;
    style = msgStyle;
  }
  final String m;
  m = fmt.format(record);
  StyledDocument doc = getStyledDocument();
  if(record instanceof ProgressLogRecord) {
    // Progress update: replace the trailing partial line.
    if(lastNewlinePos < doc.getLength()) {
      doc.remove(lastNewlinePos, doc.getLength() - lastNewlinePos);
    }
  }
  else {
    // Regular record: terminate any open partial line first.
    if(lastNewlinePos < doc.getLength()) {
      doc.insertString(doc.getLength(), "\n", style);
      lastNewlinePos = doc.getLength();
    }
  }
  // Split the message into the part ending at the last newline ("head") and
  // the trailing newline-free part ("tail"), so lastNewlinePos stays correct.
  int tail = tailingNonNewline(m, 0, m.length());
  int headlen = m.length() - tail;
  if(headlen > 0) {
    String pre = m.substring(0, headlen);
    doc.insertString(doc.getLength(), pre, style);
  }
  lastNewlinePos = doc.getLength();
  if(tail > 0) {
    String post = m.substring(m.length() - tail);
    doc.insertString(lastNewlinePos, post, style);
  }
}
Publish a log record to the logging pane .
34,375
/**
 * Perform the iterative SNE gradient descent optimization.
 *
 * The meta array packs three blocks of dim doubles per point; the third
 * block is initialized to 1 here (the first two start at 0). The exact
 * block semantics are defined by updateSolution / computeGradient.
 *
 * @param pij input affinity matrix
 * @param sol projected coordinates, optimized in place
 */
protected void optimizeSNE(AffinityMatrix pij, double[][] sol) {
  final int size = pij.size();
  // Guard against exceeding the maximum Java array length.
  if(size * 3L * dim > 0x7FFF_FFFAL) {
    throw new AbortException("Memory exceeds Java array size limit.");
  }
  // Per-point meta data: 3 * dim doubles each.
  double[] meta = new double[size * 3 * dim];
  final int dim3 = dim * 3;
  // Initialize the third block of every point to 1.
  for(int off = 2 * dim; off < meta.length; off += dim3) {
    Arrays.fill(meta, off, off + dim, 1.);
  }
  double[][] qij = new double[size][size];
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Iterative Optimization", iterations, LOG) : null;
  Duration timer = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".runtime.optimization").begin() : null;
  for(int it = 0; it < iterations; it++) {
    // Recompute embedded-space similarities, then take one gradient step.
    double qij_sum = computeQij(qij, sol);
    computeGradient(pij, qij, 1. / qij_sum, sol, meta);
    updateSolution(sol, meta, it);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  if(timer != null) {
    LOG.statistics(timer.end());
  }
}
Perform the actual tSNE optimization .
34,376
/**
 * Compute the (unnormalized, symmetric) embedded-space similarities qij of
 * the current solution, and return their total sum.
 *
 * Only the lower triangle is computed; each value is mirrored to the upper
 * triangle, and the sum is doubled accordingly.
 *
 * @param qij output similarity matrix (filled symmetrically)
 * @param solution current projected coordinates
 * @return sum over all qij (both triangles)
 */
protected double computeQij(double[][] qij, double[][] solution) {
  double sum = 0;
  for(int i = 1; i < qij.length; i++) {
    final double[] row = qij[i], vi = solution[i];
    for(int j = 0; j < i; j++) {
      final double q = MathUtil.exp(-sqDist(vi, solution[j]));
      row[j] = qij[j][i] = q; // mirror into the upper triangle
      sum += q;
    }
  }
  // The lower triangle was summed once; the matrix is symmetric.
  return sum * 2;
}
Compute the qij of the solution and the sum .
34,377
/**
 * Compute the gradient for each point and store it in the first dim slots of
 * that point's block in the meta array (the other two blocks are untouched).
 *
 * @param pij input affinities
 * @param qij unnormalized embedded-space similarities
 * @param qij_isum inverse of the qij normalization sum
 * @param sol current projected coordinates
 * @param meta packed per-point meta array; gradient block is overwritten
 */
protected void computeGradient(AffinityMatrix pij, double[][] qij, double qij_isum, double[][] sol, double[] meta) {
  final int dim3 = dim * 3;
  int size = pij.size();
  for(int i = 0, off = 0; i < size; i++, off += dim3) {
    final double[] sol_i = sol[i], qij_i = qij[i];
    // Reset this point's gradient block.
    Arrays.fill(meta, off, off + dim, 0.);
    for(int j = 0; j < size; j++) {
      if(i == j) {
        continue; // no self-interaction
      }
      final double[] sol_j = sol[j];
      final double qij_ij = qij_i[j];
      // Normalized similarity, clamped below to avoid degenerate gradients.
      final double q = MathUtil.max(qij_ij * qij_isum, MIN_QIJ);
      double a = 4 * (pij.get(i, j) - q);
      for(int k = 0; k < dim; k++) {
        meta[off + k] += a * (sol_i[k] - sol_j[k]);
      }
    }
  }
}
Compute the gradients .
34,378
/**
 * Run the clustering-based outlier detection: cluster the data, then score
 * each object by its distance to the prototype of its cluster.
 *
 * @param database database to query
 * @param relation data relation
 * @return outlier result with distance-to-prototype scores
 */
public OutlierResult run(Database database, Relation<O> relation) {
  DistanceFunction<? super O> df = clusterer.getDistanceFunction();
  DistanceQuery<O> dq = database.getDistanceQuery(relation, df);
  Clustering<?> c = clusterer.run(database, relation);
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_DB);
  DoubleMinMax mm = new DoubleMinMax();
  // Factory to materialize cluster prototypes as vectors of type O.
  @SuppressWarnings("unchecked")
  NumberVector.Factory<O> factory = (NumberVector.Factory<O>) RelationUtil.assumeVectorField(relation).getFactory();
  List<? extends Cluster<?>> clusters = c.getAllClusters();
  for(Cluster<?> cluster : clusters) {
    // Score each member by its distance to the cluster prototype.
    O mean = factory.newNumberVector(ModelUtil.getPrototype(cluster.getModel(), relation));
    for(DBIDIter iter = cluster.getIDs().iter(); iter.valid(); iter.advance()) {
      double dist = dq.distance(mean, iter);
      scores.put(iter, dist);
      mm.put(dist);
    }
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("KMeans outlier scores", "kmeans-outlier", scores, relation.getDBIDs());
  // Distances are unbounded above; 0 is the theoretical minimum and baseline.
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(mm.getMin(), mm.getMax(), 0., Double.POSITIVE_INFINITY, 0.);
  return new OutlierResult(scoreMeta, scoreResult);
}
Run the outlier detection algorithm .
34,379
/**
 * Evaluate the mixture of Gaussians at position x, returning the function
 * value and the partial derivatives with respect to each parameter.
 *
 * Parameters come in triples (mean, stddev, weight); each component
 * contributes weight * N(x; mean, stddev).
 *
 * @param x evaluation position
 * @param params flat parameter array, length a multiple of 3
 * @return value and per-parameter gradients at x
 */
public FittingFunctionResult eval(double x, double[] params) {
  final int len = params.length;
  assert (len % 3) == 0; // (mean, stddev, weight) triples
  double y = 0.0;
  double[] gradients = new double[len];
  for(int i = 2; i < params.length; i += 3) {
    final double mean = params[i - 2], sigma = params[i - 1], weight = params[i];
    final double stdpar = (x - mean) / sigma; // standardized position
    final double e = FastMath.exp(-.5 * stdpar * stdpar);
    final double localy = weight / (sigma * MathUtil.SQRTTWOPI) * e;
    y += localy;
    gradients[i - 2] = localy * stdpar; // w.r.t. mean
    gradients[i - 1] = (stdpar * stdpar - 1.0) * localy; // w.r.t. stddev
    gradients[i] = e / (sigma * MathUtil.SQRTTWOPI); // w.r.t. weight
  }
  return new FittingFunctionResult(y, gradients);
}
Compute the mixture of Gaussians at the given position
34,380
/**
 * Render a single visualization into a fresh plot and display it in a
 * simple SVG viewer window.
 *
 * @param context visualizer context
 * @param factory visualizer factory
 * @param task visualization task to render
 */
private void showVisualization(VisualizerContext context, SimilarityMatrixVisualizer factory, VisualizationTask task) {
  VisualizationPlot plot = new VisualizationPlot();
  Visualization vis = factory.makeVisualization(context, task, plot, 1.0, 1.0, null);
  plot.getRoot().appendChild(vis.getLayer());
  // Fixed 20x20 cm output with a unit view box.
  plot.getRoot().setAttribute(SVGConstants.SVG_WIDTH_ATTRIBUTE, "20cm");
  plot.getRoot().setAttribute(SVGConstants.SVG_HEIGHT_ATTRIBUTE, "20cm");
  plot.getRoot().setAttribute(SVGConstants.SVG_VIEW_BOX_ATTRIBUTE, "0 0 1 1");
  plot.updateStyleElement();
  new SimpleSVGViewer().setPlot(plot);
}
Show a single visualization .
34,381
/**
 * Process a whole array of int values by feeding each element to the
 * single-value put.
 *
 * @param data values to process, in order
 */
public void put(int[] data) {
  // Enhanced for is the idiomatic form for a full forward traversal.
  for(int value : data) {
    put(value);
  }
}
Process a whole array of int values .
34,382
/**
 * Run the KDEOS outlier detection algorithm: precompute kNN, estimate kernel
 * densities for each k in [kmin, kmax], then derive standardized outlier
 * scores from the density estimates.
 *
 * @param database database to query
 * @param rel data relation
 * @return outlier result with probabilistic scores
 */
public OutlierResult run(Database database, Relation<O> rel) {
  final DBIDs ids = rel.getDBIDs();
  LOG.verbose("Running kNN preprocessor.");
  // kmax + 1 neighbors, because the query point is its own nearest neighbor.
  KNNQuery<O> knnq = DatabaseUtil.precomputedKNNQuery(database, rel, getDistanceFunction(), kmax + 1);
  // Phase 1: per-object density estimates, one per k in [kmin, kmax].
  WritableDataStore<double[]> densities = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, double[].class);
  estimateDensities(rel, knnq, ids, densities);
  // Phase 2: standardized scores from the density estimates.
  WritableDoubleDataStore kofs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_DB);
  DoubleMinMax minmax = new DoubleMinMax();
  computeOutlierScores(knnq, ids, densities, kofs, minmax);
  DoubleRelation scoreres = new MaterializedDoubleRelation("Kernel Density Estimation Outlier Scores", "kdeos-outlier", kofs, ids);
  OutlierScoreMeta meta = new ProbabilisticOutlierScore(minmax.getMin(), minmax.getMax());
  return new OutlierResult(meta, scoreres);
}
Run the KDEOS outlier detection algorithm .
34,383
/**
 * Kernel density estimation phase: for each object and each k in
 * [kmin, kmax], spread kernel contributions onto its neighbors, with a
 * bandwidth derived from the mean k-distance (bounded below by
 * minBandwidth * scale, when minBandwidth is set).
 *
 * @param rel data relation (used for the dimensionality only)
 * @param knnq precomputed kNN query
 * @param ids ids to process
 * @param densities output store: per object, one accumulated density per k
 */
protected void estimateDensities(Relation<O> rel, KNNQuery<O> knnq, final DBIDs ids, WritableDataStore<double[]> densities) {
  final int dim = dimensionality(rel);
  final int knum = kmax + 1 - kmin;
  // Initialize all density accumulators to zero.
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    densities.put(iter, new double[knum]);
  }
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing densities", ids.size(), LOG) : null;
  // Upper bound on the inverse bandwidth (i.e. lower bound on bandwidth).
  double iminbw = (minBandwidth > 0.) ? 1. / (minBandwidth * scale) : Double.POSITIVE_INFINITY;
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    KNNList neighbors = knnq.getKNNForDBID(iter, kmax + 1);
    int k = 1, idx = 0;
    double sum = 0.; // running sum of the first k distances
    for(DoubleDBIDListIter kneighbor = neighbors.iter(); k <= kmax && kneighbor.valid(); kneighbor.advance(), k++) {
      sum += kneighbor.doubleValue();
      if(k < kmin) {
        continue; // densities are only estimated for k >= kmin
      }
      // Inverse bandwidth from the mean k-distance, capped at iminbw.
      final double ibw = Math.min(k / (sum * scale), iminbw);
      // Normalization factor ibw^dim (may overflow to +infinity).
      final double sca = MathUtil.powi(ibw, dim);
      for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
        final double dens;
        if(sca < Double.POSITIVE_INFINITY) {
          dens = sca * kernel.density(neighbor.doubleValue() * ibw);
        }
        else {
          // Degenerate zero bandwidth: point mass at distance 0.
          dens = neighbor.doubleValue() == 0. ? 1. : 0.;
        }
        densities.get(neighbor)[idx] += dens;
        // Neighbors are sorted by distance; once a contribution falls below
        // the cutoff, all remaining ones are negligible.
        if(dens < CUTOFF) {
          break;
        }
      }
      ++idx;
    }
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
Perform the kernel density estimation step .
34,384
/**
 * Determine the dimensionality to use: the user-specified intrinsic
 * dimensionality if set, otherwise the vector field dimensionality of the
 * relation.
 *
 * @param rel data relation
 * @return dimensionality to use for density normalization
 * @throws AbortException if the relation is not a vector field and no
 *         intrinsic dimensionality was configured
 */
private int dimensionality(Relation<O> rel) {
  if(idim >= 0) {
    // User supplied an intrinsic dimensionality; use it directly.
    return idim;
  }
  @SuppressWarnings("unchecked")
  final Relation<NumberVector> vrel = (Relation<NumberVector>) rel;
  final int dim = RelationUtil.dimensionality(vrel);
  if(dim < 1) {
    throw new AbortException("When using KDEOS with non-vectorspace data, the intrinsic dimensionality parameter must be set!");
  }
  return dim;
}
Ugly hack to allow using this implementation without having a well - defined dimensionality .
34,385
/**
 * Scoring phase: compare each object's density with the density distribution
 * of its neighbors (for every k), average the z-scores over all k, and map
 * the result through the standard normal CDF to a probability-like score.
 *
 * @param knnq precomputed kNN query
 * @param ids ids to score
 * @param densities per-object density estimates from the first phase
 * @param kdeos output score store
 * @param minmax output score range tracker
 */
protected void computeOutlierScores(KNNQuery<O> knnq, final DBIDs ids, WritableDataStore<double[]> densities, WritableDoubleDataStore kdeos, DoubleMinMax minmax) {
  final int knum = kmax + 1 - kmin;
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing KDEOS scores", ids.size(), LOG) : null;
  // Scratch buffer: per k, the neighbors' densities (grown on demand).
  double[][] scratch = new double[knum][kmax + 5];
  MeanVariance mv = new MeanVariance();
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    double[] dens = densities.get(iter);
    KNNList neighbors = knnq.getKNNForDBID(iter, kmax + 1);
    if(scratch[0].length < neighbors.size()) {
      scratch = new double[knum][neighbors.size() + 5];
    }
    { // Gather the neighbors' density vectors, transposed into scratch.
      int i = 0;
      for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance(), i++) {
        double[] ndens = densities.get(neighbor);
        for(int k = 0; k < knum; k++) {
          scratch[k][i] = ndens[k];
        }
      }
      assert (i == neighbors.size());
    }
    // Average z-score of this object's density against its neighborhood.
    double score = 0.;
    for(int i = 0; i < knum; i++) {
      mv.reset();
      for(int j = 0; j < neighbors.size(); j++) {
        mv.put(scratch[i][j]);
      }
      final double mean = mv.getMean(), stddev = mv.getSampleStddev();
      if(stddev > 0.) {
        score += (mean - dens[i]) / stddev;
      }
    }
    score /= knum;
    // Map the averaged z-score into [0, 1].
    score = NormalDistribution.standardNormalCDF(score);
    minmax.put(score);
    kdeos.put(iter, score);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
Compute the final KDEOS scores .
34,386
/**
 * Run CASH on the relation: convert vectors into parameterization functions,
 * run the recursive interval search, and log a cluster summary in verbose
 * mode.
 *
 * Fix: use chained StringBuilder.append calls instead of building
 * intermediate strings via concatenation inside append() (output unchanged).
 *
 * @param rel input vector relation
 * @return clustering result
 */
public Clustering<Model> run(Relation<V> rel) {
  fulldatabase = preprocess(rel);
  processedIDs = DBIDUtil.newHashSet(fulldatabase.size());
  noiseDim = dimensionality(fulldatabase);
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("CASH Clustering", fulldatabase.size(), LOG) : null;
  Clustering<Model> result = doRun(fulldatabase, progress);
  LOG.ensureCompleted(progress);
  if(LOG.isVerbose()) {
    StringBuilder msg = new StringBuilder(1000);
    for(Cluster<Model> c : result.getAllClusters()) {
      if(c.getModel() instanceof LinearEquationModel) {
        LinearEquationModel s = (LinearEquationModel) c.getModel();
        msg.append("\n Cluster: Dim: ").append(s.getLes().subspacedim()).append(" size: ").append(c.size());
      }
      else {
        msg.append("\n Cluster: ").append(c.getModel().getClass().getName()).append(" size: ").append(c.size());
      }
    }
    LOG.verbose(msg.toString());
  }
  return result;
}
Run CASH on the relation .
34,387
/**
 * Preprocessing step: wrap every input vector in a ParameterizationFunction
 * and materialize these in a new relation over the same ids.
 *
 * @param vrel input vector relation
 * @return materialized relation of parameterization functions
 */
private Relation<ParameterizationFunction> preprocess(Relation<V> vrel) {
  final DBIDs ids = vrel.getDBIDs();
  WritableDataStore<ParameterizationFunction> store = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_HOT, ParameterizationFunction.class);
  // Wrap each vector as a parameterization function.
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    store.put(it, new ParameterizationFunction(vrel.get(it)));
  }
  final SimpleTypeInformation<ParameterizationFunction> type = new SimpleTypeInformation<>(ParameterizationFunction.class);
  return new MaterializedRelation<>(type, ids, null, store);
}
Preprocess the dataset precomputing the parameterization functions .
34,388
/**
 * Initialize the heap with the root intervals: split the distance range
 * [d_min, d_max] into intervals of length at most jitter, and enqueue every
 * interval supported by at least minPts functions.
 *
 * NOTE(review): for the last interval, d_maxs[i] is set to d_max - d_mins[i]
 * rather than d_max — verify this is intentional.
 *
 * @param heap heap to fill with root intervals
 * @param relation relation of parameterization functions
 * @param dim current dimensionality
 * @param ids ids to distribute into the intervals
 */
private void initHeap(ObjectHeap<CASHInterval> heap, Relation<ParameterizationFunction> relation, int dim, DBIDs ids) {
  CASHIntervalSplit split = new CASHIntervalSplit(relation, minPts);
  // Determine the overall distance range of the function space.
  double[] minMax = determineMinMaxDistance(relation, dim);
  double d_min = minMax[0], d_max = minMax[1];
  double dIntervalLength = d_max - d_min;
  int numDIntervals = (int) FastMath.ceil(dIntervalLength / jitter);
  double dIntervalSize = dIntervalLength / numDIntervals;
  double[] d_mins = new double[numDIntervals], d_maxs = new double[numDIntervals];
  if(LOG.isVerbose()) {
    LOG.verbose(new StringBuilder().append("d_min ").append(d_min).append("\nd_max ").append(d_max).append("\nnumDIntervals ").append(numDIntervals).append("\ndIntervalSize ").append(dIntervalSize).toString());
  }
  // Full angular range [0, pi] in each of the dim-1 alpha dimensions.
  double[] alphaMin = new double[dim - 1], alphaMax = new double[dim - 1];
  Arrays.fill(alphaMax, Math.PI);
  for(int i = 0; i < numDIntervals; i++) {
    d_mins[i] = (i == 0) ? d_min : d_maxs[i - 1];
    d_maxs[i] = (i < numDIntervals - 1) ? d_mins[i] + dIntervalSize : d_max - d_mins[i];
    HyperBoundingBox alphaInterval = new HyperBoundingBox(alphaMin, alphaMax);
    ModifiableDBIDs intervalIDs = split.determineIDs(ids, alphaInterval, d_mins[i], d_maxs[i]);
    // Only intervals with sufficient support become search candidates.
    if(intervalIDs != null && intervalIDs.size() >= minPts) {
      heap.add(new CASHInterval(alphaMin, alphaMax, split, intervalIDs, -1, 0, d_mins[i], d_maxs[i]));
    }
  }
  if(LOG.isDebuggingFiner()) {
    LOG.debugFiner(new StringBuilder().append("heap.size: ").append(heap.size()).toString());
  }
}
Initializes the heap with the root intervals .
34,389
/**
 * Build a (dim-1)-dimensional database in which the objects are projected
 * into the subspace spanned by the given basis.
 *
 * @param dim current dimensionality (the projected space has dim - 1)
 * @param basis subspace basis to project onto
 * @param ids ids of the objects to project
 * @param relation source relation of parameterization functions
 * @return materialized relation of projected functions (also registered in a
 *         fresh proxy database)
 */
private MaterializedRelation<ParameterizationFunction> buildDB(int dim, double[][] basis, DBIDs ids, Relation<ParameterizationFunction> relation) {
  ProxyDatabase proxy = new ProxyDatabase(ids);
  SimpleTypeInformation<ParameterizationFunction> type = new SimpleTypeInformation<>(ParameterizationFunction.class);
  WritableDataStore<ParameterizationFunction> prep = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_HOT, ParameterizationFunction.class);
  // Project each function into the subspace.
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    prep.put(iter, project(basis, relation.get(iter)));
  }
  if(LOG.isDebugging()) {
    LOG.debugFine("db fuer dim " + (dim - 1) + ": " + ids.size());
  }
  MaterializedRelation<ParameterizationFunction> prel = new MaterializedRelation<>(type, ids, null, prep);
  proxy.addRelation(prel);
  return prel;
}
Builds a dim - 1 dimensional database where the objects are projected into the specified subspace .
34,390
/**
 * Project the given parameterization function into the subspace described
 * by the basis: basis^T * v.
 *
 * @param basis subspace basis
 * @param f function to project
 * @return projected parameterization function
 */
private ParameterizationFunction project(double[][] basis, ParameterizationFunction f) {
  return new ParameterizationFunction(DoubleVector.wrap(transposeTimes(basis, f.getColumnVector())));
}
Projects the specified parameterization function into the subspace described by the given basis .
34,391
/**
 * Determine an orthonormal basis of the subspace orthogonal to the normal
 * vector defined by the given alpha angles, via Gram-Schmidt on the unit
 * vectors (skipping numerically degenerate directions).
 *
 * @param alpha angular coordinates of the normal vector
 * @return basis of the orthogonal subspace (transposed, i.e. as columns)
 */
private double[][] determineBasis(double[] alpha) {
  final int dim = alpha.length;
  // Normal vector in Cartesian coordinates from the spherical angles.
  double[] nn = new double[dim + 1];
  for(int i = 0; i < nn.length; i++) {
    double alpha_i = i == alpha.length ? 0 : alpha[i];
    nn[i] = ParameterizationFunction.sinusProduct(0, i, alpha) * FastMath.cos(alpha_i);
  }
  timesEquals(nn, 1. / euclideanLength(nn)); // normalize
  // Gram-Schmidt: orthogonalize unit vectors against nn and found basis.
  double[][] basis = new double[dim][];
  int found = 0;
  for(int i = 0; i < nn.length && found < dim; i++) {
    final double[] e_i = new double[nn.length];
    e_i[i] = 1.0;
    minusTimesEquals(e_i, nn, scalarProduct(e_i, nn));
    double len = euclideanLength(e_i);
    for(int j = 0; j < found; j++) {
      if(len < 1e-9) {
        break; // numerically degenerate; abandon this direction
      }
      minusTimesEquals(e_i, basis[j], scalarProduct(e_i, basis[j]));
      len = euclideanLength(e_i);
    }
    if(len < 1e-9) {
      continue;
    }
    timesEquals(e_i, 1. / len);
    basis[found++] = e_i;
  }
  // Pad with zero vectors if fewer than dim directions survived.
  if(found < dim) {
    for(int i = found; i < dim; i++) {
      basis[i] = new double[nn.length];
    }
  }
  return transpose(basis);
}
Determines a basis defining a subspace described by the specified alpha values .
34,392
/**
 * Determine the next best interval at maximum level, i.e. the next interval
 * containing the most unprocessed objects.
 *
 * Fix: guard against an initially empty heap — the original called the
 * worker first, which polls null from an empty heap and dereferences it
 * (NullPointerException). Now an empty heap yields null directly.
 *
 * @param heap heap of candidate intervals
 * @return next interval at maximum level, or null if none remains
 */
private CASHInterval determineNextIntervalAtMaxLevel(ObjectHeap<CASHInterval> heap) {
  while(!heap.isEmpty()) {
    CASHInterval next = doDetermineNextIntervalAtMaxLevel(heap);
    if(next != null) {
      return next;
    }
    // Worker returned null (no children / heap overflow); retry while
    // candidates remain.
  }
  return null;
}
Determines the next best interval at maximum level i . e . the next interval containing the most unprocessed objects .
34,393
/**
 * Recursive helper method to determine the next best interval at maximum
 * level, i.e., the next interval containing the most unprocessed objects.
 * Pops the top interval from the heap and repeatedly splits it, descending
 * into the better child and pushing the other back onto the heap, until an
 * interval at maximum level with full split dimension is reached.
 *
 * @param heap priority queue of candidate intervals (mutated: polled and
 *        added to; cleared entirely if it grows past the hard limit)
 * @return the next interval at maximum level, or {@code null} if the current
 *         candidate cannot be split further or the heap limit was exceeded
 */
private CASHInterval doDetermineNextIntervalAtMaxLevel(ObjectHeap<CASHInterval> heap) {
  CASHInterval interval = heap.poll();
  int dim = interval.getDimensionality();
  while(true) {
    // Done: interval is fully refined in both level and split dimension.
    if(interval.getLevel() >= maxLevel && interval.getMaxSplitDimension() == (dim - 1)) {
      return interval;
    }
    // Periodic progress output on heap growth.
    if(heap.size() % 10000 == 0 && LOG.isVerbose()) {
      LOG.verbose("heap size " + heap.size());
    }
    // Hard safety limit: abort and discard all candidates.
    if(heap.size() >= 40000) {
      LOG.warning("Heap size > 40.000! Stopping.");
      heap.clear();
      return null;
    }
    if(LOG.isDebuggingFiner()) {
      LOG.debugFiner("split " + interval.toString() + " " + interval.getLevel() + "-" + interval.getMaxSplitDimension());
    }
    interval.split();
    // No children produced: this candidate is a dead end.
    if(!interval.hasChildren()) {
      return null;
    }
    // Descend into the better child; keep the other one on the heap.
    CASHInterval bestInterval;
    if(interval.getLeftChild() != null && interval.getRightChild() != null) {
      int comp = interval.getLeftChild().compareTo(interval.getRightChild());
      if(comp < 0) {
        bestInterval = interval.getRightChild();
        heap.add(interval.getLeftChild());
      }
      else {
        bestInterval = interval.getLeftChild();
        heap.add(interval.getRightChild());
      }
    }
    else if(interval.getLeftChild() == null) {
      bestInterval = interval.getRightChild();
    }
    else {
      bestInterval = interval.getLeftChild();
    }
    interval = bestInterval;
  }
}
Recursive helper method to determine the next best interval at maximum level, i.e., the next interval containing the most unprocessed objects.
34,394
/**
 * Determines the minimum and maximum function value of all parameterization
 * functions stored in the specified relation, evaluated over the full alpha
 * range [0, pi] in each angular dimension.
 *
 * @param relation the relation of parameterization functions
 * @param dimensionality the dimensionality of the data space
 * @return a two-element array {overall minimum, overall maximum}
 */
private double[] determineMinMaxDistance(Relation<ParameterizationFunction> relation, int dimensionality) {
  // Alpha spans [0, pi] in each of the (dimensionality - 1) angular dimensions.
  double[] lower = new double[dimensionality - 1];
  double[] upper = new double[dimensionality - 1];
  Arrays.fill(upper, Math.PI);
  HyperBoundingBox alphaBox = new HyperBoundingBox(lower, upper);
  double globalMin = Double.POSITIVE_INFINITY;
  double globalMax = Double.NEGATIVE_INFINITY;
  for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) {
    ParameterizationFunction func = relation.get(iter);
    HyperBoundingBox extrema = func.determineAlphaMinMax(alphaBox);
    globalMin = Math.min(globalMin, func.function(SpatialUtil.getMin(extrema)));
    globalMax = Math.max(globalMax, func.function(SpatialUtil.getMax(extrema)));
  }
  return new double[] { globalMin, globalMax };
}
Determines the minimum and maximum function value of all parameterization functions stored in the specified database .
34,395
/**
 * Process a database: compute a histogram of per-object ranking quality
 * (ROC AUC of each object's kNN ranking against its cluster label).
 *
 * @param database database to process
 * @param relation the data relation to evaluate
 * @return histogram of ROC AUC values, with mean/variance in the header
 */
public HistogramResult run(Database database, Relation<O> relation) {
  final DistanceQuery<O> distQ = database.getDistanceQuery(relation, getDistanceFunction());
  final KNNQuery<O> knnQ = database.getKNNQuery(distQ, relation.size());
  if(LOG.isVerbose()) {
    LOG.verbose("Preprocessing clusters...");
  }
  // Ground-truth clusters from labels (or a single all-in-one cluster).
  Collection<Cluster<Model>> clusters = (new ByLabelOrAllInOneClustering()).run(database).getAllClusters();
  DoubleHistogram histogram = new DoubleHistogram(numbins, 0.0, 1.0);
  if(LOG.isVerbose()) {
    LOG.verbose("Processing points...");
  }
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing ROC AUC values", relation.size(), LOG) : null;
  ROCEvaluation rocEval = new ROCEvaluation();
  MeanVariance stats = new MeanVariance();
  for(Cluster<?> cluster : clusters) {
    for(DBIDIter it = cluster.getIDs().iter(); it.valid(); it.advance()) {
      // Rank the entire relation by distance to this object.
      KNNList neighbors = knnQ.getKNNForDBID(it, relation.size());
      double auc = EvaluateClustering.evaluateRanking(rocEval, cluster, neighbors);
      stats.put(auc);
      histogram.increment(auc, 1. / relation.size());
      LOG.incrementProcessed(prog);
    }
  }
  LOG.ensureCompleted(prog);
  // Flatten the histogram into (bin center, value) pairs.
  Collection<double[]> bins = new ArrayList<>(relation.size());
  for(DoubleHistogram.Iter it = histogram.iter(); it.valid(); it.advance()) {
    bins.add(new double[] { it.getCenter(), it.getValue() });
  }
  HistogramResult result = new HistogramResult("Ranking Quality Histogram", "ranking-histogram", bins);
  result.addHeader("Mean: " + stats.getMean() + " Variance: " + stats.getSampleVariance());
  return result;
}
Process a database and compute a histogram of per-object ranking quality (ROC AUC).
34,396
/**
 * Performs the EM clustering algorithm on the given database: iterates
 * expectation (probability assignment) and maximization (model update) steps
 * until the log-likelihood converges, then derives a hard clustering from the
 * soft assignments.
 *
 * @param database the database (used for model initialization)
 * @param relation the relation of vectors to cluster
 * @return the computed clustering, optionally with soft scores attached
 * @throws IllegalArgumentException if the relation is empty
 */
public Clustering<M> run(Database database, Relation<V> relation) {
  if(relation.size() == 0) {
    throw new IllegalArgumentException("database empty: must contain elements");
  }
  // Initialize the k cluster models and the per-object probability store.
  List<? extends EMClusterModel<M>> models = mfactory.buildInitialModels(database, relation, k, SquaredEuclideanDistanceFunction.STATIC);
  WritableDataStore<double[]> probClusterIGivenX = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_SORTED, double[].class);
  // Initial E step.
  double loglikelihood = assignProbabilitiesToInstances(relation, models, probClusterIGivenX);
  DoubleStatistic likestat = LOG.isStatistics() ? new DoubleStatistic(this.getClass().getName() + ".loglikelihood") : null;
  if(LOG.isStatistics()) {
    LOG.statistics(likestat.setDouble(loglikelihood));
  }
  int it = 0, lastimprovement = 0;
  double bestloglikelihood = loglikelihood;
  // EM iteration; maxiter < 0 means no iteration limit.
  for(++it; it < maxiter || maxiter < 0; it++) {
    final double oldloglikelihood = loglikelihood;
    // M step, then E step.
    recomputeCovarianceMatrices(relation, probClusterIGivenX, models, prior);
    loglikelihood = assignProbabilitiesToInstances(relation, models, probClusterIGivenX);
    if(LOG.isStatistics()) {
      LOG.statistics(likestat.setDouble(loglikelihood));
    }
    // Track the last iteration that improved the best likelihood by > delta.
    if(loglikelihood - bestloglikelihood > delta) {
      lastimprovement = it;
      bestloglikelihood = loglikelihood;
    }
    // Stop on convergence, or when no improvement happened in the last half
    // of the iterations so far.
    if(Math.abs(loglikelihood - oldloglikelihood) <= delta || lastimprovement < it >> 1) {
      break;
    }
  }
  if(LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(KEY + ".iterations", it));
  }
  // Hard assignment: each object goes to its most probable cluster.
  List<ModifiableDBIDs> hardClusters = new ArrayList<>(k);
  for(int i = 0; i < k; i++) {
    hardClusters.add(DBIDUtil.newArray());
  }
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    hardClusters.get(argmax(probClusterIGivenX.get(iditer))).add(iditer);
  }
  Clustering<M> result = new Clustering<>("EM Clustering", "em-clustering");
  for(int i = 0; i < k; i++) {
    result.addToplevelCluster(new Cluster<>(hardClusters.get(i), models.get(i).finalizeCluster()));
  }
  // Keep the soft scores as a child result if requested, else free them.
  if(isSoft()) {
    result.addChildResult(new MaterializedRelation<>("cluster assignments", "em-soft-score", SOFT_TYPE, probClusterIGivenX, relation.getDBIDs()));
  }
  else {
    probClusterIGivenX.destroy();
  }
  return result;
}
Performs the EM clustering algorithm on the given database .
34,397
/**
 * Recompute the covariance matrices (M step): update each cluster model from
 * the current soft assignments, with an optional two-pass update for models
 * that require it, and a MAP-style prior on the cluster weights.
 *
 * @param relation the data relation
 * @param probClusterIGivenX per-object cluster membership probabilities
 * @param models the cluster models to update (mutated in place)
 * @param prior MAP prior on the cluster weights; &lt;= 0 means plain MLE
 */
public static void recomputeCovarianceMatrices(Relation<? extends NumberVector> relation, WritableDataStore<double[]> probClusterIGivenX, List<? extends EMClusterModel<?>> models, double prior) {
  final int k = models.size();
  boolean needsTwoPass = false;
  for(EMClusterModel<?> m : models) {
    m.beginEStep();
    needsTwoPass |= m.needsTwoPass();
  }
  // Optional first pass for models that need a preliminary sweep
  // (e.g. to compute means before covariances).
  if(needsTwoPass) {
    for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
      double[] clusterProbabilities = probClusterIGivenX.get(iditer);
      NumberVector instance = relation.get(iditer);
      for(int i = 0; i < clusterProbabilities.length; i++) {
        final double prob = clusterProbabilities[i];
        // Skip numerically negligible memberships.
        if(prob > 1e-10) {
          models.get(i).firstPassE(instance, prob);
        }
      }
    }
    for(EMClusterModel<?> m : models) {
      m.finalizeFirstPassE();
    }
  }
  // Main pass: accumulate weighted statistics and the total weight per cluster.
  double[] wsum = new double[k];
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    double[] clusterProbabilities = probClusterIGivenX.get(iditer);
    NumberVector instance = relation.get(iditer);
    for(int i = 0; i < clusterProbabilities.length; i++) {
      final double prob = clusterProbabilities[i];
      if(prob > 1e-10) {
        models.get(i).updateE(instance, prob);
      }
      wsum[i] += prob;
    }
  }
  for(int i = 0; i < models.size(); i++) {
    // Cluster weight: MLE, or MAP estimate with a Dirichlet-style prior.
    final double weight = prior <= 0. ? wsum[i] / relation.size() : (wsum[i] + prior - 1) / (relation.size() + prior * k - k);
    models.get(i).finalizeEStep(weight, prior);
  }
}
Recompute the covariance matrices.
34,398
/**
 * Assigns the current probability values to the instances in the database
 * (E step) and computes the expectation value of the current mixture of
 * distributions.
 *
 * @param relation the data relation
 * @param models the current cluster models
 * @param probClusterIGivenX output store for per-object cluster probabilities
 * @return the average log-likelihood over all objects
 */
public static double assignProbabilitiesToInstances(Relation<? extends NumberVector> relation, List<? extends EMClusterModel<?>> models, WritableDataStore<double[]> probClusterIGivenX) {
  final int k = models.size();
  double logLikelihoodSum = 0.;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    NumberVector vec = relation.get(iditer);
    double[] clusterProbs = new double[k];
    for(int i = 0; i < k; i++) {
      // Clamp extremely small log-densities to keep the arithmetic stable.
      double logDensity = models.get(i).estimateLogDensity(vec);
      if(logDensity > MIN_LOGLIKELIHOOD) {
        clusterProbs[i] = logDensity;
      }
      else {
        clusterProbs[i] = MIN_LOGLIKELIHOOD;
      }
    }
    // Normalize in log space, then convert to posterior probabilities.
    final double logP = logSumExp(clusterProbs);
    for(int i = 0; i < k; i++) {
      clusterProbs[i] = FastMath.exp(clusterProbs[i] - logP);
    }
    probClusterIGivenX.put(iditer, clusterProbs);
    logLikelihoodSum += logP;
  }
  return logLikelihoodSum / relation.size();
}
Assigns the current probability values to the instances in the database and compute the expectation value of the current mixture of distributions .
34,399
/**
 * Update the visualizer menus: rebuilds the menu bar from the current result
 * hierarchy, keeping only the file menu, and refreshes the display.
 */
protected synchronized void updateVisualizerMenus() {
  // Projection of the currently shown detail view, if one is active.
  Projection projection = null;
  if(svgCanvas.getPlot() instanceof DetailView) {
    projection = ((DetailView) svgCanvas.getPlot()).getPlotItem().proj;
  }
  // Rebuild the menu bar, keeping only the file menu.
  menubar.removeAll();
  menubar.add(filemenu);
  ResultHierarchy hier = context.getHierarchy();
  Hierarchy<Object> vistree = context.getVisHierarchy();
  Result start = context.getBaseResult();
  ArrayList<JMenuItem> entries = new ArrayList<>();
  if(start != null) {
    for(It<Result> it = hier.iterChildren(start); it.valid(); it.advance()) {
      recursiveBuildMenu(entries, it.get(), hier, vistree, projection);
    }
  }
  else {
    // No base result: start from every root of the hierarchy.
    for(It<Result> it = hier.iterAll(); it.valid(); it.advance()) {
      if(hier.numParents(it.get()) == 0) {
        recursiveBuildMenu(entries, it.get(), hier, vistree, projection);
      }
    }
  }
  for(JMenuItem entry : entries) {
    menubar.add(entry);
  }
  menubar.revalidate();
  menubar.repaint();
}
Update the visualizer menus .