idx
int64
0
41.2k
question
stringlengths
83
4.15k
target
stringlengths
5
715
33,300
/**
 * Unwrap optional, nullable-union and array wrappers to recover the underlying
 * TypeScript type. A union is unwrapped only when exactly one of its members
 * is an "original" type (e.g. number|null|undefined reduces to number).
 *
 * @param type the possibly wrapped type
 * @return the innermost original type, or {@code type} itself if no unique unwrapping exists
 */
private static TsType extractOriginalTsType(TsType type) {
  if (type instanceof TsType.OptionalType) {
    // Strip the optional marker and recurse.
    return extractOriginalTsType(((TsType.OptionalType) type).type);
  }
  if (type instanceof TsType.UnionType) {
    TsType.UnionType union = (TsType.UnionType) type;
    List<TsType> originals = new ArrayList<>();
    for (TsType member : union.types) {
      if (isOriginalTsType(member)) {
        originals.add(member);
      }
    }
    // Only unwrap when the union contains a single original member; otherwise
    // the union is genuinely ambiguous and is returned unchanged.
    return originals.size() == 1 ? extractOriginalTsType(originals.get(0)) : type;
  }
  if (type instanceof TsType.BasicArrayType) {
    return extractOriginalTsType(((TsType.BasicArrayType) type).elementType);
  }
  return type;
}
If the type is an optional of number|null|undefined, or a list of integers, we want to be able to recognize it as a number so the member can be linked to another class. => extract the original type while ignoring the |null|undefined and optional information.
33,301
/**
 * Find the first object at position {@code pos} or later that has not been
 * linked into the hierarchy yet.
 *
 * @param pos first position to inspect (inclusive)
 * @param end end of the range (exclusive)
 * @param ix array iterator used to seek to each position
 * @param builder hierarchy builder that knows the linked status
 * @return position of the first unlinked object, or -1 if all are linked
 */
public static int findUnlinked(int pos, int end, DBIDArrayIter ix, PointerHierarchyRepresentationBuilder builder) {
  for (; pos < end; ++pos) {
    if (!builder.isLinked(ix.seek(pos))) {
      return pos;
    }
  }
  return -1;
}
Find an unlinked object .
33,302
/**
 * Recursively step through the cluster hierarchy to build convex hulls.
 * Child hulls are computed (and cached in {@code hulls}) first, then merged
 * into the parent hull; each child contributes a proportional share of its
 * weight to the parent.
 *
 * @param clu cluster to process
 * @param hier cluster hierarchy (may be null for flat clusterings)
 * @param hulls cache of already-computed hulls, also receives the result
 * @param coords relation providing the 2D coordinates
 * @return weight and hull polygon for this cluster
 */
private DoubleObjPair<Polygon> buildHullsRecursively(Cluster<Model> clu, Hierarchy<Cluster<Model>> hier, Map<Object, DoubleObjPair<Polygon>> hulls, Relation<? extends NumberVector> coords) {
  final DBIDs ids = clu.getIDs();
  FilteredConvexHull2D hull = new FilteredConvexHull2D();
  // Add this cluster's own points.
  for (DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    hull.add(coords.get(it).toArray());
  }
  double weight = ids.size();
  if (hier != null) {
    final int numc = hier.numChildren(clu);
    if (numc > 0) {
      for (It<Cluster<Model>> it = hier.iterChildren(clu); it.valid(); it.advance()) {
        final Cluster<Model> child = it.get();
        DoubleObjPair<Polygon> poly = hulls.get(child);
        if (poly == null) {
          // Not yet computed: recurse (also populates the cache).
          poly = buildHullsRecursively(child, hier, hulls, coords);
        }
        // Merge the child's hull vertices into the parent hull.
        for (ArrayListIter<double[]> vi = poly.second.iter(); vi.valid(); vi.advance()) {
          hull.add(vi.get());
        }
        weight += poly.first / numc; // Distribute child weight evenly.
      }
    }
  }
  DoubleObjPair<Polygon> result = new DoubleObjPair<>(weight, hull.getHull());
  hulls.put(clu, result);
  return result;
}
Recursively step through the clusters to build the hulls .
33,303
/**
 * Get a color from a simple heatmap, by linear interpolation between fixed
 * color stops (black, blue, red, yellow with increasing opacity).
 *
 * @param val value in [0, 1]; values below 0 are clamped, values above 1
 *        yield the last color stop
 * @return interpolated color
 */
public static final Color getColorForValue(double val) {
  // Color stop positions and their colors (parallel arrays).
  double[] stops = new double[] { 0.0, 0.6, 0.8, 1.0 };
  Color[] colors = new Color[] { //
      new Color(0.0f, 0.0f, 0.0f, 0.6f), //
      new Color(0.0f, 0.0f, 1.0f, 0.8f), //
      new Color(1.0f, 0.0f, 0.0f, 0.9f), //
      new Color(1.0f, 1.0f, 0.0f, 1.0f) };
  assert (stops.length == colors.length);
  // Clamp values below the first stop.
  val = val < stops[0] ? stops[0] : val;
  for (int i = 1; i < stops.length; i++) {
    if (val > stops[i]) {
      continue;
    }
    // Linear interpolation between the surrounding stops, per channel.
    final Color lo = colors[i - 1], hi = colors[i];
    final double mix = (val - stops[i - 1]) / (stops[i] - stops[i - 1]);
    final double inv = 1 - mix;
    return new Color( //
        (int) (inv * lo.getRed() + mix * hi.getRed()), //
        (int) (inv * lo.getGreen() + mix * hi.getGreen()), //
        (int) (inv * lo.getBlue() + mix * hi.getBlue()), //
        (int) (inv * lo.getAlpha() + mix * hi.getAlpha()));
  }
  // Beyond the last stop: return the final color.
  return colors[colors.length - 1];
}
Get color from a simple heatmap .
33,304
/**
 * Show a "Save as" dialog for the given plot, and write the file in the
 * chosen format (or a format guessed from the file extension).
 *
 * @param plot plot to save
 * @param width default image width
 * @param height default image height
 * @return the {@link JFileChooser} return code
 */
public static int showSaveDialog(SVGPlot plot, int width, int height) {
  JFileChooser fc = new JFileChooser(new File("."));
  fc.setDialogTitle(DEFAULT_TITLE);
  SaveOptionsPanel optionsPanel = new SaveOptionsPanel(fc, width, height);
  fc.setAccessory(optionsPanel);
  int ret = fc.showSaveDialog(null);
  if (ret == JFileChooser.APPROVE_OPTION) {
    fc.setDialogTitle("Saving... Please wait.");
    File file = fc.getSelectedFile();
    String format = optionsPanel.getSelectedFormat();
    width = optionsPanel.getSelectedWidth();
    height = optionsPanel.getSelectedHeight();
    if (format == null || AUTOMAGIC_FORMAT.equals(format)) {
      format = guessFormat(file.getName());
    }
    try {
      if (format == null) {
        showError(fc, "Error saving image.", "File format not recognized.");
        return ret;
      }
      switch (format) {
      case "jpeg":
      case "jpg":
        plot.saveAsJPEG(file, width, height, optionsPanel.getJPEGQuality());
        break;
      case "png":
        plot.saveAsPNG(file, width, height);
        break;
      case "ps":
        plot.saveAsPS(file);
        break;
      case "eps":
        plot.saveAsEPS(file);
        break;
      case "pdf":
        plot.saveAsPDF(file);
        break;
      case "svg":
        plot.saveAsSVG(file);
        break;
      default:
        showError(fc, "Error saving image.", "Unsupported format: " + format);
      }
    }
    catch (java.lang.IncompatibleClassChangeError e) {
      // Known Batik/JPEG binary incompatibility.
      showError(fc, "Error saving image.", "It seems that your Java version is incompatible with this version of Batik and Jpeg writing. Sorry.");
    }
    catch (ClassNotFoundException e) {
      // PDF/PS/EPS export needs Apache FOP on the classpath.
      showError(fc, "Error saving image.", "A class was not found when saving this image. Maybe installing Apache FOP will help (for PDF, PS and EPS output).\n" + e.toString());
    }
    catch (TransformerFactoryConfigurationError | Exception e) {
      LOG.exception(e);
      showError(fc, "Error saving image.", e.toString());
    }
  }
  else if (ret == JFileChooser.ERROR_OPTION) {
    showError(fc, "Error in file dialog.", "Unknown Error.");
  }
  // CANCEL_OPTION needs no action.
  return ret;
}
Show a Save as dialog .
33,305
/**
 * Guess a supported format from the file name, for automatic format handling.
 *
 * @param name file name to inspect
 * @return the canonical (lowercase) format string, or {@code null} when the
 *         extension is missing or not a supported format
 */
public static String guessFormat(String name) {
  String ext = FileUtil.getFilenameExtension(name);
  if (ext == null) {
    return null;
  }
  for (String format : FORMATS) {
    if (format.equalsIgnoreCase(ext)) {
      // Return the canonical constant, not the raw extension: callers compare
      // the result against lowercase literals ("png".equals(format)), so
      // returning e.g. "PNG" from the file name would match no writer branch.
      return format;
    }
  }
  return null;
}
Guess a supported format from the file name . For auto format handling .
33,306
/**
 * Get the static adapter instance.
 *
 * @param prototype prototype vector, used only to bind the type parameter
 * @return the shared adapter instance
 */
@SuppressWarnings("unchecked")
public static <F> FeatureVectorAdapter<F> featureVectorAdapter(FeatureVector<F> prototype) {
  // The shared instance is stateless, so this unchecked cast is safe.
  return (FeatureVectorAdapter<F>) FEATUREVECTORADAPTER;
}
Get the static instance .
33,307
/**
 * Returns the index of the maximum of the given values. If no value is bigger
 * than the first, the index of the first entry is returned.
 *
 * @param array array to scan
 * @param adapter adapter used to read numeric values
 * @return index of the (first) maximum
 * @throws IndexOutOfBoundsException if the array is empty
 */
public static <A> int getIndexOfMaximum(A array, NumberArrayAdapter<?, A> adapter) throws IndexOutOfBoundsException {
  final int size = adapter.size(array);
  int best = 0;
  double max = adapter.getDouble(array, 0);
  for (int i = 1; i < size; i++) {
    final double v = adapter.getDouble(array, i);
    if (v > max) {
      max = v;
      best = i;
    }
  }
  return best;
}
Returns the index of the maximum of the given values. If no value is bigger than the first, the index of the first entry is returned.
33,308
/**
 * Transform a single vector into a bit-interleaved byte representation.
 * Each dimension is min-max normalized to [0, 1], scaled to the positive
 * long range, and the bits of all dimensions are then interleaved, least
 * significant bit first, filling the byte array from its end.
 *
 * @param vector vector to transform
 * @return interleaved byte representation
 */
public byte[] asByteArray(NumberVector vector) {
  final long[] scaled = new long[dimensionality];
  for (int dim = 0; dim < dimensionality; ++dim) {
    final double lo = minValues[dim], hi = maxValues[dim];
    double norm = (vector.doubleValue(dim) - lo) / (hi - lo);
    scaled[dim] = (long) (norm * (Long.MAX_VALUE));
  }
  // NOTE(review): this allocation appears Long.SIZE times larger than the
  // Long.SIZE * dimensionality bits actually written below; the leading bytes
  // stay zero. Kept as-is because the output length is observable behavior —
  // confirm before shrinking.
  final byte[] bytes = new byte[Long.SIZE * dimensionality * (Long.SIZE / Byte.SIZE)];
  int bitpos = 0;
  for (int i = 0; i < Long.SIZE; ++i) {
    for (int dim = 0; dim < dimensionality; ++dim) {
      final int localShift = bitpos % Byte.SIZE;
      // Take bit i of this dimension and place it at the current position,
      // counting from the end of the array.
      bytes[(bytes.length - 1) - (bitpos / Byte.SIZE)] |= ((scaled[dim] >> i) & 0x01) << localShift;
      bitpos++;
    }
  }
  return bytes;
}
Transform a single vector .
33,309
/**
 * Performs the SOD (Subspace Outlier Degree) algorithm on the given relation.
 * For each object, a shared-nearest-neighbor reference set is computed, a
 * relevant subspace is chosen from the low-variance dimensions, and the
 * outlier degree is the subspace distance to the neighborhood centroid.
 *
 * @param relation data relation to process
 * @return outlier scores (and optionally the subspace models)
 */
public OutlierResult run(Relation<V> relation) {
  SimilarityQuery<V> snnInstance = similarityFunction.instantiate(relation);
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Assigning Subspace Outlier Degree", relation.size(), LOG) : null;
  WritableDoubleDataStore sod_scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_STATIC);
  // Models are only materialized when requested.
  WritableDataStore<SODModel> sod_models = models ? DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_STATIC, SODModel.class) : null;
  DoubleMinMax minmax = new DoubleMinMax();
  for (DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) {
    DBIDs neighborhood = getNearestNeighbors(relation, snnInstance, iter);
    double[] center;
    long[] weightVector = null;
    double sod = 0.;
    if (neighborhood.size() > 0) {
      center = Centroid.make(relation, neighborhood).getArrayRef();
      double[] variances = computePerDimensionVariances(relation, center, neighborhood);
      double expectationOfVariance = Mean.of(variances);
      // Select dimensions with clearly below-average variance as the subspace.
      weightVector = BitsUtil.zero(variances.length);
      for (int d = 0; d < variances.length; d++) {
        if (variances[d] < alpha * expectationOfVariance) {
          BitsUtil.setI(weightVector, d);
        }
      }
      sod = subspaceOutlierDegree(relation.get(iter), center, weightVector);
    }
    else {
      // No neighbors: score stays 0, the object is its own "center".
      center = relation.get(iter).toArray();
    }
    if (sod_models != null) {
      sod_models.put(iter, new SODModel(center, weightVector));
    }
    sod_scores.putDouble(iter, sod);
    minmax.put(sod);
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax());
  OutlierResult sodResult = new OutlierResult(meta, new MaterializedDoubleRelation("Subspace Outlier Degree", "sod-outlier", sod_scores, relation.getDBIDs()));
  if (sod_models != null) {
    sodResult.addChildResult(new MaterializedRelation<>("Subspace Outlier Model", "sod-outlier", new SimpleTypeInformation<>(SODModel.class), sod_models, relation.getDBIDs()));
  }
  return sodResult;
}
Performs the SOD algorithm on the given database .
33,310
/**
 * Get the k nearest neighbors in terms of the shared nearest neighbor
 * similarity. Only objects with positive similarity are considered; the
 * query object itself is excluded.
 *
 * @param relation data relation
 * @param simQ similarity query to use
 * @param queryObject object to find neighbors for
 * @return neighbor ids (at most k, plus ties)
 */
private DBIDs getNearestNeighbors(Relation<V> relation, SimilarityQuery<V> simQ, DBIDRef queryObject) {
  Heap<DoubleDBIDPair> topk = new TiedTopBoundedHeap<>(knn);
  for (DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    if (DBIDUtil.equal(it, queryObject)) {
      continue; // Skip the query object itself.
    }
    final double sim = simQ.similarity(queryObject, it);
    if (sim > 0.) {
      topk.add(DBIDUtil.newPair(sim, it));
    }
  }
  // Drain the heap into an id list.
  ArrayModifiableDBIDs neighbors = DBIDUtil.newArray(topk.size());
  while (topk.size() > 0) {
    neighbors.add(topk.poll());
  }
  return neighbors;
}
Get the k nearest neighbors in terms of the shared nearest neighbor distance .
33,311
/**
 * Compute the per-dimension variances for the given neighborhood relative to
 * the given center.
 *
 * @param relation data relation
 * @param center neighborhood center (defines the dimensionality)
 * @param neighborhood ids of the neighborhood (must be non-empty)
 * @return per-dimension variances
 */
private static double[] computePerDimensionVariances(Relation<? extends NumberVector> relation, double[] center, DBIDs neighborhood) {
  final int dim = center.length;
  double[] variances = new double[dim];
  for (DBIDIter it = neighborhood.iter(); it.valid(); it.advance()) {
    final NumberVector obj = relation.get(it);
    for (int d = 0; d < dim; d++) {
      final double delta = obj.doubleValue(d) - center[d];
      variances[d] += delta * delta;
    }
  }
  // Normalize by the neighborhood size in-place.
  return VMath.timesEquals(variances, 1. / neighborhood.size());
}
Compute the per - dimension variances for the given neighborhood and center .
33,312
/**
 * Compute the SOD score: the subspace Euclidean distance to the neighborhood
 * center, normalized by the number of relevant dimensions.
 *
 * @param queryObject object being scored
 * @param center neighborhood center
 * @param weightVector bitmask of relevant dimensions
 * @return SOD score, 0 when no dimension is relevant
 */
private double subspaceOutlierDegree(V queryObject, double[] center, long[] weightVector) {
  final int card = BitsUtil.cardinality(weightVector);
  if (card == 0) {
    return 0; // Empty subspace: no meaningful distance.
  }
  final SubspaceEuclideanDistanceFunction df = new SubspaceEuclideanDistanceFunction(weightVector);
  return df.distance(queryObject, DoubleVector.wrap(center)) / card;
}
Compute SOD score .
33,313
/**
 * Parse a long integer from a character sequence.
 *
 * @param str input sequence
 * @param start first position to parse (inclusive)
 * @param end end of the range (exclusive)
 * @return parsed value
 * @throws NumberFormatException ({@code NOT_A_NUMBER}, {@code PRECISION_OVERFLOW}
 *         or {@code TRAILING_CHARACTERS}) on malformed, overflowing or
 *         incompletely-consumed input
 */
public static long parseLongBase10(final CharSequence str, final int start, final int end) {
  int pos = start;
  char cur = str.charAt(pos);
  final boolean isNegative = (cur == '-');
  // Skip a leading sign, when there is at least one character after it.
  if ((isNegative || (cur == '+')) && (++pos < end)) {
    cur = str.charAt(pos);
  }
  if ((cur < '0') || (cur > '9')) {
    throw NOT_A_NUMBER;
  }
  long decimal = 0;
  while (true) {
    final int digit = cur - '0';
    if ((digit >= 0) && (digit <= 9)) {
      // Exact overflow test: decimal * 10 + digit must stay <= Long.MAX_VALUE.
      // The previous wrap-around check (tmp < decimal) missed overflows whose
      // wrapped result still exceeded the accumulator, silently returning a
      // wrong value for some inputs around 8.2e18 * 10.
      if (decimal > (Long.MAX_VALUE - digit) / 10) {
        throw PRECISION_OVERFLOW;
      }
      decimal = decimal * 10 + digit;
    }
    else {
      break;
    }
    if (++pos < end) {
      cur = str.charAt(pos);
    }
    else {
      break;
    }
  }
  if (pos != end) {
    throw TRAILING_CHARACTERS;
  }
  return isNegative ? -decimal : decimal;
}
Parse a long integer from a character sequence .
33,314
/**
 * Match "inf", "infinity" (in any capitalization) or the UTF-8 infinity sign.
 *
 * @param str byte buffer
 * @param firstchar first byte of the candidate token
 * @param start start offset (inclusive)
 * @param end end offset (exclusive)
 * @return true when the range spells an infinity token
 */
private static boolean matchInf(byte[] str, byte firstchar, int start, int end) {
  final int len = end - start;
  // Three bytes -0x1E -0x78 -0x62 are the UTF-8 encoding of U+221E "infinity".
  if (len == 3 && firstchar == -0x1E && str[start + 1] == -0x78 && str[start + 2] == -0x62) {
    return true;
  }
  // Otherwise only "inf" (3 chars) or the full word are acceptable lengths.
  if ((len != 3 && len != INFINITY_LENGTH) || (firstchar != 'I' && firstchar != 'i')) {
    return false;
  }
  // INFINITY_PATTERN holds both the lower- and upper-case spelling; compare
  // each position against either case.
  for (int i = 1, j = INFINITY_LENGTH + 1; i < INFINITY_LENGTH; i++, j++) {
    final byte c = str[start + i];
    if (c != INFINITY_PATTERN[i] && c != INFINITY_PATTERN[j]) {
      return false;
    }
    if (i == 2 && len == 3) {
      return true; // Short form "inf" fully matched.
    }
  }
  return true;
}
Match "inf" or "infinity" in a number of different capitalizations.
33,315
/**
 * Match "NaN" (or the two-letter prefix "na") in any capitalization.
 *
 * @param str byte buffer
 * @param firstchar first byte of the candidate token
 * @param start start offset (inclusive)
 * @param end end offset (exclusive)
 * @return true when the range spells NaN
 */
private static boolean matchNaN(byte[] str, byte firstchar, int start, int end) {
  final int len = end - start;
  if (len < 2 || len > 3 || (firstchar != 'N' && firstchar != 'n')) {
    return false;
  }
  final byte second = str[start + 1];
  if (second != 'a' && second != 'A') {
    return false;
  }
  // Two characters ("na") are accepted as-is; three require a final n/N.
  if (len == 2) {
    return true;
  }
  final byte third = str[start + 2];
  return third == 'N' || third == 'n';
}
Match NaN in a number of different capitalizations .
33,316
/**
 * Set up the Swing look and feel: prefer GTK when enabled and installed,
 * otherwise fall back to the system look and feel. Failures are ignored on
 * purpose — an unstyled UI is better than no UI.
 */
public static void setLookAndFeel() {
  try {
    if (PREFER_GTK) {
      for (LookAndFeelInfo lf : UIManager.getInstalledLookAndFeels()) {
        if (lf.getClassName().contains("GTK")) {
          UIManager.setLookAndFeel(lf.getClassName());
          return;
        }
      }
    }
    UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
  }
  catch (Exception e) {
    // Best effort only: keep the default look and feel.
  }
}
Set up the look and feel.
33,317
/**
 * Route uncaught exceptions of all threads to the given logger.
 *
 * @param logger logger to receive uncaught exceptions
 */
public static void logUncaughtExceptions(Logging logger) {
  try {
    Thread.setDefaultUncaughtExceptionHandler((t, e) -> logger.exception(e));
  }
  catch (SecurityException e) {
    // A security manager may forbid installing the handler.
    logger.warning("Could not set the Default Uncaught Exception Handler", e);
  }
}
Setup logging of uncaught exceptions .
33,318
/**
 * Build the frequent 1-itemsets by counting the occurrences of every
 * dimension and keeping those with sufficient support.
 *
 * @param relation sparse feature vectors
 * @param dim number of dimensions (candidate items)
 * @param needed minimum support count
 * @return frequent 1-itemsets
 */
protected List<OneItemset> buildFrequentOneItemsets(final Relation<? extends SparseFeatureVector<?>> relation, final int dim, final int needed) {
  final int[] counts = new int[dim];
  // Count how often each dimension occurs.
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    final SparseFeatureVector<?> vec = relation.get(iditer);
    for (int it = vec.iter(); vec.iterValid(it); it = vec.iterAdvance(it)) {
      counts[vec.iterDim(it)]++;
    }
  }
  if (LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "1-items.candidates", dim));
  }
  // Keep only the items meeting the support threshold.
  final List<OneItemset> frequent = new ArrayList<>(dim);
  for (int i = 0; i < dim; i++) {
    if (counts[i] >= needed) {
      frequent.add(new OneItemset(i, counts[i]));
    }
  }
  return frequent;
}
Build the 1 - itemsets .
33,319
/**
 * Build the frequent 2-itemsets by counting pairs of frequent items per
 * transaction in a hash map keyed by the packed (i, j) pair.
 *
 * @param oneitems frequent 1-itemsets
 * @param relation bit vector relation
 * @param dim number of dimensions
 * @param needed minimum support count
 * @param ids transaction ids to process
 * @param survivors output: transactions that can still support longer itemsets
 * @return frequent 2-itemsets, sorted
 */
protected List<SparseItemset> buildFrequentTwoItemsets(List<OneItemset> oneitems, final Relation<BitVector> relation, final int dim, final int needed, DBIDs ids, ArrayModifiableDBIDs survivors) {
  // Mask of all frequent single items.
  int f1 = 0;
  long[] mask = BitsUtil.zero(dim);
  for (OneItemset supported : oneitems) {
    BitsUtil.setI(mask, supported.item);
    f1++;
  }
  if (LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "2-items.candidates", f1 * (long) (f1 - 1)));
  }
  Long2IntOpenHashMap counters = new Long2IntOpenHashMap((f1 * (f1 - 1)) >>> 1);
  final long[] scratch = BitsUtil.zero(dim);
  for (DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    // Restrict the transaction to frequent items only.
    BitsUtil.setI(scratch, mask);
    relation.get(iditer).andOnto(scratch);
    int lives = 0;
    // Count every ordered pair i < j of set bits.
    for (int i = BitsUtil.nextSetBit(scratch, 0); i >= 0; i = BitsUtil.nextSetBit(scratch, i + 1)) {
      for (int j = BitsUtil.nextSetBit(scratch, i + 1); j >= 0; j = BitsUtil.nextSetBit(scratch, j + 1)) {
        final long key = (((long) i) << 32) | j;
        counters.put(key, 1 + counters.get(key));
        ++lives;
      }
    }
    if (lives > 2) {
      survivors.add(iditer);
    }
  }
  // Extract the pairs meeting the support threshold.
  List<SparseItemset> frequent = new ArrayList<>(f1 * (int) FastMath.sqrt(f1));
  for (ObjectIterator<Long2IntMap.Entry> iter = counters.long2IntEntrySet().fastIterator(); iter.hasNext();) {
    final Long2IntMap.Entry entry = iter.next();
    if (entry.getIntValue() >= needed) {
      final int ii = (int) (entry.getLongKey() >>> 32);
      final int ij = (int) entry.getLongKey(); // Low 32 bits.
      frequent.add(new SparseItemset(new int[] { ii, ij }, entry.getIntValue()));
    }
  }
  Collections.sort(frequent);
  if (LOG.isStatistics()) {
    LOG.statistics(new LongStatistic(STAT + "2-items.frequent", frequent.size()));
  }
  return frequent;
}
Build the 2 - itemsets .
33,320
/**
 * Returns the frequent itemsets out of the given candidates with respect to
 * the given database. Delegates to the sparse-optimized implementation when
 * the candidate list is large enough to pay off.
 *
 * @param candidates candidate itemsets (support counters are incremented in place)
 * @param relation bit vector relation
 * @param needed minimum support count
 * @param ids transaction ids to scan
 * @param survivors output: transactions that can support longer itemsets
 * @param length current itemset length
 * @return candidates with sufficient support
 */
protected List<? extends Itemset> frequentItemsets(List<? extends Itemset> candidates, Relation<BitVector> relation, int needed, DBIDs ids, ArrayModifiableDBIDs survivors, int length) {
  if (candidates.isEmpty()) {
    return Collections.emptyList();
  }
  // Large sparse candidate lists are handled by the binary-search variant.
  final Itemset first = candidates.get(0);
  if (candidates.size() > length * length * length * 100 && first instanceof SparseItemset) {
    @SuppressWarnings("unchecked")
    List<SparseItemset> sparsecand = (List<SparseItemset>) candidates;
    return frequentItemsetsSparse(sparsecand, relation, needed, ids, survivors, length);
  }
  // Linear scan: test every candidate against every transaction.
  for (DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    final BitVector bv = relation.get(iditer);
    int lives = 0;
    for (Itemset candidate : candidates) {
      if (candidate.containedIn(bv)) {
        candidate.increaseSupport();
        ++lives;
      }
    }
    if (lives > length) {
      survivors.add(iditer);
    }
  }
  final List<Itemset> frequent = new ArrayList<>(candidates.size());
  for (Iterator<? extends Itemset> iter = candidates.iterator(); iter.hasNext();) {
    final Itemset candidate = iter.next();
    if (candidate.getSupport() >= needed) {
      frequent.add(candidate);
    }
  }
  return frequent;
}
Returns the frequent BitSets out of the given BitSets with respect to the given database .
33,321
/**
 * Returns the frequent itemsets out of the given candidates with respect to
 * the given database; optimized implementation for {@link SparseItemset},
 * using binary search over the sorted candidate list driven by the itemsets
 * enumerable from each transaction.
 *
 * @param candidates sorted candidate itemsets (support counters incremented in place)
 * @param relation bit vector relation
 * @param needed minimum support count
 * @param ids transaction ids to scan
 * @param survivors output: transactions that can support longer itemsets
 * @param length current itemset length
 * @return candidates with sufficient support
 */
protected List<SparseItemset> frequentItemsetsSparse(List<SparseItemset> candidates, Relation<BitVector> relation, int needed, DBIDs ids, ArrayModifiableDBIDs survivors, int length) {
  final int end = candidates.size();
  int[] scratchi = new int[length], iters = new int[length];
  SparseItemset scratch = new SparseItemset(scratchi);
  for (DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    BitVector bv = relation.get(iditer);
    if (!initializeSearchItemset(bv, scratchi, iters)) {
      continue;
    }
    int lives = 0;
    // Restart the search window for every transaction. (Previously the cursor
    // persisted across transactions, permanently shrinking the window and
    // skipping candidates for all later transactions.)
    int pos = 0;
    while (pos < end) {
      pos = binarySearch(candidates, scratch, pos, end);
      if (pos >= 0) {
        // Found. (Was "pos > 0", which missed a match at index 0 and then
        // mis-decoded it as an insertion point.)
        candidates.get(pos).increaseSupport();
        ++lives;
      }
      else {
        pos = (-pos) - 1; // Decode the insertion point and continue from there.
      }
      if (pos >= end || !nextSearchItemset(bv, scratchi, iters)) {
        break;
      }
    }
    // Note: the former additional linear containedIn() pass was removed — it
    // counted every candidate a second time, inflating all support values.
    if (lives > length) {
      survivors.add(iditer);
    }
  }
  List<SparseItemset> frequent = new ArrayList<>(candidates.size());
  for (Iterator<SparseItemset> iter = candidates.iterator(); iter.hasNext();) {
    final SparseItemset candidate = iter.next();
    if (candidate.getSupport() >= needed) {
      frequent.add(candidate);
    }
  }
  return frequent;
}
Returns the frequent BitSets out of the given BitSets with respect to the given database . Optimized implementation for SparseItemset .
33,322
/**
 * Initialize the scratch itemset with the first (lexicographically smallest)
 * itemset contained in the given transaction.
 *
 * @param bv transaction bit vector
 * @param scratchi scratch itemset dimensions (output)
 * @param iters per-position bit iterators (output)
 * @return false when the transaction has fewer set bits than the itemset length
 */
private boolean initializeSearchItemset(BitVector bv, int[] scratchi, int[] iters) {
  for (int i = 0; i < scratchi.length; i++) {
    // Each position starts one set bit after the previous position.
    iters[i] = (i == 0) ? bv.iter() : bv.iterAdvance(iters[i - 1]);
    if (iters[i] < 0) {
      return false;
    }
    scratchi[i] = bv.iterDim(iters[i]);
  }
  return true;
}
Initialize the scratch itemset .
33,323
/**
 * Advance the scratch itemset to the next itemset contained in the
 * transaction, starting from the last position.
 *
 * @param bv transaction bit vector
 * @param scratchi scratch itemset dimensions (updated in place)
 * @param iters per-position bit iterators (updated in place)
 * @return false when no further itemset exists
 */
private boolean nextSearchItemset(BitVector bv, int[] scratchi, int[] iters) {
  final int last = scratchi.length - 1;
  // Try to advance the rightmost position first; a position may only move to
  // a bit not already used by the following position.
  for (int j = last; j >= 0; j--) {
    final int next = bv.iterAdvance(iters[j]);
    if (next >= 0 && (j == last || next != iters[j + 1])) {
      iters[j] = next;
      scratchi[j] = bv.iterDim(next);
      return true;
    }
  }
  return false;
}
Advance scratch itemset to the next .
33,324
/**
 * Binary search for {@code scratch} in the sorted candidate list.
 *
 * @param candidates sorted candidates
 * @param scratch itemset to search for
 * @param begin search range start (inclusive)
 * @param end search range end (exclusive)
 * @return index of the match, or {@code -(insertionPoint + 1)} when not found
 */
private int binarySearch(List<SparseItemset> candidates, SparseItemset scratch, int begin, int end) {
  --end; // Make the upper bound inclusive.
  // Inclusive-bound loop must run while begin <= end: the previous
  // "begin < end" condition never compared the final remaining element, so an
  // exact match in a one-element range was reported as not found.
  while (begin <= end) {
    final int mid = (begin + end) >>> 1;
    final int cmp = candidates.get(mid).compareTo(scratch);
    if (cmp < 0) {
      begin = mid + 1;
    }
    else if (cmp > 0) {
      end = mid - 1;
    }
    else {
      return mid;
    }
  }
  return -(begin + 1);
}
Binary - search for the next - larger element .
33,325
/**
 * Partition an attribute into 2^depth buckets of object indexes, by
 * recursively splitting at the mean value.
 *
 * @param adapter1 adapter to read values from the data
 * @param data1 attribute data
 * @param len number of objects
 * @param depth recursion depth (yields 2^depth partitions)
 * @return list of sorted index arrays, one per partition
 */
private <A> ArrayList<int[]> buildPartitions(NumberArrayAdapter<?, A> adapter1, A data1, int len, int depth) {
  final int[] idx = new int[len];
  final double[] values = new double[len];
  for (int i = 0; i < len; ++i) {
    idx[i] = i;
    values[i] = adapter1.getDouble(data1, i);
  }
  // Sort the index array by value, then sort the value array to match.
  IntegerArrayQuickSort.sort(idx, (a, b) -> Double.compare(values[a], values[b]));
  Arrays.sort(values);
  ArrayList<int[]> partitions = new ArrayList<>(1 << depth);
  divide(idx, values, partitions, 0, values.length, depth);
  return partitions;
}
Partitions an attribute .
33,326
/**
 * Recursive call to further subdivide the (sorted) array at the mean value.
 * Produces 2^depth partitions (empty ones included) in {@code ret}.
 *
 * @param idx object indexes, value-sorted
 * @param data sorted values, parallel to idx
 * @param ret output list of partitions
 * @param start range start (inclusive)
 * @param end range end (exclusive)
 * @param depth remaining recursion depth
 */
private void divide(int[] idx, double[] data, ArrayList<int[]> ret, int start, int end, int depth) {
  if (depth == 0) {
    // Leaf: emit the indexes of this range, sorted by object id again.
    final int[] part = Arrays.copyOfRange(idx, start, end);
    Arrays.sort(part);
    ret.add(part);
    return;
  }
  final int count = end - start;
  if (count == 0) {
    // Empty range still contributes all 2^depth (empty) leaves.
    for (int j = 1 << depth; j > 0; --j) {
      ret.add(new int[0]);
    }
    return;
  }
  // Split position: mean value of the range.
  double mean = 0.;
  for (int i = start; i < end; i++) {
    mean += data[i];
  }
  mean /= count;
  int pos = Arrays.binarySearch(data, start, end, mean);
  if (pos >= 0) {
    // Ties on the mean: shift toward the midpoint for balanced halves.
    final int opt = (start + end) >> 1;
    while (data[pos] == mean) {
      if (pos < opt) {
        pos++;
      }
      else if (pos > opt) {
        pos--;
      }
      else {
        break;
      }
    }
  }
  else {
    pos = (-pos - 1); // Insertion point.
  }
  divide(idx, data, ret, start, pos, depth - 1);
  divide(idx, data, ret, pos, end, depth - 1);
}
Recursive call to further subdivide the array .
33,327
/**
 * Intersect the two 1D grid decompositions to obtain the 2D cell-count matrix.
 *
 * @param res output matrix, res[x][y] = |partsx[x] ∩ partsy[y]|
 * @param partsx partitions of the first attribute (sorted index arrays)
 * @param partsy partitions of the second attribute (sorted index arrays)
 * @param gridsize number of partitions per attribute
 */
private void intersectionMatrix(int[][] res, ArrayList<int[]> partsx, ArrayList<int[]> partsy, int gridsize) {
  for (int x = 0; x < gridsize; x++) {
    final int[] px = partsx.get(x);
    final int[] row = res[x];
    for (int y = 0; y < gridsize; y++) {
      row[y] = intersectionSize(px, partsy.get(y));
    }
  }
}
Intersect the two 1d grid decompositions to obtain a 2d matrix .
33,328
/**
 * Compute the size of the intersection of two sorted integer lists, by a
 * linear merge.
 *
 * @param px first sorted array
 * @param py second sorted array
 * @return number of common elements
 */
private int intersectionSize(int[] px, int[] py) {
  int i = 0, j = 0, common = 0;
  while (i < px.length && j < py.length) {
    final int a = px[i], b = py[j];
    if (a < b) {
      ++i;
    }
    else if (a > b) {
      ++j;
    }
    else {
      // Match: count it and advance both.
      ++common;
      ++i;
      ++j;
    }
  }
  return common;
}
Compute the intersection of two sorted integer lists .
33,329
/**
 * Compute the MCE (marginal conditional entropy) value from the grid
 * intersection matrix, normalized by the grid entropy.
 *
 * @param mat 2D cell-count matrix
 * @param partsx partitions of the first attribute
 * @param partsy partitions of the second attribute
 * @param size total number of objects
 * @param gridsize number of partitions per attribute
 * @param loggrid log of the grid size (normalization constant)
 * @return normalized maximum of the two conditional entropies
 */
private double getMCEntropy(int[][] mat, ArrayList<int[]> partsx, ArrayList<int[]> partsy, int size, int gridsize, double loggrid) {
  // Per-row / per-column conditional entropies.
  double[] mx = new double[gridsize];
  double[] my = new double[gridsize];
  for (int i = 0; i < gridsize; i++) {
    final double sumx = (double) partsx.get(i).length;
    final double sumy = (double) partsy.get(i).length;
    for (int j = 0; j < gridsize; j++) {
      final double px = mat[i][j] / sumx;
      final double py = mat[j][i] / sumy;
      if (px > 0.) {
        mx[i] -= px * FastMath.log(px);
      }
      if (py > 0.) {
        my[i] -= py * FastMath.log(py);
      }
    }
  }
  // Weight by partition sizes and take the worse (larger) direction.
  double sumx = 0., sumy = 0.;
  for (int i = 0; i < gridsize; i++) {
    sumx += mx[i] * partsx.get(i).length;
    sumy += my[i] * partsy.get(i).length;
  }
  final double max = (sumx > sumy) ? sumx : sumy;
  return max / (size * loggrid);
}
Compute the MCE entropy value .
33,330
/**
 * Add an element to the heap, growing the backing array when necessary.
 *
 * @param e element to add
 */
public void add(E e) {
  // Ensure capacity for one more element.
  if (size + 1 > queue.length) {
    resize(size + 1);
  }
  size++;
  // Sift the new element up from the new last slot.
  heapifyUp(size - 1, e);
  heapModified();
}
Add an element to the heap .
33,331
/**
 * Combined operation that removes the top element and inserts a new element
 * instead, with a single sift-down instead of a remove + add.
 *
 * @param e replacement element
 * @return the previous top element
 */
@SuppressWarnings("unchecked")
public E replaceTopElement(E e) {
  final E previousTop = (E) queue[0];
  heapifyDown(0, e);
  heapModified();
  return previousTop;
}
Combined operation that removes the top element and inserts a new element instead .
33,332
/**
 * Remove the element at the given position, refilling the hole with the last
 * heap element and sifting it down.
 *
 * @param pos position to remove
 * @return the removed element, or null for an out-of-range position
 */
@SuppressWarnings("unchecked")
protected E removeAt(int pos) {
  if (pos < 0 || pos >= size) {
    return null;
  }
  final E removed = (E) queue[pos];
  // Move the last element into the hole.
  final Object refill = queue[size - 1];
  queue[size - 1] = null;
  size--;
  // NOTE(review): when pos was the last slot, refill is the removed element
  // itself and heapifyDown runs at pos == size — presumably harmless, but it
  // may retain a reference just past the heap end; confirm against heapifyDown.
  heapifyDown(pos, refill);
  heapModified();
  return removed;
}
Remove the element at the given position .
33,333
/**
 * Grow the backing array to hold at least the requested capacity: small
 * arrays roughly double, larger ones grow by 50%.
 *
 * @param requiredSize minimum capacity needed
 * @throws OutOfMemoryError when the capacity computation overflows
 */
protected final void resize(int requiredSize) {
  // Growth policy: +100% below 64 entries, +50% afterwards.
  int newCapacity = (queue.length < 64) ? ((queue.length + 1) << 1) : ((queue.length >> 1) + queue.length);
  if (newCapacity < 0) {
    // Integer overflow in the growth computation.
    throw new OutOfMemoryError();
  }
  if (requiredSize > newCapacity) {
    newCapacity = requiredSize;
  }
  queue = Arrays.copyOf(queue, newCapacity);
}
Resize the queue to provide at least the requested capacity.
33,334
/**
 * Clear the heap, releasing all element references for garbage collection.
 */
public void clear() {
  // Null out the used slots so the objects can be collected.
  Arrays.fill(queue, 0, size, null);
  size = 0;
  heapModified();
}
Clear the heap .
33,335
/**
 * Test whether the heap invariant still holds (every parent orders before its
 * children).
 *
 * @return a description of the first violation found, or null when valid
 */
protected String checkHeap() {
  for (int i = 1; i < size; i++) {
    final int parent = (i - 1) >>> 1;
    if (comparator.compare(queue[parent], queue[i]) > 0) {
      // Heap property violated between this node and its parent.
      return "@" + parent + ": " + queue[parent] + " < @" + i + ": " + queue[i];
    }
  }
  return null;
}
Test whether the heap is still valid .
33,336
/**
 * Run the configured workflow: load the database, run the algorithms, attach
 * the settings, then evaluate and output the results.
 */
public void run() {
  final Database db = inputStep.getDatabase();
  hier = db.getHierarchy();
  algorithmStep.runAlgorithms(db);
  // Record the settings used alongside the results.
  hier.add(db, new SettingsResult(settings));
  evaluationStep.runEvaluators(hier, db);
  outputStep.runResultHandlers(hier, db);
}
Run the specified algorithm using the specified database connection.
33,337
/**
 * Set the bounding box to the same extent as some other spatial object.
 *
 * @param obj object to copy the bounds from (must have the same dimensionality)
 */
public void set(SpatialComparable obj) {
  final int dim = min.length;
  assert (obj.getDimensionality() == dim);
  if (obj instanceof ModifiableHyperBoundingBox) {
    // Fast path: bulk-copy the backing arrays.
    final ModifiableHyperBoundingBox other = (ModifiableHyperBoundingBox) obj;
    System.arraycopy(other.getMinRef(), 0, min, 0, dim);
    System.arraycopy(other.getMaxRef(), 0, max, 0, dim);
    return;
  }
  for (int d = 0; d < dim; d++) {
    min[d] = obj.getMin(d);
    max[d] = obj.getMax(d);
  }
}
Set the bounding box to the same as some other spatial object .
33,338
/**
 * Extend the bounding box to also cover some other spatial object.
 *
 * @param obj object to include (must have the same dimensionality)
 * @return true when the box was actually enlarged
 */
public boolean extend(SpatialComparable obj) {
  final int dim = min.length;
  assert (obj.getDimensionality() == dim);
  boolean changed = false;
  for (int d = 0; d < dim; d++) {
    final double lo = obj.getMin(d), hi = obj.getMax(d);
    if (lo < min[d]) {
      min[d] = lo;
      changed = true;
    }
    if (hi > max[d]) {
      max[d] = hi;
      changed = true;
    }
  }
  return changed;
}
Extend the bounding box by some other spatial object .
33,339
/**
 * Find the class label column in the given data bundle.
 *
 * @param bundle bundle to inspect
 * @return index of the first class-label column, or -1 when none exists
 */
public static int findClassLabelColumn(MultipleObjectsBundle bundle) {
  final int cols = bundle.metaLength();
  for (int col = 0; col < cols; ++col) {
    if (TypeUtil.CLASSLABEL.isAssignableFromType(bundle.meta(col))) {
      return col;
    }
  }
  return -1;
}
Find the class label column in the given data set .
33,340
/**
 * Compares two HierarchicalClassLabels. Names at higher levels are compared
 * first; lower levels are compared only when the parent names are equal.
 * Two Integer names are compared numerically; otherwise (mixed types or
 * strings) the names are compared as strings.
 *
 * @param o other class label (must be a HierarchicalClassLabel)
 * @return comparison result; ties are broken by hierarchy depth
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public int compareTo(ClassLabel o) {
  final HierarchicalClassLabel other = (HierarchicalClassLabel) o;
  final int common = Math.min(this.levelwiseNames.length, other.levelwiseNames.length);
  for (int i = 0; i < common; i++) {
    final Comparable first = this.levelwiseNames[i];
    final Comparable second = other.levelwiseNames[i];
    int comp;
    if (first instanceof Integer && second instanceof Integer) {
      comp = first.compareTo(second); // Numeric comparison.
    }
    else {
      // Mixed or string-valued names: compare the string forms explicitly,
      // instead of relying on a caught ClassCastException for control flow
      // as the previous version did.
      final String s1 = first instanceof Integer ? first.toString() : (String) first;
      final String s2 = second instanceof Integer ? second.toString() : (String) second;
      comp = s1.compareTo(s2);
    }
    if (comp != 0) {
      return comp;
    }
  }
  // All shared levels equal: the shallower label orders first.
  return Integer.compare(this.levelwiseNames.length, other.levelwiseNames.length);
}
Compares two HierarchicalClassLabels . Names at higher levels are compared first . Names at a lower level are compared only if their parent - names are equal . Names at a level are tried to be compared as integer values . If this does not succeed both names are compared as Strings .
33,341
/**
 * Returns the name at the given level as a String (Integer-valued names are
 * converted).
 *
 * @param level hierarchy level
 * @return name at that level
 */
public String getNameAt(int level) {
  final Object name = this.levelwiseNames[level];
  return name instanceof Integer ? name.toString() : (String) name;
}
Returns the name at the given level as a String .
33,342
/**
 * Recursively walk through the result tree, appending one indented line per
 * result to the buffer.
 *
 * @param buf output buffer
 * @param hier result hierarchy
 * @param result current result node
 * @param depth current indentation depth (recursion is capped at 50)
 */
private void recursiveLogResult(StringBuilder buf, Hierarchy<Result> hier, Result result, int depth) {
  if (result == null) {
    buf.append("null");
    LOG.warning("null result!");
    return;
  }
  if (depth > 50) {
    // Guard against cyclic result hierarchies.
    LOG.warning("Probably infinitely nested results, aborting!");
    return;
  }
  for (int i = 0; i < depth; i++) {
    buf.append(' ');
  }
  buf.append(result.getClass().getSimpleName()).append(": ") //
      .append(result.getLongName()) //
      .append(" (").append(result.getShortName()).append(")\n");
  if (hier.numChildren(result) > 0) {
    for (It<Result> it = hier.iterChildren(result); it.valid(); it.advance()) {
      recursiveLogResult(buf, hier, it.get(), depth + 1);
    }
  }
}
Recursively walk through the result tree .
33,343
/**
 * Add a (set) flag to the parameter list.
 *
 * @param optionid option to set
 * @return this, for method chaining
 */
public ListParameterization addFlag(OptionID optionid) {
  parameters.add(new ParameterPair(optionid, Flag.SET));
  return this;
}
Add a flag to the parameter list.
33,344
/**
 * Serialize the parameter list into command-line style strings
 * (option prefix + option name, followed by the value when present).
 *
 * @return serialized parameter strings
 */
public ArrayList<String> serialize() {
  final ArrayList<String> out = new ArrayList<>();
  for (ParameterPair pair : parameters) {
    out.add(SerializedParameterization.OPTION_PREFIX + pair.option.toString());
    final Object value = pair.value;
    if (value instanceof String) {
      out.add((String) value);
    }
    else if (value instanceof Class) {
      // Classes are referenced by their canonical name.
      out.add(((Class<?>) value).getCanonicalName());
    }
    else {
      out.add(value.toString());
    }
  }
  return out;
}
Serialize parameters .
33,345
/**
 * Run Fast-ABOD on the data set.
 *
 * For the linear kernel, the cheaper kNN-based variant is tried first;
 * if no suitable kNN index is available, or a non-linear kernel is used,
 * the generic kernel-matrix version is run instead.
 *
 * @param db database
 * @param relation relation of vectors to process
 * @return outlier result (inverted scores: smaller ABOF means more outlying)
 */
public OutlierResult run(Database db, Relation<V> relation) {
  DBIDs ids = relation.getDBIDs();
  WritableDoubleDataStore abodvalues = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmaxabod = new DoubleMinMax();
  if(kernelFunction.getClass() == LinearKernelFunction.class) {
    // Linear kernel: try the kNN-accelerated path; fall back if no index.
    if(!kNNABOD(db, relation, ids, abodvalues, minmaxabod)) {
      fastABOD(db, relation, ids, abodvalues, minmaxabod);
    }
  }
  else {
    fastABOD(db, relation, ids, abodvalues, minmaxabod);
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Angle-Based Outlier Degree", "abod-outlier", abodvalues, relation.getDBIDs());
  // Inverted meta: low variance of angles indicates an outlier.
  OutlierScoreMeta scoreMeta = new InvertedOutlierScoreMeta(minmaxabod.getMin(), minmaxabod.getMax(), 0.0, Double.POSITIVE_INFINITY);
  return new OutlierResult(scoreMeta, scoreResult);
}
Run Fast - ABOD on the data set .
33,346
/**
 * Simpler kNN-based ABOD variant that can exploit indexing.
 *
 * Tries to obtain an optimized (index-backed) kNN query, first for squared
 * Euclidean, then plain Euclidean distance; gives up (returns false) if
 * neither is available.
 *
 * @param db database
 * @param relation data relation
 * @param ids objects to score
 * @param abodvalues output storage for ABOF scores
 * @param minmaxabod output min/max tracker for the scores
 * @return true if an optimized kNN query was available and scoring was done
 */
private boolean kNNABOD(Database db, Relation<V> relation, DBIDs ids, WritableDoubleDataStore abodvalues, DoubleMinMax minmaxabod) {
  DistanceQuery<V> dq = db.getDistanceQuery(relation, SquaredEuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnq = db.getKNNQuery(dq, DatabaseQuery.HINT_OPTIMIZED_ONLY);
  boolean squared = true; // Whether kNN distances are squared Euclidean.
  if(knnq == null) {
    dq = db.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
    knnq = db.getKNNQuery(dq, DatabaseQuery.HINT_OPTIMIZED_ONLY);
    if(knnq == null) {
      return false; // No index support - caller must fall back.
    }
    squared = false;
  }
  SimilarityQuery<V> lk = db.getSimilarityQuery(relation, LinearKernelFunction.STATIC);
  int k1 = k + 1; // +1 because the query point is its own nearest neighbor.
  MeanVariance s = new MeanVariance();
  for(DBIDIter pA = ids.iter(); pA.valid(); pA.advance()) {
    KNNList nl = knnq.getKNNForDBID(pA, k1);
    double simAA = lk.similarity(pA, pA);
    s.reset();
    DoubleDBIDListIter iB = nl.iter(), iC = nl.iter();
    for(; iB.valid(); iB.advance()) {
      double dAB = iB.doubleValue();
      double simAB = lk.similarity(pA, iB);
      if(!(dAB > 0.)) {
        continue; // Skip the query point itself / duplicates (also NaN-safe).
      }
      // Unordered pairs only: start C after B.
      for(iC.seek(iB.getOffset() + 1); iC.valid(); iC.advance()) {
        double dAC = iC.doubleValue();
        double simAC = lk.similarity(pA, iC);
        if(!(dAC > 0.)) {
          continue;
        }
        double simBC = lk.similarity(iB, iC);
        // Dot product <AB, AC> expressed via kernel similarities.
        double numerator = simBC - simAB - simAC + simAA;
        if(squared) {
          // dAB, dAC are squared distances: weight is 1/(|AB| * |AC|).
          double div = 1. / (dAB * dAC);
          s.put(numerator * div, FastMath.sqrt(div));
        }
        else {
          // dAB, dAC are plain distances: same math without the sqrt.
          double sqrtdiv = 1. / (dAB * dAC);
          s.put(numerator * sqrtdiv * sqrtdiv, sqrtdiv);
        }
      }
    }
    // ABOF = weighted variance of the angle spectrum.
    final double abof = s.getNaiveVariance();
    minmaxabod.put(abof);
    abodvalues.putDouble(pA, abof);
  }
  return true;
}
Simpler kNN - based variant , which can make use of index structures for the nearest - neighbor search .
33,347
/**
 * Full kernel-based Fast-ABOD: builds a kernel matrix, finds each point's k
 * nearest neighbors in kernel-induced squared distance, and scores by the
 * weighted variance of pairwise angles to those neighbors.
 *
 * @param db database
 * @param relation data relation
 * @param ids objects to score
 * @param abodvalues output storage for ABOF scores
 * @param minmaxabod output min/max tracker for the scores
 */
private void fastABOD(Database db, Relation<V> relation, DBIDs ids, WritableDoubleDataStore abodvalues, DoubleMinMax minmaxabod) {
  SimilarityQuery<V> sq = db.getSimilarityQuery(relation, kernelFunction);
  KernelMatrix kernelMatrix = new KernelMatrix(sq, relation, ids);
  MeanVariance s = new MeanVariance();
  KNNHeap nn = DBIDUtil.newHeap(k);
  for(DBIDIter pA = ids.iter(); pA.valid(); pA.advance()) {
    final double simAA = kernelMatrix.getSimilarity(pA, pA);
    // Find the k nearest neighbors in kernel-induced squared distance.
    nn.clear();
    for(DBIDIter nB = relation.iterDBIDs(); nB.valid(); nB.advance()) {
      if(DBIDUtil.equal(nB, pA)) {
        continue;
      }
      double simBB = kernelMatrix.getSimilarity(nB, nB);
      double simAB = kernelMatrix.getSimilarity(pA, nB);
      // Squared distance via the kernel trick: k(A,A)+k(B,B)-2 k(A,B).
      double sqdAB = simAA + simBB - simAB - simAB;
      if(!(sqdAB > 0.)) {
        continue; // Skip duplicates (and NaN).
      }
      nn.insert(sqdAB, nB);
    }
    KNNList nl = nn.toKNNList();
    s.reset();
    DoubleDBIDListIter iB = nl.iter(), iC = nl.iter();
    for(; iB.valid(); iB.advance()) {
      double sqdAB = iB.doubleValue();
      double simAB = kernelMatrix.getSimilarity(pA, iB);
      if(!(sqdAB > 0.)) {
        continue;
      }
      // Unordered pairs only: start C after B.
      for(iC.seek(iB.getOffset() + 1); iC.valid(); iC.advance()) {
        double sqdAC = iC.doubleValue();
        double simAC = kernelMatrix.getSimilarity(pA, iC);
        if(!(sqdAC > 0.)) {
          continue;
        }
        double simBC = kernelMatrix.getSimilarity(iB, iC);
        // Dot product <AB, AC> expressed via kernel similarities.
        double numerator = simBC - simAB - simAC + simAA;
        double div = 1. / (sqdAB * sqdAC);
        s.put(numerator * div, FastMath.sqrt(div));
      }
    }
    // ABOF = weighted variance of the angle spectrum.
    final double abof = s.getNaiveVariance();
    minmaxabod.put(abof);
    abodvalues.putDouble(pA, abof);
  }
}
Full kernel - based version .
33,348
/**
 * Return the Erfc weight, scaled by the standard deviation.
 * The max parameter is ignored by this weight function.
 *
 * @param distance distance value
 * @param max ignored
 * @param stddev standard deviation; non-positive values yield weight 1
 * @return erfc-based weight in [0, 1]
 */
public double getWeight(double distance, double max, double stddev) {
  return stddev <= 0 ? 1 : NormalDistribution.erfc(MathUtil.SQRTHALF * distance / stddev);
}
Return Erfc weight scaled by standard deviation . max is ignored .
33,349
/**
 * Runs the LOF algorithm on the given database, in three steps:
 * materialize kNN sets, compute local reachability densities (LRD),
 * then compute local outlier factors (LOF).
 *
 * @param database database
 * @param relation relation of objects to process
 * @return outlier result with LOF scores (quotient scores, baseline 1.0)
 */
public OutlierResult run(Database database, Relation<O> relation) {
  StepProgress stepprog = LOG.isVerbose() ? new StepProgress("LOF", 3) : null;
  DBIDs ids = relation.getDBIDs();
  LOG.beginStep(stepprog, 1, "Materializing nearest-neighbor sets.");
  KNNQuery<O> knnq = DatabaseUtil.precomputedKNNQuery(database, relation, getDistanceFunction(), k);
  LOG.beginStep(stepprog, 2, "Computing Local Reachability Densities (LRD).");
  // LRDs are only needed temporarily for the LOF computation.
  WritableDoubleDataStore lrds = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  computeLRDs(knnq, ids, lrds);
  LOG.beginStep(stepprog, 3, "Computing Local Outlier Factors (LOF).");
  WritableDoubleDataStore lofs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  DoubleMinMax lofminmax = new DoubleMinMax();
  computeLOFScores(knnq, ids, lrds, lofs, lofminmax);
  LOG.setCompleted(stepprog);
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Local Outlier Factor", "lof-outlier", lofs, ids);
  OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta(lofminmax.getMin(), lofminmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 1.0);
  return new OutlierResult(scoreMeta, scoreResult);
}
Runs the LOF algorithm on the given database .
33,350
/**
 * Compute the local reachability density (LRD) of every object.
 *
 * @param knnq kNN query to use
 * @param ids objects to process
 * @param lrds output storage for the LRD values
 */
private void computeLRDs(KNNQuery<O> knnq, DBIDs ids, WritableDoubleDataStore lrds) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Local Reachability Densities (LRD)", ids.size(), LOG) : null;
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    lrds.putDouble(it, computeLRD(knnq, it));
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
Compute local reachability distances .
33,351
/**
 * Compute a single local reachability density (LRD): the inverse of the mean
 * reachability distance to the k nearest neighbors.
 *
 * @param knnq kNN query to use
 * @param curr object to compute the LRD for
 * @return LRD value, or positive infinity if all reachability distances are 0
 */
protected double computeLRD(KNNQuery<O> knnq, DBIDIter curr) {
  final KNNList neighbors = knnq.getKNNForDBID(curr, k);
  double sum = 0.0;
  int count = 0;
  for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
    if(DBIDUtil.equal(curr, neighbor)) {
      continue; // Do not count the query point itself.
    }
    KNNList neighborsNeighbors = knnq.getKNNForDBID(neighbor, k);
    // Reachability distance: max of actual distance and the neighbor's k-distance.
    sum += MathUtil.max(neighbor.doubleValue(), neighborsNeighbors.getKNNDistance());
    count++;
  }
  return (sum > 0) ? (count / sum) : Double.POSITIVE_INFINITY;
}
Compute a single local reachability distance .
33,352
/**
 * Compute the local outlier factor (LOF) of every object.
 *
 * @param knnq kNN query to use
 * @param ids objects to process
 * @param lrds precomputed local reachability densities
 * @param lofs output storage for the LOF scores
 * @param lofminmax output min/max tracker for the scores
 */
private void computeLOFScores(KNNQuery<O> knnq, DBIDs ids, DoubleDataStore lrds, WritableDoubleDataStore lofs, DoubleMinMax lofminmax) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Local Outlier Factor (LOF) scores", ids.size(), LOG) : null;
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    final double lof = computeLOFScore(knnq, it, lrds);
    lofs.putDouble(it, lof);
    lofminmax.put(lof);
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
Compute local outlier factors .
33,353
/**
 * Compute a single LOF score: the mean LRD of the neighbors divided by the
 * object's own LRD.
 *
 * @param knnq kNN query to use
 * @param cur object to score
 * @param lrds precomputed local reachability densities
 * @return LOF score; 1.0 when the object's own LRD is infinite
 */
protected double computeLOFScore(KNNQuery<O> knnq, DBIDRef cur, DoubleDataStore lrds) {
  final double lrdp = lrds.doubleValue(cur);
  if(Double.isInfinite(lrdp)) {
    return 1.0;
  }
  final KNNList neighbors = knnq.getKNNForDBID(cur, k);
  double sum = 0.;
  int count = 0;
  for(DBIDIter it = neighbors.iter(); it.valid(); it.advance()) {
    // Skip the query point itself.
    if(!DBIDUtil.equal(cur, it)) {
      sum += lrds.doubleValue(it);
      ++count;
    }
  }
  return sum / (lrdp * count);
}
Compute a single LOF score .
33,354
/**
 * Computes and returns the aggregated (maximum) knn distance over all
 * entries of this node.
 *
 * @return the largest knn distance of any entry
 */
protected double kNNDistance() {
  double max = getEntry(0).getKnnDistance();
  for(int i = 1; i < getNumEntries(); i++) {
    final double d = getEntry(i).getKnnDistance();
    if(max < d) {
      max = d;
    }
  }
  return max;
}
Computes and returns the aggregated knn distance of this node
33,355
/**
 * Read a line into the given buffer, refilling the internal character buffer
 * from the underlying reader as needed. Carriage returns are dropped; the
 * terminating newline is consumed but not appended.
 *
 * @param buf buffer to append the line's characters to
 * @return true if at least one character (or an empty terminated line) was
 *         read; false on end of stream with no data
 * @throws IOException on read errors from the underlying stream
 */
public boolean readLine(Appendable buf) throws IOException {
  boolean success = false;
  while(true) {
    // Drain the current buffer contents.
    while(pos < end) {
      success = true;
      final char c = buffer[pos++];
      if(c == '\n') {
        return success; // Line complete.
      }
      if(c == '\r') {
        continue; // Swallow carriage returns (Windows line endings).
      }
      buf.append(c);
    }
    assert (pos >= end) : "Buffer wasn't empty when refilling!";
    // Refill from the underlying reader.
    end = in.read(buffer, 0, buffer.length);
    pos = 0;
    if(end < 0) {
      return success; // End of stream.
    }
  }
}
Read a line into the given buffer .
33,356
/**
 * Choose the best pivot for the given rank, from five candidate positions
 * m1 <= m2 <= m3 <= m4 <= m5: clamp toward the outer candidates first, then
 * the inner ones, defaulting to the median candidate.
 *
 * @param rank target rank
 * @return the chosen pivot position
 */
private static final int bestPivot(int rank, int m1, int m2, int m3, int m4, int m5) {
  return rank < m1 ? m1 //
      : rank > m5 ? m5 //
          : rank < m2 ? m2 //
              : rank > m4 ? m4 : m3;
}
Choose the best pivot for the given rank .
33,357
/**
 * Calls the super method, then writes the parameter k_max followed by the
 * knn distances of this entry to the specified stream.
 *
 * @param out output stream
 * @throws IOException on write errors
 */
public void writeExternal(ObjectOutput out) throws IOException {
  super.writeExternal(out);
  out.writeInt(knnDistances.length);
  for(double dist : knnDistances) {
    out.writeDouble(dist);
  }
}
Calls the super method and writes the parameter k_max and the knn distances of this entry to the specified stream .
33,358
/**
 * Calls the super method, then reads the parameter k_max and the knn
 * distances of this entry from the specified input stream.
 *
 * @param in input stream
 * @throws IOException on read errors
 * @throws ClassNotFoundException propagated from the super implementation
 */
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  super.readExternal(in);
  final int k_max = in.readInt();
  final double[] dists = new double[k_max];
  for(int i = 0; i < k_max; i++) {
    dists[i] = in.readDouble();
  }
  knnDistances = dists;
}
Calls the super method and reads the parameter k_max and knn distance of this entry from the specified input stream .
33,359
/**
 * Returns the function value of the approximation line at the specified k.
 *
 * @param k position to evaluate at
 * @return m * log(k) + t, or positive infinity for k below k_0
 */
public double getValueAt(int k) {
  return k < k_0 ? Double.POSITIVE_INFINITY : m * FastMath.log(k) + t;
}
Returns the function value of the approximation line at the specified k .
33,360
/**
 * Perform one iteration of the Levenberg-Marquardt approximation loop:
 * solve the damped normal equations, try the resulting parameter update,
 * and accept it (decreasing lambda) only if it improves chi-squared.
 */
public void iterate() {
  // Build the damped coefficient matrix: alpha with (1 + lambda) on the diagonal.
  for(int i = 0; i < numfit; i++) {
    System.arraycopy(alpha[i], 0, covmat[i], 0, numfit);
    covmat[i][i] *= (1.0 + lambda);
  }
  // Solve covmat * delta = beta for the parameter update.
  LinearEquationSystem ls = new LinearEquationSystem(covmat, beta);
  ls.solveByTotalPivotSearch();
  covmat = ls.getCoefficents();
  deltaparams = ls.getRHS();
  // Apply the update only to the parameters marked as fittable.
  for(int i = 0, i2 = 0; i < numparams; i++) {
    if(dofit[i]) {
      paramstry[i] = params[i] + deltaparams[i2++];
    }
  }
  double newchisq = simulateParameters(paramstry);
  if(newchisq < chisq) {
    // Improvement: reduce damping (guarding against underflow) and accept.
    if(lambda * 0.1 > Double.MIN_NORMAL) {
      lambda *= 0.1;
    }
    chisq = newchisq;
    for(int i = 0; i < numfit; i++) {
      System.arraycopy(covmat[i], 0, alpha[i], 0, numfit);
      beta[i] = deltaparams[i];
    }
    System.arraycopy(paramstry, 0, params, 0, numparams);
  }
  else {
    // No improvement: increase damping (guarding against overflow), keep old params.
    if(lambda * 10 < Double.MAX_VALUE) {
      lambda *= 10;
    }
  }
}
Perform an iteration of the approximation loop .
33,361
/**
 * Iterate until convergence, for at most maxruns iterations. Convergence is
 * declared after maxsmall consecutive-enough iterations with only a tiny
 * chi-squared improvement (a decrease smaller than the threshold "small").
 */
public void run() {
  int maxruns = this.maxruns, maxsmall = this.maxsmall;
  double oldchi = getChiSq();
  while(maxruns-- > 0) {
    iterate();
    double newchi = getChiSq(), deltachi = newchi - oldchi;
    oldchi = newchi;
    // Count improvements smaller than "small"; stop once we have seen enough.
    if(deltachi < 0 && deltachi > -small && --maxsmall < 0) {
      break;
    }
  }
}
Iterate until convergence , at most maxruns times .
33,362
/**
 * Get the position data of the object. Returns a defensive copy so callers
 * cannot modify the stored array.
 *
 * @param object object to look up
 * @return a copy of the stored position, or null if not present
 */
public double[] get(T object) {
  final double[] v = map.get(object);
  return v != null ? v.clone() : null;
}
Get the position data of the object
33,363
/**
 * Compute the relative fill: the fraction of the total area covered by
 * occupied cells. Useful for triggering a relayout when unsatisfactory.
 *
 * @return covered area divided by total area
 */
public double relativeFill() {
  final int cols = widths.size(), rows = heights.size();
  double covered = 0.0;
  for(int y = 0; y < rows; y++) {
    for(int x = 0; x < cols; x++) {
      if(usage.get(y).get(x) != null) {
        covered += widths.get(x) * heights.get(y);
      }
    }
  }
  return covered / (twidth * theight);
}
Compute the relative fill . Useful for triggering a relayout if the relative fill is not satisfactory .
33,364
/**
 * Returns true if the intervals of this unit contain the specified feature
 * vector. Intervals are half-open: lower bound inclusive, upper exclusive.
 *
 * @param vector feature vector to test
 * @return true when every relevant dimension lies within its interval
 */
public boolean contains(NumberVector vector) {
  for(int i = 0, j = 0; i < dims.length; i++, j += 2) {
    final double v = vector.doubleValue(dims[i]);
    if(v < bounds[j] || v >= bounds[j + 1]) {
      return false;
    }
  }
  return true;
}
Returns true if the intervals of this unit contain the specified feature vector .
33,365
/**
 * Adds the id of the specified feature vector to this unit, if this unit
 * contains the feature vector.
 *
 * @param id id of the vector
 * @param vector the feature vector
 * @return true if the vector was contained and the id was added
 */
public boolean addFeatureVector(DBIDRef id, NumberVector vector) {
  if(!contains(vector)) {
    return false;
  }
  ids.add(id);
  return true;
}
Adds the id of the specified feature vector to this unit if this unit contains the feature vector .
33,366
/**
 * Returns true if this unit is the right neighbor of the given unit: all but
 * the last dimension agree, and this unit's lower bound in the last
 * dimension coincides with the other unit's upper bound.
 *
 * @param unit candidate left neighbor
 * @param d unused here (kept for interface compatibility)
 * @return true if this unit is the right neighbor
 */
protected boolean containsRightNeighbor(CLIQUEUnit unit, int d) {
  final int last = dims.length - 1;
  if(!checkDimensions(unit, last)) {
    return false;
  }
  return bounds[last << 1] == unit.bounds[(last << 1) + 1];
}
Returns true if this unit is the right neighbor of the given unit .
33,367
/**
 * Joins this unit with the specified unit (the CLIQUE candidate-generation
 * step): both units must share all but the last dimension, and the combined
 * unit must still be dense enough.
 *
 * @param other unit to join with
 * @param all total number of feature vectors (for the density ratio)
 * @param tau density threshold
 * @return the joined unit, or null if the units cannot be joined or the
 *         intersection is not dense enough
 */
protected CLIQUEUnit join(CLIQUEUnit other, double all, double tau) {
  if(other.dimensionality() != this.dimensionality()) {
    return null;
  }
  int e = dims.length - 1;
  // The first e dimensions (intervals) must be identical.
  if(!checkDimensions(other, e)) {
    return null;
  }
  // Enforce a canonical order to avoid generating duplicate joins.
  if(dims[e] >= other.dims[e]) {
    return null;
  }
  HashSetModifiableDBIDs resultIDs = DBIDUtil.newHashSet(this.ids);
  resultIDs.retainAll(other.ids);
  // Prune candidates below the density threshold tau.
  if(resultIDs.size() / all < tau) {
    return null;
  }
  return new CLIQUEUnit(this, other.dims[e], other.bounds[e << 1], other.bounds[(e << 1) + 1], resultIDs);
}
Joins this unit with the specified unit .
33,368
/**
 * Check that the first e dimensions of this unit and the given unit agree:
 * same dimension numbers and identical interval bounds.
 *
 * @param other unit to compare against
 * @param e number of leading dimensions to check
 * @return true if the first e dimensions (and both their bounds) match
 */
private boolean checkDimensions(CLIQUEUnit other, int e) {
  for(int i = 0, j = 0; i < e; i++, j += 2) {
    // BUG FIX: the upper bound was previously compared against itself
    // (bounds[j + 1] != bounds[j + 1]), which is always false, so the upper
    // bounds were never actually compared. Compare against other.bounds.
    if(dims[i] != other.dims[i] || bounds[j] != other.bounds[j] || bounds[j + 1] != other.bounds[j + 1]) {
      return false;
    }
  }
  return true;
}
Check that the first e dimensions agree .
33,369
/**
 * Merge the min/max result of a parallel instance into this aggregator.
 *
 * @param minmax partial min/max to merge in
 */
protected synchronized void merge(DoubleMinMax minmax) {
  final DoubleMinMax target = this.minmax;
  target.put(minmax.getMin());
  target.put(minmax.getMax());
}
Merge the result of an instance .
33,370
/**
 * Enable capturing of mousedown events on the managed element.
 */
public void enableStart() {
  ((EventTarget) element).addEventListener(SVGConstants.SVG_EVENT_MOUSEDOWN, this, false);
}
Enable capturing of mousedown events .
33,371
/**
 * Disable capturing of mousedown events on the managed element.
 */
public void disableStart() {
  ((EventTarget) element).removeEventListener(SVGConstants.SVG_EVENT_MOUSEDOWN, this, false);
}
Disable capturing of mousedown events .
33,372
/**
 * Enable capturing of mousemove, mouseup and mouseout events on the document
 * root (so a drag can be followed even outside the starting element).
 */
protected void enableStop() {
  final EventTarget root = svgp.getDocument().getRootElement();
  root.addEventListener(SVGConstants.SVG_EVENT_MOUSEMOVE, this, false);
  root.addEventListener(SVGConstants.SVG_EVENT_MOUSEUP, this, false);
  root.addEventListener(SVGConstants.SVG_EVENT_MOUSEOUT, this, false);
}
Enable capturing of mousemove and mouseup events .
33,373
/**
 * Disable capturing of mousemove, mouseup and mouseout events on the
 * document root.
 */
protected void disableStop() {
  final EventTarget root = svgp.getDocument().getRootElement();
  root.removeEventListener(SVGConstants.SVG_EVENT_MOUSEMOVE, this, false);
  root.removeEventListener(SVGConstants.SVG_EVENT_MOUSEUP, this, false);
  root.removeEventListener(SVGConstants.SVG_EVENT_MOUSEOUT, this, false);
}
Disable capturing of mousemove and mouseup events .
33,374
/**
 * Return the event coordinates for this event, relative to the coordinate
 * reference element.
 *
 * @param evt event to resolve
 * @return coordinates in the reference system
 */
protected SVGPoint getCoordinates(Event evt) {
  return SVGUtil.elementCoordinatesFromEvent(svgp.getDocument(), coordref, evt);
}
Return the event coordinates for this event .
33,375
/**
 * Action to perform on drag start: delegates to the listener if one is set,
 * otherwise accepts the drag.
 *
 * @param startPoint drag starting point
 * @param evt triggering event
 * @return whether the drag should proceed
 */
protected boolean startDrag(SVGPoint startPoint, Event evt) {
  return listener == null || listener.startDrag(startPoint, evt);
}
Action to do on drag start .
33,376
/**
 * Method called during drags: delegates to the listener if one is set,
 * otherwise accepts the motion.
 *
 * @param startPoint drag starting point
 * @param dragPoint current drag point
 * @param evt triggering event
 * @param inside whether the pointer is inside the element
 * @return whether the drag should continue
 */
protected boolean duringDrag(SVGPoint startPoint, SVGPoint dragPoint, Event evt, boolean inside) {
  return listener == null || listener.duringDrag(startPoint, dragPoint, evt, inside);
}
Method called during drags .
33,377
/**
 * Make the rectangle invisible (fully transparent fill), while keeping the
 * pointer cursor so it remains interactive.
 */
public void makeInvisible() {
  final CSSClass css = new CSSClass(this, "unused");
  css.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0");
  css.setStatement(SVGConstants.CSS_CURSOR_PROPERTY, SVGConstants.CSS_POINTER_VALUE);
  SVGUtil.setAtt(element, SVGConstants.SVG_STYLE_ATTRIBUTE, css.inlineCSS());
}
Make the rectangle invisible .
33,378
/**
 * Make the rectangle visible (semi-transparent green fill), for debugging
 * purposes.
 */
public void makeVisible() {
  final CSSClass css = new CSSClass(this, "unused");
  css.setStatement(SVGConstants.CSS_FILL_PROPERTY, SVGConstants.CSS_GREEN_VALUE);
  css.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0.2");
  css.setStatement(SVGConstants.CSS_CURSOR_PROPERTY, SVGConstants.CSS_POINTER_VALUE);
  SVGUtil.setAtt(element, SVGConstants.SVG_STYLE_ATTRIBUTE, css.inlineCSS());
}
Make the rectangle visible for debug purposes .
33,379
/**
 * Set the fill of the score bar.
 *
 * @param val current value to display
 * @param min minimum of the value range
 * @param max maximum of the value range
 */
public void setFill(double val, double min, double max) {
  this.val = val;
  this.min = min;
  this.max = max;
}
Set the fill of the score bar .
33,380
/**
 * Build the actual SVG element for the score bar: background rectangle,
 * proportional fill bar (only when the value lies within [min, max]), and
 * optional value and label texts.
 *
 * @param svgp plot to create elements for
 * @param x left coordinate
 * @param y top coordinate
 * @param width bar width
 * @param height bar height
 * @return the assembled SVG group element
 */
public Element build(SVGPlot svgp, double x, double y, double width, double height) {
  Element barchart = svgp.svgElement(SVGConstants.SVG_G_TAG);
  // Gray background rectangle spanning the full bar.
  Element bar = svgp.svgRect(x, y, width, height);
  bar.setAttribute(SVGConstants.SVG_FILL_ATTRIBUTE, "#a0a0a0");
  bar.setAttribute(SVGConstants.SVG_STROKE_ATTRIBUTE, "#a0a0a0");
  bar.setAttribute(SVGConstants.SVG_STROKE_WIDTH_ATTRIBUTE, String.valueOf(height * 0.01));
  barchart.appendChild(bar);
  // Fill bar, only drawn for valid values within a non-degenerate range.
  if(val >= min && val <= max && min < max) {
    final double frame = 0.02 * height; // Inner margin around the fill.
    double fpos = (val - min) / (max - min) * (width - 2 * frame);
    Element chart;
    if(reversed) {
      // Reversed: fill grows from the right edge.
      chart = svgp.svgRect(x + frame + fpos, y + frame, width - fpos - 2 * frame, height - 2 * frame);
    }
    else {
      chart = svgp.svgRect(x + frame, y + frame, fpos, height - 2 * frame);
    }
    chart.setAttribute(SVGConstants.SVG_FILL_ATTRIBUTE, "#d4e4f1");
    chart.setAttribute(SVGConstants.SVG_STROKE_ATTRIBUTE, "#a0a0a0");
    chart.setAttribute(SVGConstants.SVG_STROKE_WIDTH_ATTRIBUTE, String.valueOf(height * 0.01));
    barchart.appendChild(chart);
  }
  // Optional numeric value text inside the bar.
  if(format != null) {
    String num = Double.isNaN(val) ? "NaN" : format.format(val);
    Element lbl = svgp.svgText(x + 0.05 * width, y + 0.75 * height, num);
    lbl.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, "font-size: " + 0.75 * height + "; font-weight: bold");
    barchart.appendChild(lbl);
  }
  // Optional label text to the right of the bar.
  if(label != null) {
    Element lbl = svgp.svgText(x + 1.05 * width, y + 0.75 * height, label);
    lbl.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, "font-size: " + 0.75 * height + "; font-weight: normal");
    barchart.appendChild(lbl);
  }
  return barchart;
}
Build the actual element
33,381
/**
 * Map the input file into memory (read-only), if not already mapped.
 *
 * @throws AbortException (unchecked) when the file cannot be mapped
 */
void openBuffer() {
  if(buffer != null) {
    return; // Already mapped.
  }
  try {
    buffer = input.map(MapMode.READ_ONLY, 0, input.size());
  }
  catch(IOException e) {
    throw new AbortException("Cannot map input bundle.", e);
  }
}
Map the input file .
33,382
/**
 * Read the bundle metadata: verify the magic number, then read the number of
 * columns and each column's type information, building the serializer array.
 * A leading DBID column is detected and flagged rather than added to the
 * column metadata.
 *
 * @throws AbortException (unchecked) on format or I/O errors
 */
void readMeta() {
  final int check = buffer.getInt();
  if(check != MAGIC) {
    throw new AbortException("File does not start with expected magic.");
  }
  final int nummeta = buffer.getInt();
  assert (nummeta > 0) : "Empty bundle?";
  meta = new BundleMeta(nummeta);
  sers = new ByteBufferSerializer<?>[nummeta];
  data = new Object[nummeta];
  for(int i = 0; i < nummeta; i++) {
    try {
      @SuppressWarnings("unchecked")
      SimpleTypeInformation<? extends Object> type = (SimpleTypeInformation<? extends Object>) TypeInformationSerializer.STATIC.fromByteBuffer(buffer);
      sers[i] = type.getSerializer();
      // First column may be the object IDs; track separately from meta.
      if(i == 0 && DBID.class.isAssignableFrom(type.getRestrictionClass())) {
        hasids = true;
      }
      else {
        meta.add(type);
      }
    }
    catch(UnsupportedOperationException e) {
      throw new AbortException("Deserialization failed: " + e.getMessage(), e);
    }
    catch(IOException e) {
      throw new AbortException("IO error", e);
    }
  }
}
Read the metadata .
33,383
/**
 * Read one object (one value per column) from the buffer into the data
 * array, using the column serializers.
 *
 * @throws AbortException (unchecked) on deserialization or I/O errors
 */
void readObject() {
  try {
    for(int i = 0; i < sers.length; ++i) {
      data[i] = sers[i].fromByteBuffer(buffer);
    }
  }
  catch(UnsupportedOperationException e) {
    throw new AbortException("Deserialization failed.", e);
  }
  catch(IOException e) {
    throw new AbortException("IO error", e);
  }
}
Read an object .
33,384
/**
 * Run the leader clustering algorithm: iterate over the data, and for every
 * not-yet-assigned point, make it a cluster leader and assign all unassigned
 * points within the distance threshold to its cluster.
 *
 * @param relation relation of objects to cluster
 * @return clustering with one prototype (leader) per cluster
 */
public Clustering<PrototypeModel<O>> run(Relation<O> relation) {
  RangeQuery<O> rq = relation.getRangeQuery(getDistanceFunction(), threshold);
  // Objects already assigned to some cluster.
  ModifiableDBIDs seen = DBIDUtil.newHashSet(relation.size());
  Clustering<PrototypeModel<O>> clustering = new Clustering<>("Prototype clustering", "prototype-clustering");
  int queries = 0; // Number of range queries issued, for statistics.
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Leader clustering", relation.size(), LOG) : null;
  // Stop early once every object has been assigned.
  for(DBIDIter it = relation.iterDBIDs(); it.valid() && seen.size() < relation.size(); it.advance()) {
    if(seen.contains(it)) {
      continue; // Already a member of an earlier cluster.
    }
    DoubleDBIDList res = rq.getRangeForDBID(it, threshold);
    ++queries;
    ModifiableDBIDs ids = DBIDUtil.newArray(res.size());
    for(DBIDIter cand = res.iter(); cand.valid(); cand.advance()) {
      // Only claim candidates not yet assigned elsewhere.
      if(seen.add(cand)) {
        LOG.incrementProcessed(prog);
        ids.add(cand);
      }
    }
    // The leader itself is always within its own range query.
    assert (ids.size() > 0 && ids.contains(it));
    PrototypeModel<O> mod = new SimplePrototypeModel<>(relation.get(it));
    clustering.addToplevelCluster(new Cluster<>(ids, mod));
  }
  LOG.statistics(new LongStatistic(this.getClass().getName() + ".queries", queries));
  LOG.ensureCompleted(prog);
  return clustering;
}
Run the leader clustering algorithm .
33,385
/**
 * Write a textual representation of the cluster's meta information (name,
 * noise flag, size, and the model if it is text-writable). Writing the
 * actual group data is handled by the caller.
 *
 * @param out output stream
 * @param label label to forward to the model writer
 */
public void writeToText(TextWriterStream out, String label) {
  final String name = getNameAutomatic();
  if(name != null) {
    out.commentPrintLn("Cluster name: " + name);
  }
  out.commentPrintLn("Cluster noise flag: " + isNoise());
  out.commentPrintLn("Cluster size: " + ids.size());
  final Object model = getModel();
  if(model instanceof TextWriteable) {
    ((TextWriteable) model).writeToText(out, label);
  }
}
Write to a textual representation . Writing the actual group data will be handled by the caller this is only meant to write the meta information .
33,386
/**
 * Schedule detaching the old plot and attaching the new one. If a running
 * SVG update manager exists, the swap is queued on its update thread (only
 * the most recently scheduled swap runs, via the "latest" compare-and-set);
 * otherwise the swap is performed immediately.
 *
 * @param oldplot plot to detach (may be null)
 * @param newplot plot to attach (may be null)
 */
private void scheduleSetPlot(final SVGPlot oldplot, final SVGPlot newplot) {
  UpdateManager um = this.getUpdateManager();
  if(um != null) {
    synchronized(um) {
      if(um.isRunning()) {
        final Runnable detach = new Runnable() {
          public void run() {
            // Only perform the swap if this is still the latest scheduled
            // runnable; stale swaps are skipped.
            if(latest.compareAndSet(this, null)) {
              detachPlot(oldplot);
              attachPlot(newplot);
            }
          }
        };
        latest.set(detach);
        um.getUpdateRunnableQueue().preemptLater(detach);
        return;
      }
    }
  }
  else {
    if(oldplot != null) {
      LoggingUtil.warning("No update manager, but a previous plot exists. Incorrectly initialized?");
    }
  }
  // No running update manager: swap synchronously.
  detachPlot(oldplot);
  attachPlot(newplot);
}
Schedule a detach .
33,387
/**
 * Attach to a new plot and display it. A null plot clears the canvas.
 *
 * @param newplot plot to attach, or null
 */
private void attachPlot(SVGPlot newplot) {
  this.plot = newplot;
  if(newplot != null) {
    newplot.synchronizeWith(synchronizer);
    super.setSVGDocument(newplot.getDocument());
    super.setDisableInteractions(newplot.getDisableInteractions());
  }
  else {
    super.setSVGDocument(null);
  }
}
Attach to a new plot and display .
33,388
/**
 * Execute the detaching event: unsynchronize and drop the plot reference.
 *
 * @param oldplot plot to detach; null is a no-op
 */
private void detachPlot(SVGPlot oldplot) {
  if(oldplot != null) {
    this.plot = null;
    oldplot.unsynchronizeWith(synchronizer);
  }
}
Execute the detaching event .
33,389
/**
 * Estimate the local intrinsic dimensionality from a distance list, skipping
 * zero distances and the ignored object itself.
 *
 * @param ignore object to exclude (usually the query point)
 * @param it distance list iterator (will be rewound)
 * @param p scratch buffer for the filtered distances
 * @return estimated intrinsic dimensionality
 * @throws ArithmeticException if fewer than two usable distances remain
 */
protected double estimateID(DBIDRef ignore, DoubleDBIDListIter it, double[] p) {
  int n = 0;
  for(it.seek(0); it.valid(); it.advance()) {
    final double dist = it.doubleValue();
    if(dist != 0. && !DBIDUtil.equal(ignore, it)) {
      p[n++] = dist;
    }
  }
  if(n < 2) {
    throw new ArithmeticException("Too little data to estimate ID.");
  }
  return estimator.estimate(p, n);
}
Estimate the local intrinsic dimensionality .
33,390
/**
 * Log probability density function of the exponential distribution
 * (static version).
 *
 * @param val value to evaluate at
 * @param rate rate parameter
 * @return log PDF; negative infinity for negative values
 */
public static double logpdf(double val, double rate) {
  if(val < 0.) {
    return Double.NEGATIVE_INFINITY;
  }
  return FastMath.log(rate) - rate * val;
}
Log probability density function ( log PDF ) , static version .
33,391
/**
 * Quantile function of the exponential distribution (static version).
 *
 * @param val probability in [0, 1]
 * @param rate rate parameter
 * @return quantile value; NaN outside [0, 1]
 */
public static double quantile(double val, double rate) {
  if(!(val >= 0) || !(val <= 1)) {
    return Double.NaN; // Out of range (also catches NaN input).
  }
  return -FastMath.log(1 - val) / rate;
}
Quantile function static version
33,392
/**
 * Output any pending line breaks (hard break, paragraph break, or plain
 * newline), followed by the current indentation, then clear the pending
 * state. No-op when no break is pending.
 *
 * @return this, for call chaining
 */
private MarkdownDocStream pendingBreak() {
  if(newline == Newline.NONE) {
    return this;
  }
  // BREAK = markdown hard line break; PAR = blank line; else plain newline.
  out.append(newline == Newline.BREAK ? "\\\n" : newline == Newline.PAR ? "\n\n" : "\n");
  // Emit "indent" spaces, in chunks of at most WHITESPACES.length().
  for(int i = indent, j = i; i > 0; i -= j) {
    out.append(WHITESPACES, 0, (j = i > WHITESPACES.length() ? WHITESPACES.length() : i));
  }
  newline = Newline.NONE;
  return this;
}
Output any pending line breaks .
33,393
/**
 * Append a single character. Newlines are not emitted immediately but
 * buffered as pending-break state (two in a row become a paragraph break).
 *
 * @param c character to append
 * @return this, for call chaining
 */
public MarkdownDocStream append(char c) {
  if(c != '\n') {
    pendingBreak();
    out.append(c);
    return this;
  }
  // Buffer the newline; a second consecutive one upgrades to a paragraph.
  newline = (newline == Newline.NONE) ? Newline.NEWLINE : Newline.PAR;
  return this;
}
Append a single character .
33,394
/**
 * Output part of a string, dropping carriage returns.
 *
 * @param p source sequence
 * @param start start index (inclusive)
 * @param end end index (exclusive)
 * @return this, for call chaining
 */
public MarkdownDocStream append(CharSequence p, int start, int end) {
  for(int i = start; i < end; i++) {
    final char c = p.charAt(i);
    if(c != '\r') {
      append(c);
    }
  }
  return this;
}
Output part of a string .
33,395
/**
 * Set the indent depth. Decreasing the indent upgrades any pending break so
 * the dedent takes effect on a fresh line.
 *
 * @param newindent new indentation depth
 * @return this, for call chaining
 */
public MarkdownDocStream indent(int newindent) {
  if(newindent < indent) {
    newline = (newline == Newline.BREAK) ? Newline.NEWLINE : Newline.PAR;
  }
  indent = newindent;
  return this;
}
Set the indent depth .
33,396
/**
 * Main render method: switches to a 2D orthographic overlay projection,
 * draws a semi-transparent black background quad, renders the contents,
 * then restores the previous projection and modelview matrices.
 *
 * @param gl GL context to render into
 */
public final void render(GL2 gl) {
  // Save current projection and modelview matrices, reset to identity.
  gl.glMatrixMode(GL2.GL_PROJECTION);
  gl.glPushMatrix();
  gl.glLoadIdentity();
  gl.glMatrixMode(GL2.GL_MODELVIEW);
  gl.glPushMatrix();
  gl.glLoadIdentity();
  // Pixel-aligned 2D overlay projection.
  gl.glOrtho(0, width, 0, height, -1, +1);
  // Half-transparent black backdrop covering the full overlay.
  gl.glColor4f(0f, 0f, 0f, .5f);
  gl.glBegin(GL2.GL_QUADS);
  gl.glVertex2f(0f, 0f);
  gl.glVertex2f(width, 0f);
  gl.glVertex2f(width, height);
  gl.glVertex2f(0f, height);
  gl.glEnd();
  renderContents(gl);
  // Restore the saved matrices.
  gl.glMatrixMode(GL2.GL_PROJECTION);
  gl.glPopMatrix();
  gl.glMatrixMode(GL2.GL_MODELVIEW);
  gl.glPopMatrix();
}
Main render method
33,397
/**
 * Sets the id of the given page, assigning a fresh one (reusing an empty
 * page slot if possible) when the page does not yet have an id.
 *
 * @param page page to assign an id to
 * @return the page's (possibly newly assigned) id
 */
public int setPageID(P page) {
  int pageID = page.getPageID();
  if(pageID != -1) {
    return pageID; // Already has an id.
  }
  // Prefer recycling an empty page slot; otherwise allocate a new one.
  pageID = getNextEmptyPageID();
  if(pageID == -1) {
    pageID = nextPageID++;
  }
  page.setPageID(pageID);
  return pageID;
}
Sets the id of the given page .
33,398
/**
 * Append the polygon to the buffer: points separated by spaces, coordinates
 * within a point separated by commas.
 *
 * @param buf buffer to append to
 * @return the same buffer, for chaining
 */
public StringBuilder appendToBuffer(StringBuilder buf) {
  for(Iterator<double[]> it = points.iterator(); it.hasNext();) {
    final double[] coords = it.next();
    for(int d = 0; d < coords.length; d++) {
      if(d > 0) {
        buf.append(',');
      }
      buf.append(coords[d]);
    }
    if(it.hasNext()) {
      buf.append(' ');
    }
  }
  return buf;
}
Append the polygon to the buffer .
33,399
/**
 * Point-in-polygon test using the ray-crossing (even-odd) rule: count edge
 * crossings of a horizontal ray from the test point; an odd count means the
 * point is inside.
 *
 * @param v test point, must be 2-dimensional
 * @return true if the point lies inside the polygon
 */
public boolean containsPoint2D(double[] v) {
  assert (v.length == 2);
  final double testx = v[0];
  final double testy = v[1];
  boolean c = false;
  Iterator<double[]> it = points.iterator();
  // Start with the closing edge: last vertex to first vertex.
  double[] pre = points.get(points.size() - 1);
  while(it.hasNext()) {
    final double[] cur = it.next();
    final double curx = cur[0], cury = cur[1];
    final double prex = pre[0], prey = pre[1];
    // Edge straddles the horizontal line through testy?
    if(((cury > testy) != (prey > testy))) {
      // Crossing is to the right of (or at) the test point?
      if((testx < (prex - curx) * (testy - cury) / (prey - cury) + curx)) {
        c = !c; // Toggle inside/outside.
      }
    }
    pre = cur;
  }
  return c;
}
Point - in - polygon test , based on the ray - crossing ( even - odd ) rule .