| idx (int64, 0-41.2k) | question (string, 83-4.15k chars) | target (string, 5-715 chars) |
|---|---|---|
34,400
|
public OutlierResult run ( Relation < V > relation ) { final DBIDs ids = relation . getDBIDs ( ) ; ArrayList < ArrayDBIDs > subspaceIndex = buildOneDimIndexes ( relation ) ; Set < HiCSSubspace > subspaces = calculateSubspaces ( relation , subspaceIndex , rnd . getSingleThreadedRandom ( ) ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Number of high-contrast subspaces: " + subspaces . size ( ) ) ; } List < DoubleRelation > results = new ArrayList < > ( ) ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Calculating Outlier scores for high Contrast subspaces" , subspaces . size ( ) , LOG ) : null ; for ( HiCSSubspace dimset : subspaces ) { if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Performing outlier detection in subspace " + dimset ) ; } ProxyDatabase pdb = new ProxyDatabase ( ids ) ; pdb . addRelation ( new ProjectedView < > ( relation , new NumericalFeatureSelection < V > ( dimset ) ) ) ; OutlierResult result = outlierAlgorithm . run ( pdb ) ; results . add ( result . getScores ( ) ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; WritableDoubleDataStore scores = DataStoreUtil . makeDoubleStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_STATIC ) ; DoubleMinMax minmax = new DoubleMinMax ( ) ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { double sum = 0.0 ; for ( DoubleRelation r : results ) { final double s = r . doubleValue ( iditer ) ; if ( ! Double . isNaN ( s ) ) { sum += s ; } } scores . putDouble ( iditer , sum ) ; minmax . put ( sum ) ; } OutlierScoreMeta meta = new BasicOutlierScoreMeta ( minmax . getMin ( ) , minmax . getMax ( ) ) ; DoubleRelation scoreres = new MaterializedDoubleRelation ( "HiCS" , "HiCS-outlier" , scores , relation . getDBIDs ( ) ) ; return new OutlierResult ( meta , scoreres ) ; }
|
Perform HiCS on a given database.
|
34,401
|
private ArrayList < ArrayDBIDs > buildOneDimIndexes ( Relation < ? extends NumberVector > relation ) { final int dim = RelationUtil . dimensionality ( relation ) ; ArrayList < ArrayDBIDs > subspaceIndex = new ArrayList < > ( dim + 1 ) ; SortDBIDsBySingleDimension comp = new VectorUtil . SortDBIDsBySingleDimension ( relation ) ; for ( int i = 0 ; i < dim ; i ++ ) { ArrayModifiableDBIDs amDBIDs = DBIDUtil . newArray ( relation . getDBIDs ( ) ) ; comp . setDimension ( i ) ; amDBIDs . sort ( comp ) ; subspaceIndex . add ( amDBIDs ) ; } return subspaceIndex ; }
|
Calculates index structures for every attribute, i.e., sorts a ModifiableArray of every DBID in the database for every dimension and stores them in a list.
|
34,402
|
private double [ ] max ( double [ ] distances1 , double [ ] distances2 ) { if ( distances1 . length != distances2 . length ) { throw new RuntimeException ( "different lengths!" ) ; } double [ ] result = new double [ distances1 . length ] ; for ( int i = 0 ; i < distances1 . length ; i ++ ) { result [ i ] = Math . max ( distances1 [ i ] , distances2 [ i ] ) ; } return result ; }
|
Returns an array that holds the maximum values of both specified arrays at each index.
|
34,403
|
public static int compileShader ( Class < ? > context , GL2 gl , int type , String name ) throws ShaderCompilationException { int prog = - 1 ; try ( InputStream in = context . getResourceAsStream ( name ) ) { int [ ] error = new int [ 1 ] ; String shaderdata = FileUtil . slurp ( in ) ; prog = gl . glCreateShader ( type ) ; gl . glShaderSource ( prog , 1 , new String [ ] { shaderdata } , null , 0 ) ; gl . glCompileShader ( prog ) ; gl . glGetObjectParameterivARB ( prog , GL2 . GL_OBJECT_INFO_LOG_LENGTH_ARB , error , 0 ) ; if ( error [ 0 ] > 1 ) { byte [ ] info = new byte [ error [ 0 ] ] ; gl . glGetInfoLogARB ( prog , info . length , error , 0 , info , 0 ) ; String out = new String ( info ) ; gl . glDeleteShader ( prog ) ; throw new ShaderCompilationException ( "Shader compilation error in '" + name + "': " + out ) ; } gl . glGetShaderiv ( prog , GL2 . GL_COMPILE_STATUS , error , 0 ) ; if ( error [ 0 ] > 1 ) { throw new ShaderCompilationException ( "Shader compilation of '" + name + "' failed." ) ; } } catch ( IOException e ) { throw new ShaderCompilationException ( "IO error loading shader: " + name , e ) ; } return prog ; }
|
Compile a shader from a file.
|
34,404
|
protected int effectiveBandSize ( final int dim1 , final int dim2 ) { if ( bandSize == Double . POSITIVE_INFINITY ) { return ( dim1 > dim2 ) ? dim1 : dim2 ; } if ( bandSize >= 1. ) { return ( int ) bandSize ; } return ( int ) Math . ceil ( ( dim1 >= dim2 ? dim1 : dim2 ) * bandSize ) ; }
|
Compute the effective band size.
|
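The three branches above (unbounded, absolute width >= 1, and fractional width relative to the longer sequence) can be sanity-checked with a self-contained sketch; the class name and sample values below are illustrative, not from the library:

```java
// Minimal standalone sketch of the band-size rule (hypothetical values).
public class BandSizeDemo {
  static int effectiveBandSize(double bandSize, int dim1, int dim2) {
    int longer = Math.max(dim1, dim2);
    if (bandSize == Double.POSITIVE_INFINITY) {
      return longer; // unbounded: use the full band
    }
    if (bandSize >= 1.) {
      return (int) bandSize; // absolute band width
    }
    return (int) Math.ceil(longer * bandSize); // fraction of the longer series
  }

  public static void main(String[] args) {
    System.out.println(effectiveBandSize(Double.POSITIVE_INFINITY, 100, 80)); // 100
    System.out.println(effectiveBandSize(10., 100, 80)); // 10
    System.out.println(effectiveBandSize(0.1, 100, 80)); // ceil(100 * 0.1) = 10
  }
}
```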
34,405
|
public final int addLeafEntry ( E entry ) { if ( ! ( entry instanceof LeafEntry ) ) { throw new UnsupportedOperationException ( "Entry is not a leaf entry!" ) ; } if ( ! isLeaf ( ) ) { throw new UnsupportedOperationException ( "Node is not a leaf node!" ) ; } return addEntry ( entry ) ; }
|
Adds a new leaf entry to this node's children and returns the index of the entry in this node's children array. An UnsupportedOperationException will be thrown if the entry is not a leaf entry or this node is not a leaf node.
|
34,406
|
public final int addDirectoryEntry ( E entry ) { if ( entry instanceof LeafEntry ) { throw new UnsupportedOperationException ( "Entry is not a directory entry!" ) ; } if ( isLeaf ( ) ) { throw new UnsupportedOperationException ( "Node is not a directory node!" ) ; } return addEntry ( entry ) ; }
|
Adds a new directory entry to this node's children and returns the index of the entry in this node's children array. An UnsupportedOperationException will be thrown if the entry is not a directory entry or this node is not a directory node.
|
34,407
|
public boolean deleteEntry ( int index ) { System . arraycopy ( entries , index + 1 , entries , index , numEntries - index - 1 ) ; entries [ -- numEntries ] = null ; return true ; }
|
Deletes the entry at the specified index and shifts all entries after the index to the left.
|
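The System.arraycopy shift used above can be seen in isolation in the following sketch; the data and class name are hypothetical:

```java
import java.util.Arrays;

// Standalone sketch of shift-left deletion on a plain array (hypothetical data).
public class DeleteShiftDemo {
  public static void main(String[] args) {
    Object[] entries = { "a", "b", "c", "d", null };
    int numEntries = 4;
    int index = 1; // delete "b"
    // Shift everything after index one slot to the left, then clear the tail.
    System.arraycopy(entries, index + 1, entries, index, numEntries - index - 1);
    entries[--numEntries] = null;
    System.out.println(Arrays.toString(entries)); // [a, c, d, null, null]
  }
}
```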
34,408
|
@ SuppressWarnings ( "unchecked" ) public final List < E > getEntries ( ) { List < E > result = new ArrayList < > ( numEntries ) ; for ( Entry entry : entries ) { if ( entry != null ) { result . add ( ( E ) entry ) ; } } return result ; }
|
Returns a list of the entries.
|
34,409
|
public void removeMask ( long [ ] mask ) { int dest = BitsUtil . nextSetBit ( mask , 0 ) ; if ( dest < 0 ) { return ; } int src = BitsUtil . nextSetBit ( mask , dest ) ; while ( src < numEntries ) { if ( ! BitsUtil . get ( mask , src ) ) { entries [ dest ] = entries [ src ] ; dest ++ ; } src ++ ; } int rm = src - dest ; while ( dest < numEntries ) { entries [ dest ] = null ; dest ++ ; } numEntries -= rm ; }
|
Remove entries according to the given mask.
|
34,410
|
public final void splitTo ( AbstractNode < E > newNode , List < E > sorting , int splitPoint ) { assert ( isLeaf ( ) == newNode . isLeaf ( ) ) ; deleteAllEntries ( ) ; StringBuilder msg = LoggingConfiguration . DEBUG ? new StringBuilder ( 1000 ) : null ; for ( int i = 0 ; i < splitPoint ; i ++ ) { addEntry ( sorting . get ( i ) ) ; if ( msg != null ) { msg . append ( "n_" ) . append ( getPageID ( ) ) . append ( ' ' ) . append ( sorting . get ( i ) ) . append ( '\n' ) ; } } for ( int i = splitPoint ; i < sorting . size ( ) ; i ++ ) { newNode . addEntry ( sorting . get ( i ) ) ; if ( msg != null ) { msg . append ( "n_" ) . append ( newNode . getPageID ( ) ) . append ( ' ' ) . append ( sorting . get ( i ) ) . append ( '\n' ) ; } } if ( msg != null ) { Logging . getLogger ( this . getClass ( ) . getName ( ) ) . fine ( msg . toString ( ) ) ; } }
|
Redistribute entries according to the given sorting.
|
34,411
|
public static void ensureClusteringResult ( final Database db , final Result result ) { Collection < Clustering < ? > > clusterings = ResultUtil . filterResults ( db . getHierarchy ( ) , result , Clustering . class ) ; if ( clusterings . isEmpty ( ) ) { ResultUtil . addChildResult ( db , new ByLabelOrAllInOneClustering ( ) . run ( db ) ) ; } }
|
Ensure that the result contains at least one Clustering.
|
34,412
|
public static < A > double [ ] toPrimitiveDoubleArray ( A data , NumberArrayAdapter < ? , A > adapter ) { if ( adapter == DoubleArrayAdapter . STATIC ) { return ( ( double [ ] ) data ) . clone ( ) ; } final int len = adapter . size ( data ) ; double [ ] x = new double [ len ] ; for ( int i = 0 ; i < len ; i ++ ) { x [ i ] = adapter . getDouble ( data , i ) ; } return x ; }
|
Local copy; see ArrayLikeUtil.toPrimitiveDoubleArray.
|
34,413
|
public void flush ( ) { try { out . flush ( ) ; } catch ( Exception ex ) { reportError ( null , ex , ErrorManager . FLUSH_FAILURE ) ; } try { err . flush ( ) ; } catch ( Exception ex ) { reportError ( null , ex , ErrorManager . FLUSH_FAILURE ) ; } }
|
Flush output streams.
|
34,414
|
public void publish ( final LogRecord record ) { final Writer destination ; if ( record . getLevel ( ) . intValue ( ) >= Level . WARNING . intValue ( ) ) { destination = this . err ; } else { destination = this . out ; } final String m ; if ( record instanceof ProgressLogRecord ) { ProgressLogRecord prec = ( ProgressLogRecord ) record ; ptrack . addProgress ( prec . getProgress ( ) ) ; Collection < Progress > completed = ptrack . removeCompleted ( ) ; Collection < Progress > progresses = ptrack . getProgresses ( ) ; StringBuilder buf = new StringBuilder ( ) ; if ( ! completed . isEmpty ( ) ) { buf . append ( OutputStreamLogger . CARRIAGE_RETURN ) ; for ( Progress prog : completed ) { prog . appendToBuffer ( buf ) ; buf . append ( OutputStreamLogger . NEWLINE ) ; } } if ( ! progresses . isEmpty ( ) ) { boolean first = true ; buf . append ( OutputStreamLogger . CARRIAGE_RETURN ) ; for ( Progress prog : progresses ) { if ( first ) { first = false ; } else { buf . append ( ' ' ) ; } prog . appendToBuffer ( buf ) ; } } m = buf . toString ( ) ; } else { final Formatter fmt ; if ( record . getLevel ( ) . intValue ( ) >= Level . WARNING . intValue ( ) ) { fmt = errformat ; } else if ( record . getLevel ( ) . intValue ( ) <= Level . FINE . intValue ( ) ) { fmt = debugformat ; } else { fmt = msgformat ; } try { m = fmt . format ( record ) ; } catch ( Exception ex ) { reportError ( null , ex , ErrorManager . FORMAT_FAILURE ) ; return ; } } try { destination . write ( m ) ; destination . flush ( ) ; } catch ( Exception ex ) { reportError ( null , ex , ErrorManager . WRITE_FAILURE ) ; return ; } }
|
Publish a log record.
|
34,415
|
private boolean checkForNaNs ( NumberVector vec ) { for ( int i = 0 , d = vec . getDimensionality ( ) ; i < d ; i ++ ) { double v = vec . doubleValue ( i ) ; if ( v != v ) { return true ; } } return false ; }
|
Check for NaN values.
|
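The `v != v` test above relies on NaN being the only IEEE 754 value that is not equal to itself, which makes it an allocation-free equivalent of Double.isNaN; a quick standalone check:

```java
// NaN is the only double that compares unequal to itself (IEEE 754).
public class NaNDemo {
  public static void main(String[] args) {
    double v = Double.NaN;
    System.out.println(v != v);          // true
    System.out.println(Double.isNaN(v)); // true, the equivalent library call
    System.out.println(1.5 != 1.5);      // false for any ordinary value
  }
}
```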
34,416
|
public static Relation < String > guessLabelRepresentation ( Database database ) throws NoSupportedDataTypeException { try { Relation < ? extends ClassLabel > classrep = database . getRelation ( TypeUtil . CLASSLABEL ) ; if ( classrep != null ) { return new ConvertToStringView ( classrep ) ; } } catch ( NoSupportedDataTypeException e ) { } try { Relation < ? extends LabelList > labelsrep = database . getRelation ( TypeUtil . LABELLIST ) ; if ( labelsrep != null ) { return new ConvertToStringView ( labelsrep ) ; } } catch ( NoSupportedDataTypeException e ) { } try { Relation < String > stringrep = database . getRelation ( TypeUtil . STRING ) ; if ( stringrep != null ) { return stringrep ; } } catch ( NoSupportedDataTypeException e ) { } throw new NoSupportedDataTypeException ( "No label-like representation was found." ) ; }
|
Guess a potentially label-like representation, preferring class labels.
|
34,417
|
public static ArrayModifiableDBIDs getObjectsByLabelMatch ( Database database , Pattern name_pattern ) { Relation < String > relation = guessLabelRepresentation ( database ) ; if ( name_pattern == null ) { return DBIDUtil . newArray ( ) ; } ArrayModifiableDBIDs ret = DBIDUtil . newArray ( ) ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { if ( name_pattern . matcher ( relation . get ( iditer ) ) . find ( ) ) { ret . add ( iditer ) ; } } return ret ; }
|
Find objects by matching their labels.
|
34,418
|
public void writeExternal ( ObjectOutput out ) throws IOException { super . writeExternal ( out ) ; out . writeObject ( conservativeApproximation ) ; }
|
Calls the super method and writes the conservative approximation of the kNN distances of this entry to the specified stream.
|
34,419
|
public void readExternal ( ObjectInput in ) throws IOException , ClassNotFoundException { super . readExternal ( in ) ; conservativeApproximation = ( ApproximationLine ) in . readObject ( ) ; }
|
Calls the super method and reads the conservative approximation of the kNN distances of this entry from the specified input stream.
|
34,420
|
protected void updateDensities ( WritableDoubleDataStore rbod_score , DoubleDBIDList referenceDists ) { DoubleDBIDListIter it = referenceDists . iter ( ) ; for ( int l = 0 ; l < referenceDists . size ( ) ; l ++ ) { double density = computeDensity ( referenceDists , it , l ) ; it . seek ( l ) ; if ( ! ( density > rbod_score . doubleValue ( it ) ) ) { rbod_score . putDouble ( it , density ) ; } } }
|
Update the density estimates for each object.
|
34,421
|
static void chooseRemaining ( Relation < ? extends NumberVector > relation , DBIDs ids , DistanceQuery < NumberVector > distQ , int k , List < NumberVector > means , WritableDoubleDataStore weights , double weightsum , Random random ) { while ( true ) { if ( weightsum > Double . MAX_VALUE ) { throw new IllegalStateException ( "Could not choose a reasonable mean - too many data points, too large distance sum?" ) ; } if ( weightsum < Double . MIN_NORMAL ) { LoggingUtil . warning ( "Could not choose a reasonable mean - to few data points?" ) ; } double r = random . nextDouble ( ) * weightsum ; while ( r <= 0 && weightsum > Double . MIN_NORMAL ) { r = random . nextDouble ( ) * weightsum ; } DBIDIter it = ids . iter ( ) ; while ( it . valid ( ) ) { if ( ( r -= weights . doubleValue ( it ) ) < 0 ) { break ; } it . advance ( ) ; } if ( ! it . valid ( ) ) { weightsum -= r ; continue ; } final NumberVector newmean = relation . get ( it ) ; means . add ( newmean ) ; if ( means . size ( ) >= k ) { break ; } weights . putDouble ( it , 0. ) ; weightsum = updateWeights ( weights , ids , newmean , distQ ) ; } }
|
Choose remaining means weighted by distance.
|
34,422
|
private double factor ( int dimension ) { return maxima [ dimension ] > minima [ dimension ] ? maxima [ dimension ] - minima [ dimension ] : maxima [ dimension ] > 0 ? maxima [ dimension ] : 1 ; }
|
Returns a factor for normalization in a certain dimension.
|
34,423
|
protected double derivative ( int i , NumberVector v ) { final int dim = v . getDimensionality ( ) ; if ( dim == 1 ) { return 0. ; } i = ( i == 0 ) ? 1 : ( i == dim - 1 ) ? dim - 2 : i ; return ( v . doubleValue ( i ) - v . doubleValue ( i - 1 ) + ( v . doubleValue ( i + 1 ) - v . doubleValue ( i - 1 ) ) * .5 ) * .5 ; }
|
Given a NumberVector and the position of an element, approximates the gradient of the given element.
|
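The expression above averages the backward difference with the central difference, clamping boundary positions inward; a self-contained version on a plain array (sample data hypothetical):

```java
// Standalone sketch: average of backward and central difference, clamped at the ends.
public class GradientDemo {
  static double derivative(double[] v, int i) {
    final int dim = v.length;
    if (dim == 1) {
      return 0.;
    }
    i = (i == 0) ? 1 : (i == dim - 1) ? dim - 2 : i;
    return (v[i] - v[i - 1] + (v[i + 1] - v[i - 1]) * .5) * .5;
  }

  public static void main(String[] args) {
    double[] v = { 0., 1., 4., 9. }; // f(x) = x^2 sampled at x = 0..3
    for (int i = 0; i < v.length; i++) {
      System.out.println(derivative(v, i)); // 1.5, 1.5, 3.5, 3.5
    }
  }
}
```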
34,424
|
public Assignments < E > split ( AbstractMTree < ? , N , E , ? > tree , N node ) { final int n = node . getNumEntries ( ) ; int pos1 = random . nextInt ( n ) , pos2 = random . nextInt ( n - 1 ) ; pos2 = pos2 >= pos1 ? pos2 + 1 : pos2 ; double [ ] dis1 = new double [ n ] , dis2 = new double [ n ] ; E e1 = node . getEntry ( pos1 ) , e2 = node . getEntry ( pos2 ) ; for ( int i = 0 ; i < n ; i ++ ) { if ( i == pos1 || i == pos2 ) { continue ; } final E ej = node . getEntry ( i ) ; dis1 [ i ] = tree . distance ( e1 , ej ) ; dis2 [ i ] = tree . distance ( e2 , ej ) ; } return distributor . distribute ( node , pos1 , dis1 , pos2 , dis2 ) ; }
|
Selects two objects of the specified node to be promoted and stored in the parent node. The m-RAD strategy considers all possible pairs of objects and, after partitioning the set of entries, promotes the pair of objects for which the sum of the covering radii is minimal.
|
34,425
|
public static boolean checkCSSStatements ( Collection < Pair < String , String > > statements ) { for ( Pair < String , String > pair : statements ) { if ( ! checkCSSStatement ( pair . getFirst ( ) , pair . getSecond ( ) ) ) { return false ; } } return true ; }
|
Validate a set of CSS statements.
|
34,426
|
public String getStatement ( String key ) { for ( Pair < String , String > pair : statements ) { if ( pair . getFirst ( ) . equals ( key ) ) { return pair . getSecond ( ) ; } } return null ; }
|
Get the current value of a particular CSS statement.
|
34,427
|
public void setStatement ( String key , String value ) { if ( value != null && ! checkCSSStatement ( key , value ) ) { throw new InvalidCSS ( "Invalid CSS statement." ) ; } for ( Pair < String , String > pair : statements ) { if ( pair . getFirst ( ) . equals ( key ) ) { if ( value != null ) { pair . setSecond ( value ) ; } else { statements . remove ( pair ) ; } return ; } } if ( value != null ) { statements . add ( new Pair < > ( key , value ) ) ; } }
|
Set a CSS statement.
|
34,428
|
public void appendCSSDefinition ( StringBuilder buf ) { buf . append ( "\n." ) ; buf . append ( name ) ; buf . append ( '{' ) ; for ( Pair < String , String > pair : statements ) { buf . append ( pair . getFirst ( ) ) ; buf . append ( ':' ) ; buf . append ( pair . getSecond ( ) ) ; buf . append ( ";\n" ) ; } buf . append ( "}\n" ) ; }
|
Append CSS definition to a stream.
|
34,429
|
public String inlineCSS ( ) { StringBuilder buf = new StringBuilder ( ) ; for ( Pair < String , String > pair : statements ) { buf . append ( pair . getFirst ( ) ) ; buf . append ( ':' ) ; buf . append ( pair . getSecond ( ) ) ; buf . append ( ';' ) ; } return buf . toString ( ) ; }
|
Render CSS class to inline formatting.
|
34,430
|
protected Distribution findBestFit ( final List < V > col , Adapter adapter , int d , double [ ] test ) { if ( estimators . size ( ) == 1 ) { return estimators . get ( 0 ) . estimate ( col , adapter ) ; } Distribution best = null ; double bestq = Double . POSITIVE_INFINITY ; trials : for ( DistributionEstimator < ? > est : estimators ) { try { Distribution dist = est . estimate ( col , adapter ) ; for ( int i = 0 ; i < test . length ; i ++ ) { test [ i ] = dist . cdf ( col . get ( i ) . doubleValue ( d ) ) ; if ( Double . isNaN ( test [ i ] ) ) { LOG . warning ( "Got NaN after fitting " + est + ": " + dist ) ; continue trials ; } if ( Double . isInfinite ( test [ i ] ) ) { LOG . warning ( "Got infinite value after fitting " + est + ": " + dist ) ; continue trials ; } } Arrays . sort ( test ) ; double q = KolmogorovSmirnovTest . simpleTest ( test ) ; if ( LOG . isVeryVerbose ( ) ) { LOG . veryverbose ( "Estimator " + est + " (" + dist + ") has maximum deviation " + q + " for dimension " + d ) ; } if ( best == null || q < bestq ) { best = dist ; bestq = q ; } } catch ( ArithmeticException e ) { if ( LOG . isVeryVerbose ( ) ) { LOG . veryverbose ( "Fitting distribution " + est + " failed: " + e . getMessage ( ) ) ; } continue trials ; } } if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Best fit for dimension " + d + ": " + best ) ; } return best ; }
|
Find the best-fitting distribution.
|
34,431
|
protected boolean constantZero ( List < V > column , Adapter adapter ) { for ( int i = 0 , s = adapter . size ( column ) ; i < s ; i ++ ) { if ( adapter . get ( column , i ) != 0. ) { return false ; } } return true ; }
|
Test if an attribute is constant zero.
|
34,432
|
private StreamTokenizer makeArffTokenizer ( BufferedReader br ) { StreamTokenizer tokenizer = new StreamTokenizer ( br ) ; { tokenizer . resetSyntax ( ) ; tokenizer . whitespaceChars ( 0 , ' ' ) ; tokenizer . ordinaryChars ( '0' , '9' ) ; tokenizer . ordinaryChar ( '-' ) ; tokenizer . ordinaryChar ( '.' ) ; tokenizer . wordChars ( ' ' + 1 , '\u00FF' ) ; tokenizer . whitespaceChars ( ',' , ',' ) ; tokenizer . commentChar ( '%' ) ; tokenizer . quoteChar ( '"' ) ; tokenizer . quoteChar ( '\'' ) ; tokenizer . ordinaryChar ( '{' ) ; tokenizer . ordinaryChar ( '}' ) ; tokenizer . eolIsSignificant ( true ) ; } return tokenizer ; }
|
Make a StreamTokenizer for the ARFF format.
|
34,433
|
private void setupBundleHeaders ( ArrayList < String > names , int [ ] targ , TypeInformation [ ] etyp , int [ ] dimsize , MultipleObjectsBundle bundle , boolean sparse ) { for ( int in = 0 , out = 0 ; in < targ . length ; out ++ ) { int nin = in + 1 ; for ( ; nin < targ . length ; nin ++ ) { if ( targ [ nin ] != targ [ in ] ) { break ; } } if ( TypeUtil . NUMBER_VECTOR_FIELD . equals ( etyp [ out ] ) ) { String [ ] labels = new String [ dimsize [ out ] ] ; for ( int i = 0 ; i < dimsize [ out ] ; i ++ ) { labels [ i ] = names . get ( out + i ) ; } if ( ! sparse ) { VectorFieldTypeInformation < DoubleVector > type = new VectorFieldTypeInformation < > ( DoubleVector . FACTORY , dimsize [ out ] , labels ) ; bundle . appendColumn ( type , new ArrayList < DoubleVector > ( ) ) ; } else { VectorFieldTypeInformation < SparseDoubleVector > type = new VectorFieldTypeInformation < > ( SparseDoubleVector . FACTORY , dimsize [ out ] , labels ) ; bundle . appendColumn ( type , new ArrayList < SparseDoubleVector > ( ) ) ; } } else if ( TypeUtil . LABELLIST . equals ( etyp [ out ] ) ) { StringBuilder label = new StringBuilder ( names . get ( out ) ) ; for ( int i = 1 ; i < dimsize [ out ] ; i ++ ) { label . append ( ' ' ) . append ( names . get ( out + i ) ) ; } bundle . appendColumn ( new SimpleTypeInformation < > ( LabelList . class , label . toString ( ) ) , new ArrayList < LabelList > ( ) ) ; } else if ( TypeUtil . EXTERNALID . equals ( etyp [ out ] ) ) { bundle . appendColumn ( new SimpleTypeInformation < > ( ExternalID . class , names . get ( out ) ) , new ArrayList < ExternalID > ( ) ) ; } else if ( TypeUtil . CLASSLABEL . equals ( etyp [ out ] ) ) { bundle . appendColumn ( new SimpleTypeInformation < > ( ClassLabel . class , names . get ( out ) ) , new ArrayList < ClassLabel > ( ) ) ; } else { throw new AbortException ( "Unsupported type for column " + in + "->" + out + ": " + ( ( etyp [ out ] != null ) ? etyp [ out ] . toString ( ) : "null" ) ) ; } assert ( out == bundle . metaLength ( ) - 1 ) ; in = nin ; } }
|
Set up the headers for the object bundle.
|
34,434
|
private void readHeader ( BufferedReader br ) throws IOException { String line ; while ( true ) { line = br . readLine ( ) ; if ( line == null ) { throw new AbortException ( ARFF_HEADER_RELATION + " not found in file." ) ; } if ( ARFF_COMMENT . reset ( line ) . matches ( ) || EMPTY . reset ( line ) . matches ( ) ) { continue ; } if ( ARFF_HEADER_RELATION . reset ( line ) . matches ( ) ) { break ; } throw new AbortException ( "Expected relation declaration: " + line ) ; } }
|
Read the dataset header part of the ARFF file to ensure consistency.
|
34,435
|
private void nextToken ( StreamTokenizer tokenizer ) throws IOException { tokenizer . nextToken ( ) ; if ( ( tokenizer . ttype == '\'' ) || ( tokenizer . ttype == '"' ) ) { tokenizer . ttype = StreamTokenizer . TT_WORD ; } else if ( ( tokenizer . ttype == StreamTokenizer . TT_WORD ) && ( tokenizer . sval . equals ( "?" ) ) ) { tokenizer . ttype = '?' ; } if ( LOG . isDebugging ( ) ) { if ( tokenizer . ttype == StreamTokenizer . TT_NUMBER ) { LOG . debug ( "token: " + tokenizer . nval ) ; } else if ( tokenizer . ttype == StreamTokenizer . TT_WORD ) { LOG . debug ( "token: " + tokenizer . sval ) ; } else if ( tokenizer . ttype == StreamTokenizer . TT_EOF ) { LOG . debug ( "token: EOF" ) ; } else if ( tokenizer . ttype == StreamTokenizer . TT_EOL ) { LOG . debug ( "token: EOL" ) ; } else { LOG . debug ( "token type: " + tokenizer . ttype ) ; } } }
|
Helper function for token handling.
|
34,436
|
public static < E extends ClusterOrder > Clustering < Model > makeOPTICSCut ( E co , double epsilon ) { Clustering < Model > clustering = new Clustering < > ( "OPTICS Cut Clustering" , "optics-cut" ) ; ModifiableDBIDs noise = DBIDUtil . newHashSet ( ) ; double lastDist = Double . MAX_VALUE ; double actDist = Double . MAX_VALUE ; ModifiableDBIDs current = DBIDUtil . newHashSet ( ) ; DBIDVar prev = DBIDUtil . newVar ( ) ; for ( DBIDIter it = co . iter ( ) ; it . valid ( ) ; prev . set ( it ) , it . advance ( ) ) { lastDist = actDist ; actDist = co . getReachability ( it ) ; if ( actDist <= epsilon ) { if ( lastDist > epsilon && prev . isSet ( ) ) { noise . remove ( prev ) ; current . add ( prev ) ; } current . add ( it ) ; } else { if ( ! current . isEmpty ( ) ) { clustering . addToplevelCluster ( new Cluster < Model > ( current , ClusterModel . CLUSTER ) ) ; current = DBIDUtil . newHashSet ( ) ; } noise . add ( it ) ; } } if ( ! current . isEmpty ( ) ) { clustering . addToplevelCluster ( new Cluster < Model > ( current , ClusterModel . CLUSTER ) ) ; } clustering . addToplevelCluster ( new Cluster < Model > ( noise , true , ClusterModel . CLUSTER ) ) ; return clustering ; }
|
Compute an OPTICS cut clustering.
|
34,437
|
public static LabelList make ( Collection < String > labels ) { int size = labels . size ( ) ; if ( size == 0 ) { return EMPTY_LABELS ; } return new LabelList ( labels . toArray ( new String [ size ] ) ) ; }
|
Constructor replacement.
|
34,438
|
public Clustering < SubspaceModel > run ( Database db , Relation < V > relation ) { if ( mu >= relation . size ( ) ) { throw new AbortException ( "Parameter mu is chosen unreasonably large. This won't yield meaningful results." ) ; } DiSHClusterOrder opticsResult = new Instance ( db , relation ) . run ( ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Compute Clusters." ) ; } return computeClusters ( relation , opticsResult ) ; }
|
Performs the DiSH algorithm on the given database.
|
34,439
|
private Clustering < SubspaceModel > computeClusters ( Relation < V > database , DiSHClusterOrder clusterOrder ) { final int dimensionality = RelationUtil . dimensionality ( database ) ; Object2ObjectOpenCustomHashMap < long [ ] , List < ArrayModifiableDBIDs > > clustersMap = extractClusters ( database , clusterOrder ) ; logClusterSizes ( "Step 1: extract clusters" , dimensionality , clustersMap ) ; checkClusters ( database , clustersMap ) ; logClusterSizes ( "Step 2: check clusters" , dimensionality , clustersMap ) ; List < Cluster < SubspaceModel > > clusters = sortClusters ( database , clustersMap ) ; if ( LOG . isVerbose ( ) ) { StringBuilder msg = new StringBuilder ( "Step 3: sort clusters" ) ; for ( Cluster < SubspaceModel > c : clusters ) { msg . append ( '\n' ) . append ( BitsUtil . toStringLow ( c . getModel ( ) . getSubspace ( ) . getDimensions ( ) , dimensionality ) ) . append ( " ids " ) . append ( c . size ( ) ) ; } LOG . verbose ( msg . toString ( ) ) ; } Clustering < SubspaceModel > clustering = new Clustering < > ( "DiSH clustering" , "dish-clustering" ) ; buildHierarchy ( database , clustering , clusters , dimensionality ) ; if ( LOG . isVerbose ( ) ) { StringBuilder msg = new StringBuilder ( "Step 4: build hierarchy" ) ; for ( Cluster < SubspaceModel > c : clusters ) { msg . append ( '\n' ) . append ( BitsUtil . toStringLow ( c . getModel ( ) . getSubspace ( ) . getDimensions ( ) , dimensionality ) ) . append ( " ids " ) . append ( c . size ( ) ) ; for ( It < Cluster < SubspaceModel > > iter = clustering . getClusterHierarchy ( ) . iterParents ( c ) ; iter . valid ( ) ; iter . advance ( ) ) { msg . append ( "\n parent " ) . append ( iter . get ( ) ) ; } for ( It < Cluster < SubspaceModel > > iter = clustering . getClusterHierarchy ( ) . iterChildren ( c ) ; iter . valid ( ) ; iter . advance ( ) ) { msg . append ( "\n child " ) . append ( iter . get ( ) ) ; } } LOG . verbose ( msg . toString ( ) ) ; } for ( Cluster < SubspaceModel > c : clusters ) { if ( clustering . getClusterHierarchy ( ) . numParents ( c ) == 0 ) { clustering . addToplevelCluster ( c ) ; } } return clustering ; }
|
Computes the hierarchical clusters according to the cluster order.
|
34,440
|
private void logClusterSizes ( String m , int dimensionality , Object2ObjectOpenCustomHashMap < long [ ] , List < ArrayModifiableDBIDs > > clustersMap ) { if ( LOG . isVerbose ( ) ) { final StringBuilder msg = new StringBuilder ( 1000 ) . append ( m ) . append ( '\n' ) ; for ( ObjectIterator < Object2ObjectMap . Entry < long [ ] , List < ArrayModifiableDBIDs > > > iter = clustersMap . object2ObjectEntrySet ( ) . fastIterator ( ) ; iter . hasNext ( ) ; ) { Object2ObjectMap . Entry < long [ ] , List < ArrayModifiableDBIDs > > entry = iter . next ( ) ; msg . append ( BitsUtil . toStringLow ( entry . getKey ( ) , dimensionality ) ) . append ( " sizes:" ) ; for ( ArrayModifiableDBIDs c : entry . getValue ( ) ) { msg . append ( ' ' ) . append ( c . size ( ) ) ; } msg . append ( '\n' ) ; } LOG . verbose ( msg . toString ( ) ) ; } }
|
Log cluster sizes in verbose mode.
|
34,441
|
private List < Cluster < SubspaceModel > > sortClusters ( Relation < V > relation , Object2ObjectMap < long [ ] , List < ArrayModifiableDBIDs > > clustersMap ) { final int db_dim = RelationUtil . dimensionality ( relation ) ; List < Cluster < SubspaceModel > > clusters = new ArrayList < > ( ) ; for ( long [ ] pv : clustersMap . keySet ( ) ) { List < ArrayModifiableDBIDs > parallelClusters = clustersMap . get ( pv ) ; for ( int i = 0 ; i < parallelClusters . size ( ) ; i ++ ) { ArrayModifiableDBIDs c = parallelClusters . get ( i ) ; Cluster < SubspaceModel > cluster = new Cluster < > ( c ) ; cluster . setModel ( new SubspaceModel ( new Subspace ( pv ) , Centroid . make ( relation , c ) . getArrayRef ( ) ) ) ; String subspace = BitsUtil . toStringLow ( cluster . getModel ( ) . getSubspace ( ) . getDimensions ( ) , db_dim ) ; cluster . setName ( parallelClusters . size ( ) > 1 ? ( "Cluster_" + subspace + "_" + i ) : ( "Cluster_" + subspace ) ) ; clusters . add ( cluster ) ; } } Comparator < Cluster < SubspaceModel > > comparator = new Comparator < Cluster < SubspaceModel > > ( ) { public int compare ( Cluster < SubspaceModel > c1 , Cluster < SubspaceModel > c2 ) { return c2 . getModel ( ) . getSubspace ( ) . dimensionality ( ) - c1 . getModel ( ) . getSubspace ( ) . dimensionality ( ) ; } } ; Collections . sort ( clusters , comparator ) ; return clusters ; }
|
Returns a sorted list of the clusters w.r.t. the subspace dimensionality in descending order.
|
34,442
|
private void checkClusters ( Relation < V > relation , Object2ObjectMap < long [ ] , List < ArrayModifiableDBIDs > > clustersMap ) { final int dimensionality = RelationUtil . dimensionality ( relation ) ; List < Pair < long [ ] , ArrayModifiableDBIDs > > notAssigned = new ArrayList < > ( ) ; Object2ObjectMap < long [ ] , List < ArrayModifiableDBIDs > > newClustersMap = new Object2ObjectOpenCustomHashMap < > ( BitsUtil . FASTUTIL_HASH_STRATEGY ) ; Pair < long [ ] , ArrayModifiableDBIDs > noise = new Pair < > ( BitsUtil . zero ( dimensionality ) , DBIDUtil . newArray ( ) ) ; for ( long [ ] pv : clustersMap . keySet ( ) ) { if ( BitsUtil . cardinality ( pv ) == 0 ) { List < ArrayModifiableDBIDs > parallelClusters = clustersMap . get ( pv ) ; for ( ArrayModifiableDBIDs c : parallelClusters ) { noise . second . addDBIDs ( c ) ; } } else { List < ArrayModifiableDBIDs > parallelClusters = clustersMap . get ( pv ) ; List < ArrayModifiableDBIDs > newParallelClusters = new ArrayList < > ( parallelClusters . size ( ) ) ; for ( ArrayModifiableDBIDs c : parallelClusters ) { if ( ! BitsUtil . isZero ( pv ) && c . size ( ) < mu ) { notAssigned . add ( new Pair < > ( pv , c ) ) ; } else { newParallelClusters . add ( c ) ; } } newClustersMap . put ( pv , newParallelClusters ) ; } } clustersMap . clear ( ) ; clustersMap . putAll ( newClustersMap ) ; for ( Pair < long [ ] , ArrayModifiableDBIDs > c : notAssigned ) { if ( c . second . isEmpty ( ) ) { continue ; } Pair < long [ ] , ArrayModifiableDBIDs > parent = findParent ( relation , c , clustersMap ) ; ( parent != null ? parent : noise ) . second . addDBIDs ( c . second ) ; } List < ArrayModifiableDBIDs > noiseList = new ArrayList < > ( 1 ) ; noiseList . add ( noise . second ) ; clustersMap . put ( noise . first , noiseList ) ; }
|
Removes the clusters with size < minpts from the cluster map and adds them to their parents.
|
34,443
|
private Pair < long [ ] , ArrayModifiableDBIDs > findParent ( Relation < V > relation , Pair < long [ ] , ArrayModifiableDBIDs > child , Object2ObjectMap < long [ ] , List < ArrayModifiableDBIDs > > clustersMap ) { Centroid child_centroid = ProjectedCentroid . make ( child . first , relation , child . second ) ; Pair < long [ ] , ArrayModifiableDBIDs > result = null ; int resultCardinality = - 1 ; long [ ] childPV = child . first ; int childCardinality = BitsUtil . cardinality ( childPV ) ; for ( long [ ] parentPV : clustersMap . keySet ( ) ) { int parentCardinality = BitsUtil . cardinality ( parentPV ) ; if ( parentCardinality >= childCardinality || ( resultCardinality != - 1 && parentCardinality <= resultCardinality ) ) { continue ; } long [ ] pv = BitsUtil . andCMin ( childPV , parentPV ) ; if ( BitsUtil . equal ( pv , parentPV ) ) { List < ArrayModifiableDBIDs > parentList = clustersMap . get ( parentPV ) ; for ( ArrayModifiableDBIDs parent : parentList ) { NumberVector parent_centroid = ProjectedCentroid . make ( parentPV , relation , parent ) ; double d = weightedDistance ( child_centroid , parent_centroid , parentPV ) ; if ( d <= 2 * epsilon ) { result = new Pair < > ( parentPV , parent ) ; resultCardinality = parentCardinality ; break ; } } } } return result ; }
|
Returns the parent of the specified cluster.
|
34,444
|
private int subspaceDimensionality ( NumberVector v1 , NumberVector v2 , long [ ] pv1 , long [ ] pv2 , long [ ] commonPreferenceVector ) { int subspaceDim = v1 . getDimensionality ( ) - BitsUtil . cardinality ( commonPreferenceVector ) ; if ( BitsUtil . equal ( commonPreferenceVector , pv1 ) || BitsUtil . equal ( commonPreferenceVector , pv2 ) ) { double d = weightedDistance ( v1 , v2 , commonPreferenceVector ) ; if ( d > 2 * epsilon ) { subspaceDim ++ ; } } return subspaceDim ; }
|
Compute the common subspace dimensionality of two vectors.
|
34,445
|
protected static double weightedDistance ( NumberVector v1 , NumberVector v2 , long [ ] weightVector ) { double sqrDist = 0 ; for ( int i = BitsUtil . nextSetBit ( weightVector , 0 ) ; i >= 0 ; i = BitsUtil . nextSetBit ( weightVector , i + 1 ) ) { double manhattanI = v1 . doubleValue ( i ) - v2 . doubleValue ( i ) ; sqrDist += manhattanI * manhattanI ; } return FastMath . sqrt ( sqrDist ) ; }
|
Computes the weighted distance between the two specified vectors according to the given preference vector.
|
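This is a Euclidean distance restricted to the dimensions set in the preference vector; the same idea in self-contained form, using java.util.BitSet in place of the library's long[] bit masks (values hypothetical):

```java
import java.util.BitSet;

// Standalone sketch: Euclidean distance over only the masked dimensions.
public class WeightedDistanceDemo {
  static double weightedDistance(double[] v1, double[] v2, BitSet weights) {
    double sqrDist = 0;
    for (int i = weights.nextSetBit(0); i >= 0; i = weights.nextSetBit(i + 1)) {
      double delta = v1[i] - v2[i];
      sqrDist += delta * delta;
    }
    return Math.sqrt(sqrDist);
  }

  public static void main(String[] args) {
    BitSet pv = new BitSet();
    pv.set(0);
    pv.set(2); // only dimensions 0 and 2 contribute
    double[] a = { 0., 5., 0. };
    double[] b = { 3., -5., 4. };
    System.out.println(weightedDistance(a, b, pv)); // sqrt(9 + 16) = 5.0
  }
}
```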
34,446
|
public double [ ] [ ] processIds ( DBIDs ids , Relation < ? extends NumberVector > database ) { return CovarianceMatrix . make ( database , ids ) . destroyToPopulationMatrix ( ) ; }
|
Compute the covariance matrix for a collection of database IDs.
|
34,447
|
private void updateMeta ( ) { meta = new BundleMeta ( ) ; BundleMeta origmeta = source . getMeta ( ) ; for ( int i = 0 ; i < origmeta . size ( ) ; i ++ ) { SimpleTypeInformation < ? > type = origmeta . get ( i ) ; if ( column < 0 ) { if ( TypeUtil . NUMBER_VECTOR_VARIABLE_LENGTH . isAssignableFromType ( type ) ) { if ( type instanceof VectorFieldTypeInformation ) { @ SuppressWarnings ( "unchecked" ) final VectorFieldTypeInformation < V > castType = ( VectorFieldTypeInformation < V > ) type ; if ( dim != - 1 && castType . mindim ( ) > dim ) { throw new AbortException ( "Would filter all vectors: minimum dimensionality " + castType . mindim ( ) + " > desired dimensionality " + dim ) ; } if ( dim != - 1 && castType . maxdim ( ) < dim ) { throw new AbortException ( "Would filter all vectors: maximum dimensionality " + castType . maxdim ( ) + " < desired dimensionality " + dim ) ; } if ( dim == - 1 ) { dim = castType . mindim ( ) ; } if ( castType . mindim ( ) == castType . maxdim ( ) ) { meta . add ( castType ) ; column = i ; continue ; } } @ SuppressWarnings ( "unchecked" ) final VectorTypeInformation < V > castType = ( VectorTypeInformation < V > ) type ; if ( dim != - 1 ) { meta . add ( new VectorFieldTypeInformation < > ( FilterUtil . guessFactory ( castType ) , dim , dim , castType . getSerializer ( ) ) ) ; } else { LOG . warning ( "No dimensionality yet for column " + i ) ; meta . add ( castType ) ; } column = i ; continue ; } } meta . add ( type ) ; } }
|
Update metadata.
|
34,448
|
public static double logquantile ( double val , double loc , double scale ) { return loc + scale * ( val - MathUtil . log1mexp ( - val ) ) ; }
|
Log quantile function.
|
34,449
|
public static < C extends Model > void logClusterSizes ( Clustering < C > c ) { if ( ! LOG . isStatistics ( ) ) { return ; } final List < Cluster < C > > clusters = c . getAllClusters ( ) ; final int numc = clusters . size ( ) ; LOG . statistics ( new StringStatistic ( PREFIX + "name" , c . getLongName ( ) ) ) ; LOG . statistics ( new LongStatistic ( PREFIX + "clusters" , numc ) ) ; Hierarchy < Cluster < C > > h = c . getClusterHierarchy ( ) ; int cnum = 0 ; for ( Cluster < C > clu : clusters ) { final String p = PREFIX + "cluster-" + cnum + "." ; if ( clu . getName ( ) != null ) { LOG . statistics ( new StringStatistic ( p + "name" , clu . getName ( ) ) ) ; } LOG . statistics ( new LongStatistic ( p + "size" , clu . size ( ) ) ) ; if ( clu . isNoise ( ) ) { LOG . statistics ( new StringStatistic ( p + "noise" , "true" ) ) ; } if ( h . numChildren ( clu ) > 0 ) { StringBuilder buf = new StringBuilder ( ) ; for ( It < Cluster < C > > it = h . iterChildren ( clu ) ; it . valid ( ) ; it . advance ( ) ) { if ( buf . length ( ) > 0 ) { buf . append ( ", " ) ; } buf . append ( it . get ( ) . getName ( ) ) ; } LOG . statistics ( new StringStatistic ( p + "children" , buf . toString ( ) ) ) ; } ++ cnum ; } }
|
Log the cluster sizes of a clustering.
|
34,450
|
public void addDenseUnit ( CLIQUEUnit unit ) { int numdim = unit . dimensionality ( ) ; for ( int i = 0 ; i < numdim ; i ++ ) { BitsUtil . setI ( getDimensions ( ) , unit . getDimension ( i ) ) ; } denseUnits . add ( unit ) ; coverage += unit . numberOfFeatureVectors ( ) ; }
|
Adds the specified dense unit to this subspace.
|
34,451
|
public List < Pair < Subspace , ModifiableDBIDs > > determineClusters ( ) { List < Pair < Subspace , ModifiableDBIDs > > clusters = new ArrayList < > ( ) ; for ( CLIQUEUnit unit : denseUnits ) { if ( ! unit . isAssigned ( ) ) { ModifiableDBIDs cluster = DBIDUtil . newHashSet ( ) ; CLIQUESubspace model = new CLIQUESubspace ( getDimensions ( ) ) ; clusters . add ( new Pair < Subspace , ModifiableDBIDs > ( model , cluster ) ) ; dfs ( unit , cluster , model ) ; } } return clusters ; }
|
Determines all clusters in this subspace by performing a depth-first search to find connected dense units.
|
34,452
|
public void dfs ( CLIQUEUnit unit , ModifiableDBIDs cluster , CLIQUESubspace model ) { cluster . addDBIDs ( unit . getIds ( ) ) ; unit . markAsAssigned ( ) ; model . addDenseUnit ( unit ) ; final long [ ] dims = getDimensions ( ) ; for ( int dim = BitsUtil . nextSetBit ( dims , 0 ) ; dim >= 0 ; dim = BitsUtil . nextSetBit ( dims , dim + 1 ) ) { CLIQUEUnit left = leftNeighbor ( unit , dim ) ; if ( left != null && ! left . isAssigned ( ) ) { dfs ( left , cluster , model ) ; } CLIQUEUnit right = rightNeighbor ( unit , dim ) ; if ( right != null && ! right . isAssigned ( ) ) { dfs ( right , cluster , model ) ; } } }
|
Depth-first search to find connected dense units in this subspace that build a cluster. It starts with a unit, assigns it to a cluster, and finds all units it is connected to.
|
34,453
|
protected CLIQUEUnit leftNeighbor ( CLIQUEUnit unit , int dim ) { for ( CLIQUEUnit u : denseUnits ) { if ( u . containsLeftNeighbor ( unit , dim ) ) { return u ; } } return null ; }
|
Returns the left neighbor of the given unit in the specified dimension.
|
34,454
|
protected CLIQUEUnit rightNeighbor ( CLIQUEUnit unit , int dim ) { for ( CLIQUEUnit u : denseUnits ) { if ( u . containsRightNeighbor ( unit , dim ) ) { return u ; } } return null ; }
|
Returns the right neighbor of the given unit in the specified dimension.
|
34,455
|
private IntIterator getCommonSplitDimensions ( N node ) { Collection < SplitHistory > splitHistories = new ArrayList < > ( node . getNumEntries ( ) ) ; for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { SpatialEntry entry = node . getEntry ( i ) ; if ( ! ( entry instanceof XTreeDirectoryEntry ) ) { throw new RuntimeException ( "Wrong entry type to derive split dimension from: " + entry . getClass ( ) . getName ( ) ) ; } splitHistories . add ( ( ( XTreeDirectoryEntry ) entry ) . getSplitHistory ( ) ) ; } return SplitHistory . getCommonDimensions ( splitHistories ) ; }
|
Determine the common split dimensions from a list of entries.
|
34,456
|
private HyperBoundingBox mbr ( final int [ ] entries , final int from , final int to ) { SpatialEntry first = this . node . getEntry ( entries [ from ] ) ; ModifiableHyperBoundingBox mbr = new ModifiableHyperBoundingBox ( first ) ; for ( int i = from + 1 ; i < to ; i ++ ) { mbr . extend ( this . node . getEntry ( entries [ i ] ) ) ; } return mbr ; }
|
Computes and returns the MBR of the specified nodes; only the nodes between the from and to indexes are considered.
|
34,457
|
private void ensureSize ( int minsize ) { if ( minsize <= store . length ) { return ; } int asize = store . length ; while ( asize < minsize ) { asize = ( asize >>> 1 ) + asize ; } final int [ ] prev = store ; store = new int [ asize ] ; System . arraycopy ( prev , 0 , store , 0 , size ) ; }
|
Resize as desired.
|
34,458
|
private void grow ( ) { final int newsize = store . length + ( store . length >>> 1 ) ; final int [ ] prev = store ; store = new int [ newsize ] ; System . arraycopy ( prev , 0 , store , 0 , size ) ; }
|
Grow array by 50%.
|
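Both ensureSize and grow use the policy newsize = oldsize + (oldsize >>> 1), i.e., 1.5x growth, which keeps appends amortized O(1) while over-allocating less than doubling would; a tiny trace with a hypothetical start size:

```java
// Trace of repeated 1.5x growth: size += size >>> 1.
public class GrowthDemo {
  public static void main(String[] args) {
    int size = 8;
    for (int i = 0; i < 6; i++) {
      System.out.println(size); // 8, 12, 18, 27, 40, 60
      size += size >>> 1;
    }
  }
}
```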
34,459
|
public static double sumOfProbabilities ( DBIDIter ignore , DBIDArrayIter di , double [ ] p ) { double s = 0 ; for ( di . seek ( 0 ) ; di . valid ( ) ; di . advance ( ) ) { if ( DBIDUtil . equal ( ignore , di ) ) { continue ; } final double v = p [ di . getOffset ( ) ] ; if ( ! ( v > 0 ) ) { break ; } s += v ; } return s ; }
|
Compute the sum of probabilities; stop at the first 0 and ignore the query object.
|
34,460
|
public synchronized static Task queue ( Listener callback ) { final Task task = new Task ( callback ) ; if ( THREAD != null && THREAD . isAlive ( ) ) { THREAD . queue . add ( task ) ; return task ; } THREAD = new ThumbnailThread ( ) ; THREAD . queue . add ( task ) ; THREAD . start ( ) ; return task ; }
|
Queue a thumbnail task in a global thumbnail thread.
|
34,461
|
public static void unqueue ( Task task ) { if ( THREAD != null ) { synchronized ( THREAD ) { THREAD . queue . remove ( task ) ; } } }
|
Remove a pending task from the queue.
|
34,462
|
public void beginStep ( int step , String stepTitle , Logging logger ) { setProcessed ( step - 1 ) ; this . stepTitle = stepTitle ; logger . progress ( this ) ; }
|
Do a new step and log it.
|
34,463
|
protected static HyperBoundingBox computeBounds ( NumberVector [ ] samples ) { assert ( samples . length > 0 ) : "Cannot compute bounding box of empty set." ; final int dimensions = samples [ 0 ] . getDimensionality ( ) ; final double [ ] min = new double [ dimensions ] ; final double [ ] max = new double [ dimensions ] ; NumberVector first = samples [ 0 ] ; for ( int d = 0 ; d < dimensions ; d ++ ) { min [ d ] = max [ d ] = first . doubleValue ( d ) ; } for ( int i = 1 ; i < samples . length ; i ++ ) { NumberVector v = samples [ i ] ; for ( int d = 0 ; d < dimensions ; d ++ ) { final double c = v . doubleValue ( d ) ; min [ d ] = c < min [ d ] ? c : min [ d ] ; max [ d ] = c > max [ d ] ? c : max [ d ] ; } } return new HyperBoundingBox ( min , max ) ; }
|
Compute the bounding box for some samples.
|
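The scan above seeds min/max with the first sample and widens per dimension; the same logic on plain arrays (sample points hypothetical):

```java
import java.util.Arrays;

// Standalone min/max bounding-box scan (rows are points, columns dimensions).
public class BoundsDemo {
  public static void main(String[] args) {
    double[][] samples = { { 1, 7 }, { 4, 2 }, { 3, 9 } };
    double[] min = samples[0].clone();
    double[] max = samples[0].clone();
    for (int i = 1; i < samples.length; i++) {
      for (int d = 0; d < min.length; d++) {
        min[d] = Math.min(min[d], samples[i][d]);
        max[d] = Math.max(max[d], samples[i][d]);
      }
    }
    System.out.println(Arrays.toString(min)); // [1.0, 2.0]
    System.out.println(Arrays.toString(max)); // [4.0, 9.0]
  }
}
```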
34,464
|
protected void preprocess ( ) { final Logging log = getLogger ( ) ; createStorage ( ) ; ArrayDBIDs ids = DBIDUtil . ensureArray ( relation . getDBIDs ( ) ) ; if ( log . isStatistics ( ) ) { log . statistics ( new LongStatistic ( this . getClass ( ) . getName ( ) + ".k" , k ) ) ; } Duration duration = log . isStatistics ( ) ? log . newDuration ( this . getClass ( ) . getName ( ) + ".precomputation-time" ) . begin ( ) : null ; FiniteProgress progress = getLogger ( ) . isVerbose ( ) ? new FiniteProgress ( "Materializing k nearest neighbors (k=" + k + ")" , ids . size ( ) , getLogger ( ) ) : null ; List < ? extends KNNList > kNNList = null ; if ( usebulk ) { kNNList = knnQuery . getKNNForBulkDBIDs ( ids , k ) ; if ( kNNList != null ) { int i = 0 ; for ( DBIDIter id = ids . iter ( ) ; id . valid ( ) ; id . advance ( ) , i ++ ) { storage . put ( id , kNNList . get ( i ) ) ; log . incrementProcessed ( progress ) ; } } } else { final boolean ismetric = getDistanceQuery ( ) . getDistanceFunction ( ) . isMetric ( ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { if ( ismetric && storage . get ( iter ) != null ) { log . incrementProcessed ( progress ) ; continue ; } KNNList knn = knnQuery . getKNNForDBID ( iter , k ) ; storage . put ( iter , knn ) ; if ( ismetric ) { for ( DoubleDBIDListIter it = knn . iter ( ) ; it . valid ( ) && it . doubleValue ( ) == 0. ; it . advance ( ) ) { storage . put ( it , knn ) ; } } log . incrementProcessed ( progress ) ; } } log . ensureCompleted ( progress ) ; if ( duration != null ) { log . statistics ( duration . end ( ) ) ; } }
|
The actual preprocessing step.
|
34,465
|
protected void objectsInserted ( DBIDs ids ) { final Logging log = getLogger ( ) ; StepProgress stepprog = log . isVerbose ( ) ? new StepProgress ( 3 ) : null ; ArrayDBIDs aids = DBIDUtil . ensureArray ( ids ) ; log . beginStep ( stepprog , 1 , "New insertions ocurred, materialize their new kNNs." ) ; List < ? extends KNNList > kNNList = knnQuery . getKNNForBulkDBIDs ( aids , k ) ; DBIDIter iter = aids . iter ( ) ; for ( int i = 0 ; i < aids . size ( ) ; i ++ , iter . advance ( ) ) { storage . put ( iter , kNNList . get ( i ) ) ; } log . beginStep ( stepprog , 2 , "New insertions ocurred, update the affected kNNs." ) ; ArrayDBIDs rkNN_ids = updateKNNsAfterInsertion ( ids ) ; log . beginStep ( stepprog , 3 , "New insertions ocurred, inform listeners." ) ; fireKNNsInserted ( ids , rkNN_ids ) ; log . setCompleted ( stepprog ) ; }
|
Called after new objects have been inserted; updates the materialized neighborhood.
|
34,466
|
protected void objectsRemoved ( DBIDs ids ) { final Logging log = getLogger ( ) ; StepProgress stepprog = log . isVerbose ( ) ? new StepProgress ( 3 ) : null ; log . beginStep ( stepprog , 1 , "New deletions ocurred, remove their materialized kNNs." ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { storage . delete ( iter ) ; } log . beginStep ( stepprog , 2 , "New deletions ocurred, update the affected kNNs." ) ; ArrayDBIDs rkNN_ids = updateKNNsAfterDeletion ( ids ) ; log . beginStep ( stepprog , 3 , "New deletions ocurred, inform listeners." ) ; fireKNNsRemoved ( ids , rkNN_ids ) ; log . ensureCompleted ( stepprog ) ; }
|
Called after objects have been removed; updates the materialized neighborhood.
|
34,467
|
protected void fireKNNsInserted ( DBIDs insertions , DBIDs updates ) { KNNChangeEvent e = new KNNChangeEvent ( this , KNNChangeEvent . Type . INSERT , insertions , updates ) ; Object [ ] listeners = listenerList . getListenerList ( ) ; for ( int i = listeners . length - 2 ; i >= 0 ; i -= 2 ) { if ( listeners [ i ] == KNNListener . class ) { ( ( KNNListener ) listeners [ i + 1 ] ) . kNNsChanged ( e ) ; } } }
|
Informs all registered KNNListeners that new kNNs have been inserted and that, as a result, some kNNs have changed.
|
34,468
|
protected void fireKNNsRemoved ( DBIDs removals , DBIDs updates ) { KNNChangeEvent e = new KNNChangeEvent ( this , KNNChangeEvent . Type . DELETE , removals , updates ) ; Object [ ] listeners = listenerList . getListenerList ( ) ; for ( int i = listeners . length - 2 ; i >= 0 ; i -= 2 ) { if ( listeners [ i ] == KNNListener . class ) { ( ( KNNListener ) listeners [ i + 1 ] ) . kNNsChanged ( e ) ; } } }
|
Informs all registered KNNListeners that existing kNNs have been removed and that, as a result, some kNNs have changed.
|
34,469
|
public void buildClassifier ( Database database , Relation < ? extends ClassLabel > labelrep ) { Object2IntOpenHashMap < ClassLabel > count = new Object2IntOpenHashMap < > ( ) ; for ( DBIDIter iter = labelrep . iterDBIDs ( ) ; iter . valid ( ) ; iter . advance ( ) ) { count . addTo ( labelrep . get ( iter ) , 1 ) ; } int max = Integer . MIN_VALUE ; double size = labelrep . size ( ) ; distribution = new double [ count . size ( ) ] ; labels = new ArrayList < > ( count . size ( ) ) ; ObjectIterator < Entry < ClassLabel > > iter = count . object2IntEntrySet ( ) . fastIterator ( ) ; for ( int i = 0 ; iter . hasNext ( ) ; ++ i ) { Entry < ClassLabel > entry = iter . next ( ) ; distribution [ i ] = entry . getIntValue ( ) / size ; labels . add ( entry . getKey ( ) ) ; if ( entry . getIntValue ( ) > max ) { max = entry . getIntValue ( ) ; prediction = entry . getKey ( ) ; } } }
|
Learns the prior probability for all classes.
|
34,470
|
public Assignments < E > split ( AbstractMTree < ? , N , E , ? > tree , N node ) { final int n = node . getNumEntries ( ) ; double [ ] [ ] distanceMatrix = computeDistanceMatrix ( tree , node ) ; double miSumCR = Double . POSITIVE_INFINITY ; boolean leaf = node . isLeaf ( ) ; Assignments < E > bestAssignment = null ; for ( int i = 0 ; i < n ; i ++ ) { for ( int j = i + 1 ; j < n ; j ++ ) { Assignments < E > currentAssignments = distributor . distribute ( node , i , distanceMatrix [ i ] , j , distanceMatrix [ j ] ) ; double maxCR = Math . max ( currentAssignments . computeFirstCover ( leaf ) , currentAssignments . computeSecondCover ( leaf ) ) ; if ( maxCR < miSumCR ) { miSumCR = maxCR ; bestAssignment = currentAssignments ; } } } return bestAssignment ; }
|
Selects two objects of the specified node to be promoted and stored in the parent node. The mM-RAD strategy considers all possible pairs of objects and, after partitioning the set of entries, promotes the pair of objects for which the larger of the two covering radii is minimal.
|
34,471
|
public static double [ ] [ ] unboxVectors ( List < ? extends NumberVector > means ) { double [ ] [ ] ret = new double [ means . size ( ) ] [ ] ; for ( int i = 0 ; i < ret . length ; i ++ ) { ret [ i ] = means . get ( i ) . toArray ( ) ; } return ret ; }
|
Unbox database means to primitive means.
|
34,472
|
public void put ( double x , double y , double w ) { if ( w == 0. ) { return ; } if ( sumWe <= 0. ) { sumX = x * w ; sumY = y * w ; sumWe = w ; return ; } final double deltaX = x * sumWe - sumX ; final double deltaY = y * sumWe - sumY ; final double oldWe = sumWe ; sumWe += w ; final double f = w / ( sumWe * oldWe ) ; sumXX += f * deltaX * deltaX ; sumYY += f * deltaY * deltaY ; sumXY += f * deltaX * deltaY ; sumX += x * w ; sumY += y * w ; }
|
Put a single value into the correlation statistic.
|
34,473
|
public double getCorrelation ( ) { if ( ! ( sumXX > 0. && sumYY > 0. ) ) { return ( sumXX == sumYY ) ? 1. : 0. ; } return sumXY / FastMath . sqrt ( sumXX * sumYY ) ; }
|
Get the Pearson correlation value.
|
34,474
|
public static double coefficient ( double [ ] x , double [ ] y ) { final int xdim = x . length ; final int ydim = y . length ; if ( xdim != ydim ) { throw new IllegalArgumentException ( "Invalid arguments: arrays differ in length." ) ; } if ( xdim == 0 ) { throw new IllegalArgumentException ( "Empty vector." ) ; } double sumXX = 0. , sumYY = 0. , sumXY = 0. ; double sumX = x [ 0 ] , sumY = y [ 0 ] ; int i = 1 ; while ( i < xdim ) { final double xv = x [ i ] , yv = y [ i ] ; final double deltaX = xv * i - sumX ; final double deltaY = yv * i - sumY ; final double oldi = i ; ++ i ; final double f = 1. / ( i * oldi ) ; sumXX += f * deltaX * deltaX ; sumYY += f * deltaY * deltaY ; sumXY += f * deltaX * deltaY ; sumX += xv ; sumY += yv ; } if ( ! ( sumXX > 0. && sumYY > 0. ) ) { return ( sumXX == sumYY ) ? 1. : 0. ; } return sumXY / FastMath . sqrt ( sumXX * sumYY ) ; }
|
Compute the Pearson product-moment correlation coefficient for two FeatureVectors.
|
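The single-pass loop above is algebraically equivalent to the textbook two-pass formula r = sum((x - mx) * (y - my)) / sqrt(sum((x - mx)^2) * sum((y - my)^2)); a cross-check on a tiny hypothetical sample:

```java
// Two-pass Pearson correlation, to cross-check the single-pass version above.
public class PearsonDemo {
  static double twoPass(double[] x, double[] y) {
    final int n = x.length;
    double mx = 0, my = 0;
    for (int i = 0; i < n; i++) {
      mx += x[i];
      my += y[i];
    }
    mx /= n;
    my /= n;
    double sxx = 0, syy = 0, sxy = 0;
    for (int i = 0; i < n; i++) {
      final double dx = x[i] - mx, dy = y[i] - my;
      sxx += dx * dx;
      syy += dy * dy;
      sxy += dx * dy;
    }
    return sxy / Math.sqrt(sxx * syy);
  }

  public static void main(String[] args) {
    double[] x = { 1, 2, 3, 5 };
    double[] y = { 2, 3, 5, 8 };
    System.out.println(twoPass(x, y)); // ~0.996, same as the single-pass loop
  }
}
```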
34,475
|
public static double weightedCoefficient ( NumberVector x , NumberVector y , double [ ] weights ) { final int xdim = x . getDimensionality ( ) ; final int ydim = y . getDimensionality ( ) ; if ( xdim != ydim ) { throw new IllegalArgumentException ( "Invalid arguments: number vectors differ in dimensionality." ) ; } if ( xdim != weights . length ) { throw new IllegalArgumentException ( "Dimensionality doesn't agree to weights." ) ; } if ( xdim == 0 ) { throw new IllegalArgumentException ( "Empty vector." ) ; } double sumXX = 0. , sumYY = 0. , sumXY = 0. , sumWe = weights [ 0 ] ; double sumX = x . doubleValue ( 0 ) * sumWe , sumY = y . doubleValue ( 0 ) * sumWe ; for ( int i = 1 ; i < xdim ; ++ i ) { final double xv = x . doubleValue ( i ) , yv = y . doubleValue ( i ) , w = weights [ i ] ; final double deltaX = xv * sumWe - sumX ; final double deltaY = yv * sumWe - sumY ; final double oldWe = sumWe ; sumWe += w ; final double f = w / ( sumWe * oldWe ) ; sumXX += f * deltaX * deltaX ; sumYY += f * deltaY * deltaY ; sumXY += f * deltaX * deltaY ; sumX += xv * w ; sumY += yv * w ; } if ( ! ( sumXX > 0. && sumYY > 0. ) ) { return ( sumXX == sumYY ) ? 1. : 0. ; } return sumXY / FastMath . sqrt ( sumXX * sumYY ) ; }
|
Compute the Pearson product-moment correlation coefficient for two NumberVectors.
|
34,476
|
@ SuppressWarnings ( "unchecked" ) public static < T , A > ExtendedArray < T > extend ( A array , ArrayAdapter < T , A > getter , T extra ) { return new ExtendedArray < > ( array , ( ArrayAdapter < T , Object > ) getter , extra ) ; }
|
Static wrapper that has a nicer generics signature.
|
34,477
|
public static SelectionResult ensureSelectionResult ( final Database db ) { List < SelectionResult > selections = ResultUtil . filterResults ( db . getHierarchy ( ) , db , SelectionResult . class ) ; if ( ! selections . isEmpty ( ) ) { return selections . get ( 0 ) ; } SelectionResult sel = new SelectionResult ( ) ; ResultUtil . addChildResult ( db , sel ) ; return sel ; }
|
Ensure that there is also a selection container object.
|
34,478
|
@ SuppressWarnings ( "unused" ) public void debugRender ( GL2 gl ) { if ( ! DEBUG || ( startcamera == null ) ) { return ; } gl . glLineWidth ( 3f ) ; gl . glColor4f ( 1.f , 0.f , 0.f , .66f ) ; gl . glBegin ( GL . GL_LINES ) ; gl . glVertex3f ( 0.f , 0.f , 0.f ) ; double rot = startangle - startcamera . getRotationZ ( ) ; gl . glVertex3f ( ( float ) FastMath . cos ( rot ) * 4.f , ( float ) - FastMath . sin ( rot ) * 4.f , 0.f ) ; gl . glVertex3f ( ( float ) FastMath . cos ( rot ) * 1.f , ( float ) - FastMath . sin ( rot ) * 1.f , 0.f ) ; gl . glVertex3f ( ( float ) FastMath . cos ( rot ) * 1.f , ( float ) - FastMath . sin ( rot ) * 1.f , 1.f ) ; gl . glEnd ( ) ; }
|
Render a debugging hint for the arcball tool.
|
34,479
|
public static List < SettingsResult > getSettingsResults ( Result r ) { if ( r instanceof SettingsResult ) { List < SettingsResult > ors = new ArrayList < > ( 1 ) ; ors . add ( ( SettingsResult ) r ) ; return ors ; } if ( r instanceof HierarchicalResult ) { return ResultUtil . filterResults ( ( ( HierarchicalResult ) r ) . getHierarchy ( ) , r , SettingsResult . class ) ; } return Collections . emptyList ( ) ; }
|
Collect all settings results from a Result.
|
34,480
|
public static String usage ( Collection < TrackedParameter > options ) { StringBuilder usage = new StringBuilder ( 10000 ) ; if ( ! REFERENCE_VERSION . equals ( VERSION ) ) { usage . append ( "ELKI build: " ) . append ( VERSION ) . append ( NEWLINE ) . append ( NEWLINE ) ; } usage . append ( REFERENCE ) ; OptionUtil . formatForConsole ( usage . append ( NEWLINE ) . append ( "Parameters:" ) . append ( NEWLINE ) , FormatUtil . getConsoleWidth ( ) , options ) ; return usage . toString ( ) ; }
|
Returns a usage message explaining all known options.
|
34,481
|
protected static void printErrorMessage ( Exception e ) { if ( e instanceof AbortException ) { LoggingConfiguration . setVerbose ( Level . VERBOSE ) ; LOG . verbose ( e . getMessage ( ) ) ; } else if ( e instanceof UnspecifiedParameterException ) { LOG . error ( e . getMessage ( ) ) ; } else if ( e instanceof ParameterException ) { LOG . error ( e . getMessage ( ) ) ; } else { LOG . exception ( e ) ; } }
|
Print an error message for the given error.
|
34,482
|
private static void printDescription ( Class < ? > descriptionClass ) { if ( descriptionClass == null ) { return ; } try { LoggingConfiguration . setVerbose ( Level . VERBOSE ) ; LOG . verbose ( OptionUtil . describeParameterizable ( new StringBuilder ( ) , descriptionClass , FormatUtil . getConsoleWidth ( ) , "" ) . toString ( ) ) ; } catch ( Exception e ) { LOG . exception ( "Error instantiating class to describe." , e . getCause ( ) ) ; } }
|
Print the description for the given parameter
|
34,483
|
public static < T > void processDense ( T data , Adapter < T > adapter , Collector collector ) { final int n = adapter . size ( data ) ; double [ ] best = new double [ n ] ; Arrays . fill ( best , Double . POSITIVE_INFINITY ) ; int [ ] src = new int [ n ] ; byte [ ] connected = new byte [ n ] ; int current = 0 ; connected [ current ] = 1 ; best [ current ] = 0 ; for ( int i = n - 2 ; i >= 0 ; i -- ) { int newbesti = - 1 ; double newbestd = Double . POSITIVE_INFINITY ; for ( int j = 0 ; j < n ; ++ j ) { if ( connected [ j ] == 1 ) { continue ; } final double dist = adapter . distance ( data , current , j ) ; if ( dist < best [ j ] ) { best [ j ] = dist ; src [ j ] = current ; } if ( best [ j ] < newbestd || newbesti == - 1 ) { newbestd = best [ j ] ; newbesti = j ; } } assert ( newbesti >= 0 ) ; connected [ newbesti ] = 1 ; collector . addEdge ( newbestd , src [ newbesti ] , newbesti ) ; current = newbesti ; } }
|
Run Prim's algorithm on a dense graph .
|
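For readers without ELKI's Adapter and Collector interfaces at hand, here is a minimal self-contained sketch of the same dense-Prim pattern on a symmetric distance matrix. A complete graph is assumed, so an unreached vertex always gains a finite best cost; primMST and the {weight, from, to} edge encoding are made up for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public static List<double[]> primMST(double[][] dist) {
  final int n = dist.length;
  double[] best = new double[n];   // cheapest known cost to reach each vertex
  int[] src = new int[n];          // tree vertex providing that cost
  boolean[] inTree = new boolean[n];
  Arrays.fill(best, Double.POSITIVE_INFINITY);
  List<double[]> edges = new ArrayList<>(n - 1);
  int current = 0;
  inTree[0] = true;
  for (int added = 1; added < n; added++) {
    int nextI = -1;
    double nextD = Double.POSITIVE_INFINITY;
    for (int j = 0; j < n; j++) {
      if (inTree[j]) {
        continue;
      }
      if (dist[current][j] < best[j]) {  // relax edges from the newest vertex
        best[j] = dist[current][j];
        src[j] = current;
      }
      if (best[j] < nextD) {             // track the cheapest frontier vertex
        nextD = best[j];
        nextI = j;
      }
    }
    inTree[nextI] = true;
    edges.add(new double[] { nextD, src[nextI], nextI });  // {weight, from, to}
    current = nextI;
  }
  return edges;
}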
34,484
|
protected boolean isInputValid ( String filename , String line , String id , String msg ) { return ! filename . isEmpty ( ) || ! line . isEmpty ( ) || ! id . isEmpty ( ) || ! msg . isEmpty ( ) ; }
|
Derived classes can override this method
|
34,485
|
public void parseVCppLine ( String line , String projectPath , String compilationFile ) { this . parseVCppCompilerCLLine ( line , projectPath , compilationFile ) ; }
|
Can be used to create a list of includes , defines and options for a single line , if it follows the VC ++ format
|
34,486
|
public Set < ValgrindError > processReport ( File report ) throws XMLStreamException { ValgrindReportStreamHandler streamHandler = new ValgrindReportStreamHandler ( ) ; new StaxParser ( streamHandler ) . parse ( report ) ; return streamHandler . valgrindErrors ; }
|
Parses the given Valgrind report
|
34,487
|
private static boolean isGeneratedNodeExcluded ( AstNode astNode ) { AstNode prev = astNode . getPreviousAstNode ( ) ; return prev != null && prev . getTokenLine ( ) == astNode . getTokenLine ( ) && prev . isCopyBookOrGeneratedNode ( ) ; }
|
Exclude subsequent generated nodes if they are consecutive and on the same line .
|
34,488
|
private boolean isBreakStatementExcluded ( AstNode astNode ) { boolean exclude = false ; if ( excludeCaseBreak && astNode . getToken ( ) . getType ( ) . equals ( CxxKeyword . BREAK ) ) { for ( AstNode statement = astNode . getFirstAncestor ( CxxGrammarImpl . statement ) ; statement != null ; statement = statement . getPreviousSibling ( ) ) { if ( astNode . getTokenLine ( ) != statement . getTokenLine ( ) ) { break ; } TokenType type = statement . getToken ( ) . getType ( ) ; if ( type . equals ( CxxKeyword . CASE ) || type . equals ( CxxKeyword . DEFAULT ) ) { exclude = true ; break ; } } } return exclude ; }
|
Exclude break statement if it is on the same line as the switch label
|
34,489
|
private boolean isEmptyExpressionStatement ( AstNode astNode ) { if ( astNode . is ( CxxGrammarImpl . expressionStatement ) && ";" . equals ( astNode . getToken ( ) . getValue ( ) ) ) { AstNode statement = astNode . getFirstAncestor ( CxxGrammarImpl . selectionStatement ) ; if ( statement != null ) { return astNode . getTokenLine ( ) == statement . getTokenLine ( ) ; } return isGeneratedNodeExcluded ( astNode ) ; } return false ; }
|
Exclude empty expression statement
|
34,490
|
private Map < String , Macro > parsePredefinedUnitMacros ( Map < String , Macro > configuredMacros ) { if ( ! ctorInProgress || ( unitMacros != null ) ) { throw new IllegalStateException ( "Preconditions for initial fill-out of predefinedUnitMacros were violated" ) ; } if ( conf . getCompilationUnitSourceFiles ( ) . isEmpty ( ) && ( conf . getGlobalCompilationUnitSettings ( ) == null ) ) { return Collections . emptyMap ( ) ; } unitMacros = new MapChain < > ( ) ; if ( getMacros ( ) != unitMacros ) { throw new IllegalStateException ( "expected unitMacros as active macros map" ) ; } try { getMacros ( ) . setHighPrio ( true ) ; getMacros ( ) . putAll ( Macro . UNIT_MACROS ) ; getMacros ( ) . putAll ( configuredMacros ) ; parseForcedIncludes ( ) ; final HashMap < String , Macro > result = new HashMap < > ( unitMacros . getHighPrioMap ( ) ) ; return result ; } finally { getMacros ( ) . setHighPrio ( false ) ; unitMacros = null ; } }
|
Create a temporary unitMacros map ; this map is used as the active macros storage while the forced includes are parsed . After parsing is over , the resulting macros are extracted and the unitMacros map is destroyed ; fixedMacros is set as the active macros map again .
|
34,491
|
public < G extends Serializable > Metric < G > getMetric ( CxxMetricsFactory . Key metricKey ) { Metric < G > metric = ( Metric < G > ) this . langSpecificMetrics . get ( metricKey ) ; if ( metric == null ) { throw new IllegalStateException ( "Requested metric " + metricKey + " couldn't be found" ) ; } return metric ; }
|
Get language specific metric
|
34,492
|
public static List < String > getElements ( File file , String charset ) { List < String > list = new ArrayList < > ( ) ; try ( BufferedReader br = new BufferedReader ( new InputStreamReader ( java . nio . file . Files . newInputStream ( file . toPath ( ) ) , charset ) ) ) { StringBuilder sb = new StringBuilder ( 4096 ) ; String line ; int cnt = 0 ; final Pattern whitespacesOnly = Pattern . compile ( "^\\s*$" ) ; while ( ( line = br . readLine ( ) ) != null ) { if ( cnt > ( TOP_COUNT ) ) { if ( whitespacesOnly . matcher ( line ) . matches ( ) ) { list . add ( sb . toString ( ) ) ; sb . setLength ( 0 ) ; } else { sb . append ( line ) ; sb . append ( '\n' ) ; } } ++ cnt ; } if ( sb . length ( ) > 0 ) { list . add ( sb . toString ( ) ) ; } } catch ( IOException e ) { String msg = new StringBuilder ( 512 ) . append ( "Cannot feed the data into sonar, details: '" ) . append ( e ) . append ( "'" ) . toString ( ) ; LOG . error ( msg ) ; } return list ; }
|
Get all DrMemory elements from a file
|
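Stripped of the DrMemory-specific header skipping (TOP_COUNT), the core of getElements is a classic blank-line-delimited block reader. A minimal sketch follows; readBlocks is a made-up name, and unlike the original it drops empty blocks rather than adding them.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;

public static List<String> readBlocks(Reader reader) throws IOException {
  List<String> blocks = new ArrayList<>();
  StringBuilder sb = new StringBuilder();
  try (BufferedReader br = new BufferedReader(reader)) {
    String line;
    while ((line = br.readLine()) != null) {
      if (line.trim().isEmpty()) {  // a whitespace-only line closes a block
        if (sb.length() > 0) {
          blocks.add(sb.toString());
          sb.setLength(0);
        }
      } else {
        sb.append(line).append('\n');
      }
    }
  }
  if (sb.length() > 0) {            // flush the trailing block, as above
    blocks.add(sb.toString());
  }
  return blocks;
}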
34,493
|
public void saveUniqueViolation ( SensorContext sensorContext , CxxReportIssue issue ) { if ( uniqueIssues . add ( issue ) ) { saveViolation ( sensorContext , issue ) ; } }
|
Saves a code violation only if it wasn't already saved
|
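The dedup above hinges on Set.add returning false for elements already present, which in turn requires CxxReportIssue to implement consistent equals() and hashCode(). The pattern, reduced to a hypothetical key-based sketch:

import java.util.HashSet;
import java.util.Set;

private final Set<String> seenKeys = new HashSet<>();

void saveUnique(String issueKey, Runnable save) {
  if (seenKeys.add(issueKey)) {  // add() returns true only on first occurrence
    save.run();
  }
}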
34,494
|
public static SourceFile scanSingleFile ( InputFile file , SensorContext sensorContext , CxxLanguage language , SquidAstVisitor < Grammar > ... visitors ) { return scanSingleFileConfig ( language , file , new CxxConfiguration ( sensorContext . fileSystem ( ) . encoding ( ) ) , visitors ) ; }
|
Helper method for testing checks without having to deploy them on a Sonar instance .
|
34,495
|
public static SourceFile scanSingleFileConfig ( CxxLanguage language , InputFile file , CxxConfiguration cxxConfig , SquidAstVisitor < Grammar > ... visitors ) { if ( ! file . isFile ( ) ) { throw new IllegalArgumentException ( "File '" + file + "' not found." ) ; } AstScanner < Grammar > scanner = create ( language , cxxConfig , visitors ) ; scanner . scanFile ( file . file ( ) ) ; Collection < SourceCode > sources = scanner . getIndex ( ) . search ( new QueryByType ( SourceFile . class ) ) ; if ( sources . size ( ) != 1 ) { throw new IllegalStateException ( "Only one SourceFile was expected whereas " + sources . size ( ) + " has been returned." ) ; } return ( SourceFile ) sources . iterator ( ) . next ( ) ; }
|
Helper method for scanning a single file
|
34,496
|
public static String join ( Path path1 , Path path2 ) { if ( path2 . toString ( ) . isEmpty ( ) ) { return "" ; } if ( ! path1 . isAbsolute ( ) ) { path1 = Paths . get ( "." , path1 . toString ( ) ) ; } if ( ! path2 . isAbsolute ( ) ) { path2 = Paths . get ( "." , path2 . toString ( ) ) ; } Path result = path1 . resolve ( path2 ) . normalize ( ) ; if ( ! result . isAbsolute ( ) ) { result = Paths . get ( "." , result . toString ( ) ) ; } return result . toString ( ) ; }
|
Join two paths
|
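A short usage sketch of the join logic (the paths are made up): anchoring relative inputs at "." before resolve()/normalize() keeps the result relative when both inputs are relative.

import java.nio.file.Path;
import java.nio.file.Paths;

Path base = Paths.get("src/module");
Path rel = Paths.get("../include/api.h");
// Core JDK operation used by join() above:
Path joined = base.resolve(rel).normalize();
System.out.println(joined);  // prints src/include/api.h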
34,497
|
public static void parse ( CxxConfiguration config , File compileCommandsFile ) throws IOException { LOG . debug ( "Parsing 'JSON Compilation Database' format" ) ; ObjectMapper mapper = new ObjectMapper ( ) ; mapper . disable ( DeserializationFeature . FAIL_ON_UNKNOWN_PROPERTIES ) ; mapper . enable ( DeserializationFeature . USE_JAVA_ARRAY_FOR_JSON_ARRAY ) ; JsonCompilationDatabaseCommandObject [ ] commandObjects = mapper . readValue ( compileCommandsFile , JsonCompilationDatabaseCommandObject [ ] . class ) ; for ( JsonCompilationDatabaseCommandObject commandObject : commandObjects ) { Path cwd = Paths . get ( "." ) ; if ( commandObject . getDirectory ( ) != null ) { cwd = Paths . get ( commandObject . getDirectory ( ) ) ; } Path absPath = cwd . resolve ( commandObject . getFile ( ) ) ; if ( "__global__" . equals ( commandObject . getFile ( ) ) ) { CxxCompilationUnitSettings globalSettings = new CxxCompilationUnitSettings ( ) ; parseCommandObject ( globalSettings , commandObject ) ; config . setGlobalCompilationUnitSettings ( globalSettings ) ; } else { CxxCompilationUnitSettings settings = new CxxCompilationUnitSettings ( ) ; parseCommandObject ( settings , commandObject ) ; config . addCompilationUnitSettings ( absPath . toAbsolutePath ( ) . normalize ( ) . toString ( ) , settings ) ; } } }
|
Set up the given CxxConfiguration from the JSON compilation database
|
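For reference, a minimal sketch of the same Jackson-based parsing with a hypothetical stand-in POJO (CommandEntry is not the plugin's class; the JSON shown is an illustrative compile_commands.json entry):

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

// Example entry:
// [ { "directory": "/build", "file": "src/a.cc",
//     "command": "clang++ -Iinclude -DNDEBUG -c src/a.cc" } ]
@JsonIgnoreProperties(ignoreUnknown = true)
class CommandEntry {
  public String directory;
  public String file;
  public String command;
}

static void load(File db) throws IOException {
  CommandEntry[] entries = new ObjectMapper().readValue(db, CommandEntry[].class);
  for (CommandEntry e : entries) {
    // Resolve the source file against its build directory, as above.
    Path abs = Paths.get(e.directory).resolve(e.file).toAbsolutePath().normalize();
    // hand "abs" plus the flags parsed from e.command to the configuration
  }
}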
34,498
|
private static String getOperatorId ( AstNode operatorFunctionId ) { StringBuilder builder = new StringBuilder ( operatorFunctionId . getTokenValue ( ) ) ; AstNode operator = operatorFunctionId . getFirstDescendant ( CxxGrammarImpl . overloadableOperator ) ; if ( operator != null ) { AstNode opNode = operator . getFirstChild ( ) ; while ( opNode != null ) { builder . append ( opNode . getTokenValue ( ) ) ; opNode = opNode . getNextSibling ( ) ; } } return builder . toString ( ) ; }
|
XXX may go to a utility class
|
34,499
|
private static List < Token > getInlineDocumentation ( Token token , int line ) { List < Token > comments = new ArrayList < > ( ) ; for ( Trivia trivia : token . getTrivia ( ) ) { if ( trivia . isComment ( ) ) { Token triviaToken = trivia . getToken ( ) ; if ( ( triviaToken != null ) && ( triviaToken . getLine ( ) == line ) && ( isDoxygenInlineComment ( triviaToken . getValue ( ) ) ) ) { comments . add ( triviaToken ) ; if ( LOG . isTraceEnabled ( ) ) { LOG . trace ( "Inline doc: " + triviaToken . getValue ( ) ) ; } } } } return comments ; }
|
Check if inline Doxygen documentation is attached to the given token at the specified line
|
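The isDoxygenInlineComment helper is not shown above; a plausible reconstruction (an assumption, not the plugin's actual code) tests for Doxygen's after-member comment markers:

// Doxygen "documentation after the member" starts with one of these markers.
static boolean isDoxygenInlineComment(String comment) {
  return comment.startsWith("/*!<") || comment.startsWith("/**<")
      || comment.startsWith("//!<") || comment.startsWith("///<");
}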