idx
int64 0
41.2k
| question
stringlengths 83
4.15k
| target
stringlengths 5
715
|
|---|---|---|
4,100
|
/**
 * Parses the cache parameter section of the configuration.
 *
 * @param node   DOM node containing the cache settings
 * @param config settings object receiving the parsed values
 */
private void parseCacheConfig(final Node node, final ConfigSettings config) {
    final NodeList children = node.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
        final Node child = children.item(i);
        final String key = child.getNodeName().toUpperCase();
        if (key.equals(KEY_LIMIT_TASK_SIZE_REVISIONS)) {
            config.setConfigParameter(ConfigurationKeys.LIMIT_TASK_SIZE_REVISIONS,
                    Long.parseLong(child.getChildNodes().item(0).getNodeValue()));
        } else if (key.equals(KEY_LIMIT_TASK_SIZE_DIFFS)) {
            config.setConfigParameter(ConfigurationKeys.LIMIT_TASK_SIZE_DIFFS,
                    Long.parseLong(child.getChildNodes().item(0).getNodeValue()));
        } else if (key.equals(KEY_LIMIT_SQLSERVER_MAX_ALLOWED_PACKET)) {
            config.setConfigParameter(ConfigurationKeys.LIMIT_SQLSERVER_MAX_ALLOWED_PACKET,
                    Long.parseLong(child.getChildNodes().item(0).getNodeValue()));
        }
    }
}
|
Parses the cache parameter section .
|
4,101
|
/**
 * Parses the logging parameter section of the configuration.
 *
 * @param node   DOM node containing the logging settings
 * @param config settings object receiving the parsed values
 */
private void parseLoggingConfig(final Node node, final ConfigSettings config) {
    final NodeList children = node.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
        final Node child = children.item(i);
        final String key = child.getNodeName().toUpperCase();
        if (key.equals(KEY_ROOT_FOLDER)) {
            String folder = child.getChildNodes().item(0).getNodeValue();
            // strip the surrounding quote characters from the configured path
            folder = folder.substring(1, folder.length() - 1);
            config.setConfigParameter(ConfigurationKeys.LOGGING_PATH_DIFFTOOL, folder);
        } else if (key.equals(SUBSUBSECTION_DIFF_TOOL)) {
            parseLoggerConfig(child, config, null, ConfigurationKeys.LOGGING_LOGLEVEL_DIFFTOOL);
        }
    }
}
|
Parses the logging parameter section .
|
4,102
|
/**
 * Parses the configuration for a single logger.
 *
 * @param node     DOM node holding the logger settings
 * @param config   settings object receiving the parsed values
 * @param logPath  key under which the log path is stored
 *                 (NOTE(review): at least one caller passes null here —
 *                 confirm a LOG_PATH element never occurs for those loggers)
 * @param logLevel key under which the log level is stored
 */
private void parseLoggerConfig(final Node node, final ConfigSettings config,
        final ConfigurationKeys logPath, final ConfigurationKeys logLevel) {
    String name, value;
    Level level;
    Node nnode;
    NodeList list = node.getChildNodes();
    int length = list.getLength();
    for (int i = 0; i < length; i++) {
        nnode = list.item(i);
        name = nnode.getNodeName().toUpperCase();
        if (name.equals(KEY_LOG_PATH)) {
            value = nnode.getChildNodes().item(0).getNodeValue();
            // strip the surrounding quote characters from the path value
            value = value.substring(1, value.length() - 1);
            config.setConfigParameter(logPath, value);
        } else if (name.equals(KEY_LOG_LEVEL)) {
            level = Level.valueOf(nnode.getChildNodes().item(0).getNodeValue());
            config.setConfigParameter(logLevel, level);
        }
    }
}
|
Parses the information for a logger .
|
4,103
|
/**
 * Parses the debug parameter section of the configuration.
 *
 * @param node   DOM node containing the debug settings
 * @param config settings object receiving the parsed values
 */
private void parseDebugConfig(final Node node, final ConfigSettings config) {
    final NodeList children = node.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
        final Node child = children.item(i);
        final String key = child.getNodeName().toUpperCase();
        if (key.equals(KEY_VERIFICATION_DIFF)) {
            config.setConfigParameter(ConfigurationKeys.VERIFICATION_DIFF,
                    Boolean.parseBoolean(child.getChildNodes().item(0).getNodeValue()));
        } else if (key.equals(KEY_VERIFICATION_ENCODING)) {
            config.setConfigParameter(ConfigurationKeys.VERIFICATION_ENCODING,
                    Boolean.parseBoolean(child.getChildNodes().item(0).getNodeValue()));
        } else if (key.equals(KEY_STATISTICAL_OUTPUT)) {
            config.setConfigParameter(ConfigurationKeys.MODE_STATISTICAL_OUTPUT,
                    Boolean.parseBoolean(child.getChildNodes().item(0).getNodeValue()));
        } else if (key.equals(SUBSECTION_DEBUG_OUTPUT)) {
            // nested subsection with its own parser
            parseDebugOutputConfig(child, config);
        }
    }
}
|
Parses the debug parameter section .
|
4,104
|
/**
 * Parses the debug output parameter subsection.
 *
 * @param node   DOM node containing the debug output settings
 * @param config settings object receiving the parsed values
 */
private void parseDebugOutputConfig(final Node node, final ConfigSettings config) {
    final NodeList children = node.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
        final Node child = children.item(i);
        final String key = child.getNodeName().toUpperCase();
        if (key.equals(KEY_DEBUG_PATH)) {
            String path = child.getChildNodes().item(0).getNodeValue();
            // strip the surrounding quote characters from the configured path
            path = path.substring(1, path.length() - 1);
            config.setConfigParameter(ConfigurationKeys.LOGGING_PATH_DEBUG, path);
        } else if (key.equals(KEY_DEBUG_ENABLED)) {
            final Boolean enabled =
                    Boolean.parseBoolean(child.getChildNodes().item(0).getNodeValue());
            config.setConfigParameter(ConfigurationKeys.MODE_DEBUG_OUTPUT, enabled);
        }
    }
}
|
Parses the debug output parameter subsection .
|
4,105
|
/**
 * Returns the NestedListContainer or NestedListElement at position i,
 * or null if i is out of range.
 *
 * @param i index into the list of nested lists
 */
public NestedList getNestedList(int i) {
    return (i < lists.size()) ? lists.get(i) : null;
}
|
Returns the NestedListContainer or NestedListElement at position i .
|
4,106
|
/**
 * Adds a word and its related value to the keyword tree, one character
 * per level.
 *
 * NOTE(review): assumes word is non-empty — charAt(0) throws otherwise;
 * TODO confirm callers never pass "".
 *
 * @param word  keyword to insert
 * @param value value associated with the complete keyword
 */
public void add(final String word, final V value) {
    char c = word.charAt(0);
    LetterNode<V> node = get(c);
    if (node == null) {
        // new child carrying the prefix accumulated so far plus this character
        node = new LetterNode<V>(this.word + c);
    }
    this.nodes.put(c, node);
    if (word.length() == 1) {
        // last character reached: mark the node as a complete keyword
        node.isKeyword = true;
        node.value = value;
        return;
    }
    // recurse with the remaining suffix
    node.add(word.substring(1), value);
}
|
Adds a word and its related value .
|
4,107
|
/**
 * Compares the positions of both blocks: by revision-A start position when
 * the block is in A-order (ab == true), otherwise by revision-B start.
 *
 * @param b block to compare against
 * @return negative, zero or positive per the Comparable contract
 */
public int compareTo(final DiffBlock b) {
    // Integer.compare avoids the overflow that plain int subtraction can
    // produce for positions far apart in sign
    if (ab) {
        return Integer.compare(this.revAStart, b.revAStart);
    }
    return Integer.compare(this.revBStart, b.revBStart);
}
|
Compares the positions of both blocks .
|
4,108
|
/**
 * Returns the reference to the singleton ConfigurationManager instance.
 *
 * @throws ConfigurationException if the manager has not been initialized yet
 */
public static ConfigurationManager getInstance() throws ConfigurationException {
    if (instance != null) {
        return instance;
    }
    throw ErrorFactory.createConfigurationException(
            ErrorKeys.CONFIGURATION_CONFIGURATIONMANAGER_NOT_INITIALIZED);
}
|
Returns the reference to the instance of the ConfigurationManager .
|
4,109
|
/**
 * Returns the value of the configuration parameter, falling back to a
 * built-in default when the parameter was not configured.
 *
 * @param configParameter key to look up
 * @return the configured value, or the documented default
 * @throws ConfigurationException if the parameter is unknown and has no default
 */
public Object getConfigParameter(final ConfigurationKeys configParameter)
        throws ConfigurationException {
    final Object o = this.config.getConfigParameter(configParameter);
    if (o != null) {
        return o;
    }
    // switch on the enum replaces the long if/else chain; the defaults are
    // identical to the original reference-comparison cascade
    switch (configParameter) {
    case LIMIT_SQL_ARCHIVE_SIZE:
    case LIMIT_SQL_FILE_SIZE:
        return Long.MAX_VALUE;
    case MODE_STATISTICAL_OUTPUT:
    case MODE_DEBUG_OUTPUT:
    case VERIFICATION_ENCODING:
    case VERIFICATION_DIFF:
    case MODE_DATAFILE_OUTPUT:
        return false;
    case LOGGING_PATH_DEBUG:
        return "";
    case NAMESPACES_TO_KEEP:
        return new HashSet<Integer>();
    default:
        throw ErrorFactory.createConfigurationException(
                ErrorKeys.CONFIGURATION_CONFIGURATIONMANAGER_UNKNOWN_CONFIG_PARAMETER,
                configParameter.toString());
    }
}
|
Returns the value of the configuration parameter .
|
4,110
|
/**
 * Returns a String with all elements of a NestedList, indented by one space
 * per nesting depth; container nodes are rendered as "---" followed by their
 * children on subsequent lines.
 *
 * @param nl    list (element or container) to render; null yields ""
 * @param depth current nesting depth, used for indentation
 */
public static String outputNestedList(NestedList nl, int depth) {
    if (nl == null) {
        return "";
    }
    // StringBuilder avoids the O(n^2) cost of repeated String concatenation
    // in the original recursive implementation
    final StringBuilder result = new StringBuilder();
    for (int i = 0; i < depth; i++) {
        result.append(" ");
    }
    if (nl.getClass() == NestedListElement.class) {
        result.append(nl.getText());
    } else {
        result.append("---");
        for (NestedList child : ((NestedListContainer) nl).getNestedLists()) {
            result.append("\n").append(outputNestedList(child, depth + 1));
        }
    }
    return result.toString();
}
|
Returns String with all elements of a NestedList
|
4,111
|
/**
 * Retrieves the encoded sql statements for the table header and executes
 * them one by one.
 *
 * try-with-resources guarantees each Statement is closed even when
 * executeUpdate throws (the original leaked the statement in that case).
 *
 * @throws SQLException if a statement fails
 */
private void writeHeader() throws SQLException {
    for (String revTableHeaderQuery : sqlEncoder.getTable()) {
        try (Statement query = connection.createStatement()) {
            query.executeUpdate(revTableHeaderQuery);
        }
    }
}
|
Retrieves the encoded sql orders and executes them .
|
4,112
|
/**
 * Creates a bz2 archive (named after the input file with ".bz2" appended,
 * in the working directory) from the file at the given path.
 *
 * The two-byte "BZ" magic is written to the raw file stream ahead of the
 * compressed payload, matching the original output format.
 *
 * @param path file to compress
 */
public void compress(String path) {
    final File fileToArchive = new File(path);
    final File archivedFile = new File(fileToArchive.getName() + ".bz2");
    // try-with-resources closes both ends even when an I/O error occurs;
    // the original leaked all streams on exception
    try (BufferedInputStream input =
                 new BufferedInputStream(new FileInputStream(fileToArchive));
         FileOutputStream fos = new FileOutputStream(archivedFile)) {
        final BufferedOutputStream bufStr = new BufferedOutputStream(fos);
        fos.write("BZ".getBytes());
        final CBZip2OutputStream bzip2 = new CBZip2OutputStream(bufStr);
        final byte[] bytes = new byte[COMPRESSION_CACHE];
        int read;
        // read() reports the actual byte count; the original trusted
        // available() and ignored the return value of read(), which can
        // silently truncate or corrupt the archive on short reads
        while ((read = input.read(bytes)) != -1) {
            bzip2.write(bytes, 0, read);
        }
        bzip2.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
|
Creates bz2 archive file from file in path
|
4,113
|
/**
 * Creates an output stream that bzip2-compresses everything written to it
 * into the file at the given path. The caller is responsible for closing
 * the returned stream.
 *
 * @param path target archive file
 * @throws IOException if the file cannot be created or the header written
 */
public OutputStream getCompressionStream(String path) throws IOException {
    final File archivedFile = new File(path);
    archivedFile.createNewFile();
    final FileOutputStream fos = new FileOutputStream(archivedFile);
    try {
        final BufferedOutputStream bufStr = new BufferedOutputStream(fos);
        // bzip2 magic bytes go to the raw file stream, ahead of the payload
        fos.write("BZ".getBytes());
        return new CBZip2OutputStream(bufStr);
    } catch (IOException e) {
        // avoid leaking the file handle when the header write or the
        // compressor setup fails (the original leaked fos here)
        fos.close();
        throw e;
    }
}
|
Creates stream for compression
|
4,114
|
/**
 * Creates a character stream that decompresses the bz2 file at the given
 * path. The caller is responsible for closing the returned reader.
 *
 * @param path     bz2 file to read
 * @param encoding character encoding of the decompressed content
 * @throws IOException if the file cannot be opened
 */
public InputStreamReader getDecompressionStream(String path, String encoding)
        throws IOException {
    File fileToUncompress = new File(path);
    BufferedInputStream fileStream =
            new BufferedInputStream(new FileInputStream(fileToUncompress));
    // skip the two-byte "BZ" magic that CBZip2InputStream does not expect
    fileStream.read();
    fileStream.read();
    BufferedInputStream bufferedStream = new BufferedInputStream(fileStream);
    CBZip2InputStream input = new CBZip2InputStream(bufferedStream);
    return new InputStreamReader(input, encoding);
}
|
Creates Stream for decompression
|
4,115
|
/**
 * Decompresses the bz2 file at the given path into a file of the same name
 * without the ".bz2" suffix, created in the working directory.
 *
 * @param path bz2 file to decompress
 * @throws IOException if reading or writing fails
 */
public void decompress(String path) throws IOException {
    final File bzip2 = new File(path);
    final File unarchived = new File(bzip2.getName().replace(".bz2", ""));
    // try-with-resources closes both ends even on error (original leaked)
    try (BufferedInputStream inputStr =
                 new BufferedInputStream(new FileInputStream(bzip2));
         FileOutputStream outStr = new FileOutputStream(unarchived)) {
        // skip the two-byte "BZ" magic that CBZip2InputStream does not expect
        inputStr.read();
        inputStr.read();
        final CBZip2InputStream input =
                new CBZip2InputStream(new BufferedInputStream(inputStr));
        final byte[] buffer = new byte[DECOMPRESSION_CACHE];
        int byteRead;
        // read() returns -1 at end of stream; the original then called
        // write(buffer, 0, -1) (throws) and also stopped early whenever a
        // short read happened mid-stream
        while ((byteRead = input.read(buffer)) != -1) {
            outStr.write(buffer, 0, byteRead);
        }
        input.close();
    }
}
|
Uncompress bz2 file
|
4,116
|
/**
 * Adds the information for a new entry in the article index.
 *
 * Consumes infoList (elements are removed as they are serialized) and
 * appends one index row to the shared output buffer, either as an SQL
 * VALUES tuple or as a plain text line depending on whether an insert
 * statement prefix is configured.
 *
 * @param currentArticleID article the entry belongs to
 * @param startTime        earliest revision timestamp of the article
 * @param endTime          latest revision timestamp of the article
 * @param infoList         per-full-revision index data; emptied by this call
 */
public void add(final int currentArticleID, final long startTime,
        final long endTime, final List<ArticleIndexData> infoList) {
    if (!infoList.isEmpty()) {
        StringBuilder fullRevBuffer = new StringBuilder();
        StringBuilder revCountBuffer = new StringBuilder();
        boolean first = true;
        ArticleIndexData info;
        // serialize all entries into two space-separated value strings
        while (!infoList.isEmpty()) {
            info = infoList.remove(0);
            if (!first) {
                fullRevBuffer.append(" ");
                revCountBuffer.append(" ");
            }
            fullRevBuffer.append(info.getFullRevisionPrimaryKey());
            revCountBuffer.append(info.getStartRevisionCount());
            revCountBuffer.append(" ");
            revCountBuffer.append(info.getEndRevisionCount());
            first = false;
        }
        // sql mode is active when an insert statement prefix is configured
        boolean sql = !insertStatement.isEmpty();
        // flush before this row would exceed the server packet limit;
        // "+ 20" presumably reserves room for separators and numbers —
        // TODO confirm the margin is sufficient for large timestamps
        if (buffer.length() + fullRevBuffer.length() + revCountBuffer.length() + 20
                >= MAX_ALLOWED_PACKET) {
            storeBuffer();
        }
        if (sql) {
            // separate tuples with a comma once the first tuple was written
            if (buffer.length() > insertStatement.length()) {
                buffer.append(",");
            }
            buffer.append("(");
        }
        // sql mode quotes with single quotes, text mode with double quotes
        buffer.append(currentArticleID);
        buffer.append(",");
        buffer.append(sql ? "\'" : "\"");
        buffer.append(fullRevBuffer);
        buffer.append(sql ? "\'" : "\"");
        buffer.append(",");
        buffer.append(sql ? "\'" : "\"");
        buffer.append(revCountBuffer);
        buffer.append(sql ? "\'" : "\"");
        buffer.append(",");
        buffer.append(startTime);
        buffer.append(",");
        buffer.append(endTime);
        if (sql) {
            buffer.append(")");
        } else {
            buffer.append("\n");
        }
    }
}
|
Adds the information for a new entry in the article index .
|
4,117
|
/**
 * Removes all empty structures from a SectionContainer and all
 * substructures, recursing first and then dropping sections that ended up
 * empty. Finally collapses a container that only wraps a single untitled
 * sub-container by lifting that container's children up one level.
 *
 * @param sc container to clean; modified in place
 * @return the same (cleaned) container
 */
public static SectionContainer eliminateEmptyStructures(SectionContainer sc) {
    // iterate backwards so removals do not shift indices still to be visited
    for (int i = sc.nrOfSubSections() - 1; i >= 0; i--) {
        Section ss = sc.getSubSection(i);
        if (ss.getClass() == SectionContainer.class) {
            SectionContainer sci = (SectionContainer) ss;
            eliminateEmptyStructures(sci);
        } else if (ss.getClass() == SectionContent.class)
            eliminateEmptyStructures((SectionContent) ss);
        if (ss.empty())
            sc.removeSection(ss);
    }
    // collapse: a lone untitled sub-container is replaced by its children
    if (sc.nrOfSubSections() == 1
            && sc.getSubSection(0).getClass() == SectionContainer.class) {
        SectionContainer sc0 = (SectionContainer) sc.getSubSection(0);
        if (sc0.getTitleElement() == null) {
            sc.removeSection(sc0);
            for (int i = 0; i < sc0.nrOfSubSections(); i++)
                sc.addSection(sc0.getSubSection(i));
        }
    }
    return sc;
}
|
Removes all empty Structures from a SectionContainer and all substructures .
|
4,118
|
/**
 * Removes all empty structures from a SectionContent and all substructures:
 * paragraphs, definition lists, nested lists and tables.
 *
 * @param sc content section to clean; modified in place
 * @return the same (cleaned) section
 */
public static SectionContent eliminateEmptyStructures(SectionContent sc) {
    // all loops run backwards so removals do not shift pending indices
    for (int idx = sc.nrOfParagraphs() - 1; idx >= 0; idx--) {
        final Paragraph paragraph = sc.getParagraph(idx);
        if (paragraph.empty()) {
            sc.removeParagraph(paragraph);
        }
    }
    for (int idx = sc.nrOfDefinitionLists() - 1; idx >= 0; idx--) {
        final DefinitionList definitions = sc.getDefinitionList(idx);
        eliminateEmptyStructures(definitions);
        if (definitions.empty()) {
            sc.removeDefinitionList(definitions);
        }
    }
    for (int idx = sc.nrOfNestedLists() - 1; idx >= 0; idx--) {
        final NestedListContainer nested = sc.getNestedList(idx);
        eliminateEmptyStructures(nested);
        if (nested.empty()) {
            sc.removeNestedList(nested);
        }
    }
    for (int idx = sc.nrOfTables() - 1; idx >= 0; idx--) {
        final Table table = sc.getTable(idx);
        eliminateEmptyStructures(table);
        if (table.empty()) {
            sc.removeTable(table);
        }
    }
    return sc;
}
|
Removes all empty Structures from a SectionContent and all substructures .
|
4,119
|
/**
 * Removes all empty structures from a NestedListContainer and all
 * substructures.
 *
 * @param nlc container to clean; modified in place
 * @return the same (cleaned) container
 */
public static NestedListContainer eliminateEmptyStructures(NestedListContainer nlc) {
    // backwards so removals do not shift pending indices
    for (int idx = nlc.size() - 1; idx >= 0; idx--) {
        final NestedList entry = nlc.getNestedList(idx);
        if (entry.getClass() == NestedListContainer.class) {
            eliminateEmptyStructures((NestedListContainer) entry);
        }
        if (entry.empty()) {
            nlc.remove(entry);
        }
    }
    return nlc;
}
|
Removes all empty Structures from a NestedListContainer and all substructures .
|
4,120
|
/**
 * Removes all empty structures from a Table and all substructures.
 *
 * @param t table to clean; modified in place
 * @return the same (cleaned) table
 */
public static Table eliminateEmptyStructures(Table t) {
    // backwards so removals do not shift pending indices
    for (int idx = t.nrOfTableElements() - 1; idx >= 0; idx--) {
        final TableElement element = t.getTableElement(idx);
        eliminateEmptyStructures(element);
        if (element.empty()) {
            t.removeTableElement(element);
        }
    }
    return t;
}
|
Removes all empty Structures from a Table and all substructures .
|
4,121
|
/**
 * Removes all empty structures from a TableElement and all substructures.
 *
 * @param te table element to clean; modified in place
 * @return the same (cleaned) element
 */
public static TableElement eliminateEmptyStructures(TableElement te) {
    // backwards so removals do not shift pending indices
    for (int idx = te.nrOfSections() - 1; idx >= 0; idx--) {
        final Section section = te.getSection(idx);
        if (section.getClass() == SectionContainer.class) {
            eliminateEmptyStructures((SectionContainer) section);
        } else if (section.getClass() == SectionContent.class) {
            eliminateEmptyStructures((SectionContent) section);
        }
        if (section.empty()) {
            te.removeSection(section);
        }
    }
    return te;
}
|
Removes all empty Structures from a TableElement and all substructures .
|
4,122
|
/**
 * Removes all empty structures from a DefinitionList and all substructures:
 * the defined term (if empty) and every empty definition.
 *
 * @param dl definition list to clean; modified in place
 * @return the same (cleaned) list
 */
public static DefinitionList eliminateEmptyStructures(DefinitionList dl) {
    final ContentElement definedTerm = dl.getDefinedTerm();
    if (definedTerm != null && definedTerm.empty()) {
        dl.setDefinedTerm(null);
    }
    // backwards so removals do not shift pending indices
    for (int idx = dl.nrOfDefinitions() - 1; idx >= 0; idx--) {
        final ContentElement definition = dl.getDefinition(idx);
        if (definition.empty()) {
            dl.removeDefinition(definition);
        }
    }
    return dl;
}
|
Removes all empty Structures from a DefinitionList and all substructures .
|
4,123
|
/**
 * Creates and initializes the xml keyword tree used to recognize the
 * dump's tags while scanning character by character.
 */
private void initXMLKeys() {
    this.keywords = new SingleKeywordTree<WikipediaXMLKeys>();
    // register every tag keyword the reader needs to recognize;
    // each key is indexed by its own keyword string
    final WikipediaXMLKeys[] keys = {
            WikipediaXMLKeys.KEY_START_PAGE, WikipediaXMLKeys.KEY_END_PAGE,
            WikipediaXMLKeys.KEY_START_TITLE, WikipediaXMLKeys.KEY_END_TITLE,
            WikipediaXMLKeys.KEY_START_ID, WikipediaXMLKeys.KEY_END_ID,
            WikipediaXMLKeys.KEY_START_REVISION, WikipediaXMLKeys.KEY_END_REVISION,
            WikipediaXMLKeys.KEY_START_TIMESTAMP, WikipediaXMLKeys.KEY_END_TIMESTAMP,
            WikipediaXMLKeys.KEY_START_TEXT, WikipediaXMLKeys.KEY_END_TEXT,
            WikipediaXMLKeys.KEY_MINOR_FLAG,
            WikipediaXMLKeys.KEY_START_COMMENT, WikipediaXMLKeys.KEY_END_COMMENT,
            WikipediaXMLKeys.KEY_START_IP, WikipediaXMLKeys.KEY_END_IP,
            WikipediaXMLKeys.KEY_START_USERNAME, WikipediaXMLKeys.KEY_END_USERNAME,
            WikipediaXMLKeys.KEY_START_CONTRIBUTOR, WikipediaXMLKeys.KEY_END_CONTRIBUTOR,
            WikipediaXMLKeys.KEY_START_NAMESPACES, WikipediaXMLKeys.KEY_END_NAMESPACES
    };
    for (WikipediaXMLKeys key : keys) {
        keywords.addKeyword(key.getKeyword(), key);
    }
}
|
Creates and initializes the xml keyword tree .
|
4,124
|
/**
 * Reads the namespaces from the siteinfo section of the dump and processes
 * them in order to initialize the ArticleFilter.
 *
 * Scans the input character by character with the keyword tree; buffering
 * starts at the namespaces start tag and the buffered XML fragment is
 * parsed as a DOM document at the end tag.
 *
 * NOTE(review): the DocumentBuilderFactory is not hardened against
 * XXE/external entities — confirm the dump source is trusted.
 */
private void initNamespaces() {
    Map<Integer, String> namespaceMap = new HashMap<Integer, String>();
    try {
        int b = read();
        this.keywords.reset();
        StringBuilder buffer = null;
        while (b != -1) {
            // once buffering started, collect every character read
            if (buffer != null) {
                buffer.append((char) b);
            }
            if (this.keywords.check((char) b)) {
                switch (this.keywords.getValue()) {
                case KEY_START_NAMESPACES:
                    // begin buffering, seeded with the start tag itself so
                    // the fragment parses as well-formed XML
                    buffer = new StringBuilder(
                            WikipediaXMLKeys.KEY_START_NAMESPACES.getKeyword());
                    break;
                case KEY_END_NAMESPACES:
                    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                    factory.setIgnoringElementContentWhitespace(true);
                    Document namespaces = factory.newDocumentBuilder()
                            .parse(new InputSource(new StringReader(buffer.toString())));
                    NodeList nsList = namespaces.getChildNodes().item(0).getChildNodes();
                    for (int i = 0; i < nsList.getLength(); i++) {
                        Node curNamespace = nsList.item(i);
                        String prefix = curNamespace.getTextContent().trim();
                        // the main namespace has an empty prefix and is skipped
                        if (!prefix.isEmpty()) {
                            NamedNodeMap nsAttributes = curNamespace.getAttributes();
                            String namespace =
                                    nsAttributes.getNamedItem("key").getTextContent();
                            namespaceMap.put(Integer.parseInt(namespace), prefix);
                        }
                    }
                    buffer = null;
                    articleFilter.initializeNamespaces(namespaceMap);
                    return;
                }
                this.keywords.reset();
            }
            b = read();
        }
    } catch (IOException e) {
        System.err.println("Error reading namespaces from xml dump.");
    } catch (ParserConfigurationException e) {
        System.err.println("Error parsing namespace data.");
    } catch (SAXException e) {
        System.err.println("Error parsing namespace data.");
    }
}
|
Reads the namespaces from the siteinfo section and processes them in order to initialize the ArticleFilter
|
4,125
|
/**
 * Reads the header of an article (title and id) up to the first revision
 * start tag.
 *
 * @return true when the first revision tag was reached; false when the
 *         article filter rejected the article name
 * @throws IOException            on read errors
 * @throws ArticleReaderException on an unexpected keyword or end of file
 */
protected boolean readHeader() throws IOException, ArticleReaderException {
    this.taskHeader = new ArticleInformation();
    int size, r = read();
    StringBuilder buffer = null;
    while (r != -1) {
        // once buffering started, collect every character read
        if (buffer != null) {
            buffer.append((char) r);
        }
        if (this.keywords.check((char) r)) {
            switch (this.keywords.getValue()) {
            case KEY_START_TITLE:
            case KEY_START_ID:
                // start buffering the element content
                buffer = new StringBuilder();
                break;
            case KEY_END_TITLE:
                // the closing tag itself was buffered too — cut it off
                size = buffer.length();
                buffer.delete(
                        size - WikipediaXMLKeys.KEY_END_TITLE.getKeyword().length(), size);
                this.taskHeader.setArticleName(buffer.toString());
                if (this.articleFilter != null) {
                    if (!this.articleFilter.checkArticle(this.taskHeader.getArticleName())) {
                        return false;
                    }
                }
                buffer = null;
                break;
            case KEY_END_ID:
                size = buffer.length();
                buffer.delete(
                        size - WikipediaXMLKeys.KEY_END_ID.getKeyword().length(), size);
                this.taskHeader.setArticleId(Integer.parseInt(buffer.toString()));
                buffer = null;
                break;
            case KEY_START_REVISION:
                // header complete; the caller continues with the revisions
                this.keywords.reset();
                return true;
            default:
                throw ErrorFactory.createArticleReaderException(
                        ErrorKeys.DELTA_CONSUMERS_TASK_READER_WIKIPEDIAXMLREADER_UNEXPECTED_KEYWORD);
            }
            this.keywords.reset();
        }
        r = read();
    }
    throw ErrorFactory.createArticleReaderException(
            ErrorKeys.DELTA_CONSUMERS_TASK_READER_WIKIPEDIAXMLREADER_UNEXPECTED_END_OF_FILE);
}
|
Reads the header of an article .
|
4,126
|
/**
 * Parses the content within the contributor tags and adds the parsed
 * information (name/ip, registered flag, id) to the provided revision.
 *
 * NOTE(review): unlike readHeader, the keyword tree is not reset after a
 * match inside the loop — confirm the tree resets itself on a complete
 * match, otherwise later keywords could be missed.
 *
 * @param rev revision object receiving the contributor data
 * @param str raw text between the contributor tags
 */
protected void readContributor(Revision rev, String str)
        throws IOException, ArticleReaderException {
    char[] contrChars = str.toCharArray();
    int size;
    StringBuilder buffer = null;
    this.keywords.reset();
    for (char curChar : contrChars) {
        // once buffering started, collect every character read
        if (buffer != null) {
            buffer.append(curChar);
        }
        if (this.keywords.check(curChar)) {
            switch (this.keywords.getValue()) {
            case KEY_START_ID:
            case KEY_START_IP:
            case KEY_START_USERNAME:
                // start buffering the element content
                buffer = new StringBuilder();
                break;
            case KEY_END_IP:
                // the closing tag was buffered too — cut it off
                size = buffer.length();
                buffer.delete(
                        size - WikipediaXMLKeys.KEY_END_IP.getKeyword().length(), size);
                rev.setContributorName(SQLEscape.escape(buffer.toString()));
                // an ip contributor is anonymous
                rev.setContributorIsRegistered(false);
                buffer = null;
                break;
            case KEY_END_USERNAME:
                size = buffer.length();
                buffer.delete(
                        size - WikipediaXMLKeys.KEY_END_USERNAME.getKeyword().length(), size);
                rev.setContributorName(SQLEscape.escape(buffer.toString()));
                rev.setContributorIsRegistered(true);
                buffer = null;
                break;
            case KEY_END_ID:
                size = buffer.length();
                buffer.delete(
                        size - WikipediaXMLKeys.KEY_END_ID.getKeyword().length(), size);
                String id = buffer.toString();
                // id element may be empty for some dumps
                if (!id.isEmpty()) {
                    rev.setContributorId(Integer.parseInt(buffer.toString()));
                }
                buffer = null;
                break;
            }
        }
    }
}
|
Parses the content within the contributor tags and adds the parsed info to the provided revision object .
|
4,127
|
/**
 * Extracts template occurrences from pages and/or revisions, depending on
 * which parts of the configured mode are active, and records them in the
 * corresponding template-name-to-id maps.
 *
 * @throws WikiApiException if the revision api fails
 */
private void extractTemplates() throws WikiApiException {
    PageIterator pageIter = new PageIterator(getWiki(), true, pageBuffer);
    RevisionApi revApi = new RevisionApi(dbConf);
    int pageCounter = 0;
    long revisionCounter = 0L;
    while (pageIter.hasNext()) {
        pageCounter++;
        // periodic progress logging
        if (pageCounter % VERBOSITY == 0) {
            logger.info("{} pages processed ...", pageCounter);
        }
        Page curPage = pageIter.next();
        int curPageId = curPage.getPageId();
        if (mode.active_for_pages) {
            fillMapWithTemplateData(curPage.getText(), pageFilter, curPageId,
                    TPLNAME_TO_PAGEIDS);
        }
        if (mode.active_for_revisions) {
            // walk every revision of the page by timestamp
            List<Timestamp> tsList = revApi.getRevisionTimestamps(curPageId);
            for (Timestamp ts : tsList) {
                revisionCounter++;
                // revisions are far more numerous, so log 10x less often
                if (revisionCounter % (VERBOSITY * 10) == 0) {
                    logger.info("{} revisions processed ...", revisionCounter);
                }
                Revision curRevision = revApi.getRevision(curPageId, ts);
                int curRevisionId = curRevision.getRevisionID();
                fillMapWithTemplateData(curRevision.getRevisionText(), revisionFilter,
                        curRevisionId, TPLNAME_TO_REVISIONIDS);
            }
        }
    }
}
|
Extracts templates from pages and revisions
|
4,128
|
/**
 * Extracts template information from current page texts only.
 */
private void processPages() {
    final PageIterator pages = new PageIterator(getWiki(), true, pageBuffer);
    int processed = 0;
    while (pages.hasNext()) {
        processed++;
        // periodic progress logging
        if (processed % VERBOSITY == 0) {
            logger.info("{} pages processed ...", processed);
        }
        final Page page = pages.next();
        fillMapWithTemplateData(page.getText(), pageFilter, page.getPageId(),
                TPLNAME_TO_PAGEIDS);
    }
}
|
Extracts templates from pages only
|
4,129
|
/**
 * Processes all revisions via the RevisionIterator and extracts template
 * information from each revision text. The iterator is always closed,
 * even when initialization or iteration fails.
 */
private void processRevisions() {
    logger.info("Processing revisions, extracting template information ...");
    RevisionIterator revisions = null;
    try {
        revisions = new RevisionIterator(dbConf);
        int processed = 0;
        while (revisions.hasNext()) {
            processed++;
            // periodic progress logging
            if (processed % VERBOSITY == 0) {
                logger.info("{} revisions processed ...", processed);
            }
            final Revision revision = revisions.next();
            fillMapWithTemplateData(revision.getRevisionText(), revisionFilter,
                    revision.getRevisionID(), TPLNAME_TO_REVISIONIDS);
        }
    } catch (WikiApiException e) {
        logger.error("Error initializing Revision Iterator", e);
    } finally {
        if (revisions != null) {
            try {
                revisions.close();
            } catch (SQLException e) {
                logger.error("Error closing RevisionIterator", e);
            }
        }
    }
}
|
Processes only revision templates using the Revision Iterator
|
4,130
|
/**
 * Loads existing template ids into the map. Templates without an existing
 * id (checkTemplateId returns -1) are skipped here; they will get a new id
 * in the dump writer.
 *
 * @param info          source of existing template ids
 * @param templateNames template names to resolve
 */
private void generateTemplateIndices(WikipediaTemplateInfo info, Set<String> templateNames) {
    try {
        for (String name : templateNames) {
            int id = info.checkTemplateId(name);
            if (id != -1) {
                tplNameToTplId.put(name, id);
            }
        }
    } catch (WikiApiException e) {
        // NOTE(review): the exception is silently swallowed — presumably so
        // unresolved templates fall through to the dump writer, but confirm
        // this is intentional and consider logging it.
    }
}
|
Loads existing ids into the map . If no id exists a template will get a new one in the dump writer
|
4,131
|
/**
 * Initializes the decorator: wires up a piped stream pair and starts a
 * background thread that converts the XML input stream to SQL. Consumers
 * read the converted data from the buffered end of the pipe.
 *
 * @param inputStream XML dump input
 * @param table       target dump table selecting the conversion
 * @throws IOException if the pipe cannot be set up
 */
public void initialize(InputStream inputStream, DumpTableEnum table) throws IOException {
    unbufferedResult = new PipedInputStream();
    decodedStream = new PipedOutputStream(unbufferedResult);
    result = new BufferedInputStream(unbufferedResult, BUFFERSIZE);
    // the thread writes converted SQL into decodedStream; it must be
    // started only after the pipe ends are connected above
    xmlInputThread = new XMLDumpTableInputStreamThread(inputStream, decodedStream, table);
    xmlInputThread.start();
}
|
Decorator for InputStream which allows to convert an XML input stream to SQL
|
4,132
|
/**
 * Initializes the SQLFileParser with the given input stream and skips any
 * leading statements.
 *
 * @param inputStream SQL file input
 * @throws IOException if reading fails
 */
protected void init(InputStream inputStream) throws IOException {
    stream = inputStream;
    final InputStreamReader decoded = new InputStreamReader(stream, ENCODING);
    st = new StreamTokenizer(new BufferedReader(decoded));
    EOF_reached = false;
    skipStatements();
}
|
Init the SQLFileParser with the input stream
|
4,133
|
/**
 * Synchronizes the SQL database widgets with the controller state: the
 * checkbox mirrors the enable flag, and the host, database, user and
 * password fields plus the zip-encoding checkbox are enabled or disabled
 * accordingly.
 */
private void validateSQLFields() {
    boolean flag = controller.isEnableSQLDatabaseOutput();
    enableSQLDatabaseConnection.setSelected(flag);
    sqlHostLabel.setEnabled(flag);
    sqlHostField.setEnabled(flag);
    sqlDatabaseLabel.setEnabled(flag);
    sqlDatabaseField.setEnabled(flag);
    sqlUserLabel.setEnabled(flag);
    sqlUserField.setEnabled(flag);
    sqlPasswordLabel.setEnabled(flag);
    sqlPasswordField.setEnabled(flag);
    enableZipEncodingCheckBox.setEnabled(flag);
}
|
Validates and toggles the SQL database settings fields .
|
4,134
|
/**
 * Starts the DiffTool application.
 *
 * @param args expects exactly one argument: the path to the configuration file
 * @throws IllegalArgumentException if the configuration file argument is missing
 */
public static void main(final String[] args) {
    if (args.length != 1) {
        // message typo fixed: "ist" -> "is"
        throw new IllegalArgumentException("Configuration File is missing.");
    }
    try {
        ConfigSettings config = readConfiguration(args[0]);
        new DiffToolThread(config).run();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
|
Starts the DiffTool application .
|
4,135
|
/**
 * Reads and parses the configuration file at the given path.
 *
 * @param path path to the configuration file
 * @return the parsed configuration settings
 */
private static ConfigSettings readConfiguration(final String path)
        throws IOException, SAXException, ParserConfigurationException {
    return new ConfigurationReader(path).read();
}
|
Reads and parses the configuration file .
|
4,136
|
/**
 * Uppercases the first character of a string.
 *
 * @param s input string; an empty string is returned unchanged
 * @return the string with its first character uppercased
 */
public static String capitalize(String s) {
    // guard against empty input, which made the original charAt(0) throw
    if (s.isEmpty() || !Character.isLowerCase(s.charAt(0))) {
        return s;
    }
    return Character.toUpperCase(s.charAt(0)) + s.substring(1);
}
|
Uppercases the first character of a string .
|
4,137
|
/**
 * Forwards the pending output of the article, revision and chronological
 * indices to the output writer.
 *
 * @throws IOException  if the writer fails
 * @throws SQLException if a database-backed writer fails
 */
private void send() throws IOException, SQLException {
    this.indexWriter.write(articleIndex);
    this.indexWriter.write(revisionIndex);
    this.indexWriter.write(chronoIndex);
}
|
Checks whether the AbstractIndex classes have output available and forward them to the output writer .
|
4,138
|
/**
 * Processes the given revision and maintains the article/revision/chrono indexes.
 * A change of article ID finalizes the previous article's index entry and flushes
 * buffered output via send(); a change of full-revision ID closes the current
 * ArticleIndexData span and opens a new one. Gaps in the revision counter are
 * reported as warnings on stderr, not treated as errors.
 * Mutates startTime/endTime (observed timestamp range) and remembers the
 * revision as lastRev for the next call.
 */
public void index ( final Revision rev ) throws WikiApiException { int articleID = rev . getArticleID ( ) ; int fullRevisionID = rev . getFullRevisionID ( ) ; int revisionCounter = rev . getRevisionCounter ( ) ; if ( articleID != currentArticleID ) { if ( lastRev != null ) { info . setEndRevisionCount ( lastRev . getRevisionCounter ( ) ) ; this . infoList . add ( info ) ; try { this . articleIndex . add ( currentArticleID , startTime , endTime , infoList ) ; send ( ) ; } catch ( SQLException sql ) { sql . printStackTrace ( ) ; throw new WikiApiException ( sql ) ; } catch ( IOException sql ) { sql . printStackTrace ( ) ; throw new WikiApiException ( sql ) ; } } if ( revisionCounter != 1 ) { System . err . println ( "WARNING : ArticleID (" + articleID + ") RevisionCounter 1 expected - " + revisionCounter + " read" ) ; } startTime = Long . MAX_VALUE ; endTime = Long . MIN_VALUE ; currentArticleID = articleID ; currentFullRevisionID = fullRevisionID ; info = new ArticleIndexData ( ) ; info . setFullRevisionPrimaryKey ( rev . getPrimaryKey ( ) ) ; info . setStartRevisionCount ( rev . getRevisionCounter ( ) ) ; } else if ( fullRevisionID != currentFullRevisionID ) { if ( lastRev . getRevisionCounter ( ) + 1 != revisionCounter ) { System . err . println ( "WARNING : ArticleID (" + articleID + ")" + " RevisionCounter " + ( lastRev . getRevisionCounter ( ) + 1 ) + " expected - " + revisionCounter + " read" ) ; } info . setEndRevisionCount ( lastRev . getRevisionCounter ( ) ) ; this . infoList . add ( info ) ; currentFullRevisionID = fullRevisionID ; info = new ArticleIndexData ( ) ; info . setFullRevisionPrimaryKey ( rev . getPrimaryKey ( ) ) ; info . setStartRevisionCount ( rev . getRevisionCounter ( ) ) ; } else if ( lastRev . getRevisionCounter ( ) + 1 != revisionCounter ) { System . err . println ( "WARNING : ArticleID (" + articleID + ")" + " RevisionCounter " + ( lastRev . getRevisionCounter ( ) + 1 ) + " expected - " + revisionCounter + " read" ) ; } this . 
startTime = Math . min ( rev . getTimeStamp ( ) . getTime ( ) , startTime ) ; this . endTime = Math . max ( rev . getTimeStamp ( ) . getTime ( ) , endTime ) ; revisionIndex . add ( rev . getRevisionID ( ) , rev . getPrimaryKey ( ) , info . getFullRevisionPrimaryKey ( ) ) ; chronoIndex . add ( articleID , rev . getRevisionCounter ( ) , rev . getTimeStamp ( ) . getTime ( ) ) ; lastRev = rev ; }
|
Processes the given revision .
|
4,139
|
/**
 * Finalizes all indexes, writes the last pending article entry, flushes the
 * remaining data to the output writer and closes the database connection (if
 * one was opened). SQL and I/O failures are wrapped in WikiApiException.
 */
public void close ( ) throws WikiApiException { try { this . revisionIndex . finalizeIndex ( ) ; this . chronoIndex . finalizeIndex ( ) ; info . setEndRevisionCount ( lastRev . getRevisionCounter ( ) ) ; this . infoList . add ( info ) ; this . articleIndex . add ( currentArticleID , startTime , endTime , infoList ) ; this . articleIndex . finalizeIndex ( ) ; send ( ) ; this . indexWriter . finish ( ) ; if ( connection != null ) { this . connection . close ( ) ; } } catch ( SQLException sql ) { sql . printStackTrace ( ) ; throw new WikiApiException ( sql ) ; } catch ( IOException sql ) { sql . printStackTrace ( ) ; throw new WikiApiException ( sql ) ; } }
|
Finalizes the indices and sends the rest of the data to the output . Afterwards the database connection will be closed .
|
4,140
|
/** Applies the loaded configuration to the GUI components; IMPORT configurations additionally update the archive panel. Triggers a repaint afterwards. */
private void applyConfig ( ) { this . components . applyConfig ( config ) ; switch ( config . getConfigType ( ) ) { case DEFAULT : break ; case IMPORT : this . archives . applyConfiguration ( config ) ; } repaint ( ) ; }
|
Applies the configuration file .
|
4,141
|
/**
 * Creates the XML representation of the current settings in xmlConfig.
 * Verification errors are collected in a fresh ConfigVerification; when any
 * are present a ConfigDialog is shown and false is returned.
 *
 * @return true if the configuration was built without verification errors
 */
public boolean createConfigurationXML ( ) { errors = new ConfigVerification ( ) ; xmlConfig = new StringBuilder ( ) ; xmlConfig . append ( "<config>\r\n" ) ; components . toXML ( xmlConfig , errors ) ; xmlConfig . append ( "</config>\r\n" ) ; if ( errors . getRowCount ( ) != 0 ) { new ConfigDialog ( this ) . setVisible ( true ) ; return false ; } return true ; }
|
Creates the xml content representation of the currently used settings .
|
4,142
|
/** Loads a configuration file; the path is chosen interactively via an XML file-chooser dialog. Nothing happens if the dialog is cancelled. */
public void loadConfiguration ( ) { XMLFileChooser fc = new XMLFileChooser ( ) ; if ( fc . showOpenDialog ( new JPanel ( ) ) == XMLFileChooser . APPROVE_OPTION ) { this . loadConfig ( fc . getSelectedFile ( ) . getPath ( ) ) ; } }
|
Loads the configuration file . The path of the file will be chosen by displaying a FileChooser Dialog .
|
4,143
|
/**
 * Saves the configuration to a file chosen via an XML file-chooser dialog.
 * A ".xml" suffix is appended when the chosen path has no extension; the
 * result of the save is reported on stdout.
 */
public void saveConfiguration ( ) { if ( this . createConfigurationXML ( ) ) { XMLFileChooser fc = new XMLFileChooser ( ) ; if ( fc . showSaveDialog ( new JPanel ( ) ) == XMLFileChooser . APPROVE_OPTION ) { String path = fc . getSelectedFile ( ) . getPath ( ) ; if ( path . indexOf ( '.' ) == - 1 ) { path += ".xml" ; } if ( this . saveConfiguration ( path ) ) { System . out . println ( "SAVE CONFIG SUCCESSFULL" ) ; } else { System . out . println ( "SAVE CONFIG FAILED" ) ; } } } }
|
Saves the configuration file . The path of the file will be chosen by displaying a FileChooser Dialog .
|
4,144
|
/**
 * Writes the previously created XML configuration to the given file.
 *
 * @param path output file path
 * @return true on success; false when no configuration has been created,
 *         verification failed, or an I/O error occurred
 */
public boolean saveConfiguration(final String path) {
    if (xmlConfig == null || errors.hasFailed()) {
        return false;
    }
    // try-with-resources guarantees the writer is closed; a failing close
    // now also surfaces as an IOException instead of a silent flag flip
    try (FileWriter writer = new FileWriter(path)) {
        writer.write(xmlConfig.toString());
        writer.flush();
        return true;
    } catch (IOException ioe) {
        ioe.printStackTrace();
        return false;
    }
}
|
Save the configuration to a file .
|
4,145
|
/**
 * Enables or disables 7Zip support. When support is switched off while 7Zip
 * is the selected output compression, the compression falls back to None.
 */
public void setEnable7Zip(final boolean enable7Zip) {
    this.enable7Zip = enable7Zip;
    if (!enable7Zip && outputCompression == OutputCompressionEnum.SevenZip) {
        outputCompression = OutputCompressionEnum.None;
    }
}
|
Enables or disables the 7Zip support .
|
4,146
|
/**
 * Removes cycles from the category graph: repeatedly finds a cycle-closing
 * edge via findCycle() and deletes it until no cycle remains. Each removal is
 * logged with the source and target category titles.
 */
public void removeCycles ( ) throws WikiApiException { DefaultEdge edge = null ; while ( ( edge = findCycle ( ) ) != null ) { Category sourceCat = wiki . getCategory ( categoryGraph . getGraph ( ) . getEdgeSource ( edge ) ) ; Category targetCat = wiki . getCategory ( categoryGraph . getGraph ( ) . getEdgeTarget ( edge ) ) ; logger . info ( "Removing cycle: " + sourceCat . getTitle ( ) + " - " + targetCat . getTitle ( ) ) ; categoryGraph . getGraph ( ) . removeEdge ( edge ) ; } }
|
Removes cycles from the graph that was used to construct the cycle handler .
|
4,147
|
/** Loads the external-utility command map from an XML properties file into externalSupport. I/O failures are deliberately ignored (best effort; the map simply stays empty). Note: Properties.loadFromXML closes the stream itself. */
private void loadExternal ( ) { Properties properties = new Properties ( ) ; try { properties . loadFromXML ( new FileInputStream ( PROPERTIES_PATH ) ) ; for ( String key : properties . stringPropertyNames ( ) ) { externalSupport . put ( key , properties . getProperty ( key ) ) ; } } catch ( IOException ignore ) { } }
|
Load the properties for external utilities from a XML file
|
4,148
|
/**
 * Returns the lower-cased extension of the given file name, or null when the
 * name is null or has no usable extension (leading or trailing dot).
 */
private String getExtension(String fileName) {
    if (fileName == null) {
        return null;
    }
    int dot = fileName.lastIndexOf('.');
    // dot == 0 would be a hidden file, dot at the end means no extension
    if (dot <= 0 || dot >= fileName.length() - 1) {
        return null;
    }
    return fileName.substring(dot + 1).toLowerCase();
}
|
Return the extension of the filename
|
4,149
|
/** Checks whether the file can be handled by the internal or an external decompressor, based on its extension. */
public boolean isSupported(String fileName) {
    String ext = getExtension(fileName);
    if (isInternalSupported(ext)) {
        return true;
    }
    return isExternalSupported(ext);
}
|
Check if the file is supported by the internal or external decompressor
|
4,150
|
/**
 * Starts the configured external utility for the file's extension and returns
 * the process' stdout stream, or null if launching failed (the IOException is
 * deliberately swallowed).
 * NOTE(review): the command template comes from the properties file and the
 * file name is substituted verbatim into Runtime.exec(String) — presumably the
 * configuration is trusted; confirm file names cannot be attacker-controlled.
 */
private InputStream startExternal ( String fileName ) { InputStream result = null ; try { String extension = getExtension ( fileName ) ; String command = externalSupport . get ( extension ) . replace ( FILEPLACEHOLDER , fileName ) ; Process externalProcess = Runtime . getRuntime ( ) . exec ( command ) ; result = externalProcess . getInputStream ( ) ; } catch ( IOException ignore ) { } return result ; }
|
Starts an external utility to unpack the archive.
|
4,151
|
/**
 * Opens a plain buffered InputStream on the given file, or returns null when
 * the file cannot be opened (best effort — callers treat null as unavailable).
 */
private InputStream getDefault(String fileName) {
    try {
        return new BufferedInputStream(new FileInputStream(fileName));
    } catch (IOException ignored) {
        return null;
    }
}
|
Get default InputStream to read the data from the file
|
4,152
|
/**
 * Checks whether the template name is accepted by the white list.
 * An empty white list accepts every template.
 *
 * @param tpl template name to check
 * @return true if the white list is empty or contains the name
 */
private boolean isInWhiteList(String tpl) {
    // the original ((!empty && contains) || empty) reduces to exactly this
    return whiteList.isEmpty() || whiteList.contains(tpl);
}
|
Checks if the input string is in white list
|
4,153
|
/**
 * Checks whether the template name starts with one of the white-listed
 * prefixes. An empty prefix list allows every name.
 */
private boolean containsAllowedPrefix(String tpl) {
    if (whitePrefixList.isEmpty()) {
        return true;
    }
    for (String prefix : whitePrefixList) {
        if (tpl.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
|
Checks if the input string contains prefixes from white list
|
4,154
|
/** Checks whether the template name starts with one of the black-listed prefixes. */
private boolean containsRestrictedPrefix(String tpl) {
    for (String prefix : blackPrefixList) {
        if (tpl.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
|
Checks if the input string contains prefixes from black list
|
4,155
|
/**
 * Decides whether the template with the given name should be included in the
 * database: it must pass the name white/black lists as well as the prefix
 * white/black lists (same short-circuit order as before).
 */
public boolean acceptTemplate(String tpl) {
    return isInWhiteList(tpl) && !isInBlackList(tpl)
            && containsAllowedPrefix(tpl) && !containsRestrictedPrefix(tpl);
}
|
Checks whether to include the template with the given name in the database or not .
|
4,156
|
/**
 * Adds one revision-index entry to the buffer. In SQL mode (insertStatement
 * non-empty) entries are emitted as "(...)" tuples separated by commas; in
 * plain mode as one CSV-style line each. The buffer is flushed via
 * storeBuffer() before it would exceed MAX_ALLOWED_PACKET (100-byte margin).
 */
public void add ( final int revisionID , final long revisionPrimaryKey , final long fullRevisionPrimaryKey ) { boolean sql = ! insertStatement . isEmpty ( ) ; if ( sql && buffer . length ( ) != insertStatement . length ( ) ) { this . buffer . append ( "," ) ; } this . buffer . append ( ( sql ? "(" : "" ) + revisionID + "," + revisionPrimaryKey + "," + fullRevisionPrimaryKey + ( sql ? ")" : "" ) ) ; if ( ! sql ) { buffer . append ( "\n" ) ; } if ( buffer . length ( ) + 100 >= MAX_ALLOWED_PACKET ) { storeBuffer ( ) ; } }
|
Adds the information for an new entry in the revision index .
|
4,157
|
/**
 * Returns the primary key of the first revision (RevisionCounter = 1) of the
 * given article.
 *
 * @throws IllegalArgumentException if articleID is not positive (wrapped)
 * @throws WikiApiException if the article is unknown or a DB error occurs
 */
public int getFirstRevisionPK ( final int articleID ) throws WikiApiException { try { if ( articleID < 1 ) { throw new IllegalArgumentException ( ) ; } PreparedStatement statement = null ; ResultSet result = null ; String firstRevPK ; try { statement = this . connection . prepareStatement ( "SELECT PrimaryKey " + "FROM revisions " + "WHERE ArticleID=? AND RevisionCounter =1 LIMIT 1" ) ; statement . setInt ( 1 , articleID ) ; result = statement . executeQuery ( ) ; if ( result . next ( ) ) { firstRevPK = result . getString ( 1 ) ; } else { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " was not found." ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return Integer . parseInt ( firstRevPK ) ; } catch ( WikiApiException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the PrimaryKey for the first revision of the given article
|
4,158
|
/**
 * Returns the number of revisions of the given article. The value is taken
 * from the last space-separated token of the RevisionCounter column in
 * index_articleID_rc_ts (the highest counter of the article).
 *
 * @throws WikiApiException if the article is missing, the stored counter
 *         string has no space separator ("inconsistent"), or a DB error occurs
 */
public int getNumberOfRevisions ( final int articleID ) throws WikiApiException { try { if ( articleID < 1 ) { throw new IllegalArgumentException ( ) ; } PreparedStatement statement = null ; ResultSet result = null ; String revCounters ; try { statement = this . connection . prepareStatement ( "SELECT RevisionCounter " + "FROM index_articleID_rc_ts " + "WHERE ArticleID=? LIMIT 1" ) ; statement . setInt ( 1 , articleID ) ; result = statement . executeQuery ( ) ; if ( result . next ( ) ) { revCounters = result . getString ( 1 ) ; } else { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " was not found." ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } int index = revCounters . lastIndexOf ( ' ' ) ; if ( index == - 1 ) { throw new WikiApiException ( "Article data is inconsistent" ) ; } return Integer . parseInt ( revCounters . substring ( index + 1 , revCounters . length ( ) ) ) ; } catch ( WikiApiException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the number of revisions for the specified article .
|
4,159
|
/**
 * Returns the timestamps of all revisions of the given article. Requires an
 * index on revisions(ArticleID); otherwise a WikiInitializationException is
 * thrown to avoid an infeasible full scan.
 */
public List < Timestamp > getRevisionTimestamps ( final int articleID ) throws WikiApiException { List < Timestamp > timestamps = new LinkedList < Timestamp > ( ) ; try { if ( articleID < 1 ) { throw new IllegalArgumentException ( ) ; } PreparedStatement statement = null ; ResultSet result = null ; try { if ( ! indexExists ( "revisions" ) ) { throw new WikiInitializationException ( "Please create an index on revisions(ArticleID) in order to make this query feasible." ) ; } statement = connection . prepareStatement ( "SELECT Timestamp " + "FROM revisions WHERE ArticleID=?" ) ; statement . setInt ( 1 , articleID ) ; result = statement . executeQuery ( ) ; if ( result == null ) { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " was not found." ) ; } while ( result . next ( ) ) { timestamps . add ( new Timestamp ( result . getLong ( 1 ) ) ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return timestamps ; } catch ( WikiApiException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the timestamps of all revisions connected to the specified article .
|
4,160
|
/**
 * Counts the distinct contributor names of the revision's article that
 * contributed strictly before the given revision's timestamp; optionally
 * restricted to registered contributors. Requires an index on
 * revisions(ArticleID).
 */
public int getNumberOfUniqueContributorsBeforeRevision ( final int revisionID , boolean onlyRegistered ) throws WikiApiException { try { if ( revisionID < 1 ) { throw new IllegalArgumentException ( ) ; } int articleID = getPageIdForRevisionId ( revisionID ) ; Timestamp ts = getRevision ( revisionID ) . getTimeStamp ( ) ; int contrCount = 0 ; PreparedStatement statement = null ; ResultSet result = null ; try { if ( ! indexExists ( "revisions" ) ) { throw new WikiInitializationException ( "Please create an index on revisions(ArticleID) in order to make this query feasible." ) ; } StringBuffer sqlString = new StringBuffer ( ) ; sqlString . append ( "SELECT COUNT(DISTINCT ContributorName) FROM revisions WHERE ArticleID=? AND Timestamp<?" ) ; if ( onlyRegistered ) { sqlString . append ( " AND ContributorIsRegistered=1" ) ; } statement = connection . prepareStatement ( sqlString . toString ( ) ) ; statement . setInt ( 1 , articleID ) ; statement . setLong ( 2 , ts . getTime ( ) ) ; result = statement . executeQuery ( ) ; if ( result == null ) { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " was not found." ) ; } if ( result . next ( ) ) { contrCount = result . getInt ( 1 ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return contrCount ; } catch ( WikiApiException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the number of unique contributors to an article that have contributed before the given revision .
|
4,161
|
/** Convenience overload: delegates to the three-argument variant with the "only registered" flag set to false. */
public Map < String , Timestamp > getUserContributionMap ( final int articleID , String [ ] groupfilter ) throws WikiApiException { return getUserContributionMap ( articleID , groupfilter , false ) ; }
|
Returns a map of usernames mapped to the timestamps of their contributions .
|
4,162
|
/**
 * Returns the group names the given user is assigned to, read from the
 * user_groups table. Fails with WikiInitializationException when that table
 * has not been imported into this database.
 */
public List < String > getUserGroups ( final int userID ) throws WikiApiException { List < String > groups = new LinkedList < String > ( ) ; try { if ( userID < 1 ) { throw new IllegalArgumentException ( ) ; } if ( ! tableExists ( "user_groups" ) ) { throw new WikiInitializationException ( "User group assignment data is missing. Please download user_groups.sql for this Wikipedia from http://dumps.wikimedia.org and import the data into this database." ) ; } PreparedStatement statement = null ; ResultSet result = null ; try { statement = connection . prepareStatement ( "SELECT ug_group " + "FROM user_groups WHERE ug_user=?" ) ; statement . setInt ( 1 , userID ) ; result = statement . executeQuery ( ) ; if ( result == null ) { throw new WikiPageNotFoundException ( "The user with the ID " + userID + " was not found." ) ; } while ( result . next ( ) ) { groups . add ( result . getString ( 1 ) ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return groups ; } catch ( WikiApiException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the group assignments of the specified user
|
4,163
|
/**
 * Returns all revision IDs created by the given user, grouped by article ID.
 * Only warns (stderr) when the recommended ContributorId index is missing;
 * the query is still executed.
 */
public Map < Integer , List < Integer > > getUserRevisionIds ( int userid ) throws WikiApiException { Map < Integer , List < Integer > > revIds = new HashMap < Integer , List < Integer > > ( ) ; try { if ( userid < 1 ) { throw new IllegalArgumentException ( ) ; } if ( ! indexExists ( "revisions" , "userids" ) ) { System . err . println ( "You should create and index for the field ContributorID: create index userids ON revisions(ContributorId(15));" ) ; } PreparedStatement statement = null ; ResultSet result = null ; try { statement = connection . prepareStatement ( "SELECT ArticleID, RevisionID " + "FROM revisions WHERE ContributorId=?" ) ; statement . setInt ( 1 , userid ) ; result = statement . executeQuery ( ) ; if ( result == null ) { throw new WikiPageNotFoundException ( "No revisions for user " + userid ) ; } while ( result . next ( ) ) { int artId = result . getInt ( 1 ) ; int revId = result . getInt ( 2 ) ; if ( revIds . containsKey ( artId ) ) { revIds . get ( artId ) . add ( revId ) ; } else { List < Integer > revList = new ArrayList < Integer > ( ) ; revList . add ( revId ) ; revIds . put ( artId , revList ) ; } } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return revIds ; } catch ( WikiApiException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the revisionids of all revisions created by given user
|
4,164
|
/**
 * Returns a map from revision timestamp to the decoded DiffPart collection of
 * that revision, for the whole article. The Revision column may be stored as
 * binary (LONGVARBINARY) or text; the decoder input is chosen per row
 * accordingly. Requires an index on revisions(ArticleID).
 */
public Map < Timestamp , Collection < DiffPart > > getTimestampToRevisionMap ( final int articleID ) throws WikiApiException { Map < Timestamp , Collection < DiffPart > > tsDiffPartsMap = new HashMap < Timestamp , Collection < DiffPart > > ( ) ; try { if ( articleID < 1 ) { throw new IllegalArgumentException ( ) ; } PreparedStatement statement = null ; ResultSet result = null ; RevisionDecoder decoder = new RevisionDecoder ( config . getCharacterSet ( ) ) ; try { if ( ! indexExists ( "revisions" ) ) { throw new WikiInitializationException ( "Please create an index on revisions(ArticleID) in order to make this query feasible." ) ; } statement = connection . prepareStatement ( "SELECT Timestamp, Revision " + "FROM revisions WHERE ArticleID=?" ) ; ; statement . setInt ( 1 , articleID ) ; result = statement . executeQuery ( ) ; if ( result == null ) { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " was not found." ) ; } while ( result . next ( ) ) { boolean binaryData = result . getMetaData ( ) . getColumnType ( 2 ) == Types . LONGVARBINARY ; if ( binaryData ) { decoder . setInput ( result . getBinaryStream ( 2 ) , true ) ; } else { decoder . setInput ( result . getString ( 2 ) ) ; } Diff diff = decoder . decode ( ) ; Collection < DiffPart > parts = new LinkedList < DiffPart > ( ) ; Iterator < DiffPart > it = diff . iterator ( ) ; while ( it . hasNext ( ) ) { parts . add ( it . next ( ) ) ; } tsDiffPartsMap . put ( new Timestamp ( result . getLong ( 1 ) ) , parts ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return tsDiffPartsMap ; } catch ( WikiApiException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns a map of timestamps mapped on the corresponding DiffPart - Collections . Can be used to compile statistics over all changes that have been made in one article .
|
4,165
|
/**
 * Returns the first or last appearance timestamp of an article.
 *
 * @param firstOrLast column name to select ("FirstAppearance"/"LastAppearance");
 *        interpolated into the SQL — callers must pass only trusted constants
 */
private Timestamp getDateOfAppearance ( final int articleID , final String firstOrLast ) throws WikiApiException { try { if ( articleID < 1 ) { throw new IllegalArgumentException ( ) ; } PreparedStatement statement = null ; ResultSet result = null ; long time ; try { statement = this . connection . prepareStatement ( "SELECT " + firstOrLast + " FROM index_articleID_rc_ts " + "WHERE ArticleID=? LIMIT 1" ) ; statement . setInt ( 1 , articleID ) ; result = statement . executeQuery ( ) ; if ( result . next ( ) ) { time = result . getLong ( 1 ) ; } else { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " was not found." ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return new Timestamp ( time ) ; } catch ( WikiApiException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the timestamp of the first or last revision connected to the specified article .
|
4,166
|
/**
 * Returns the revision with the given revision ID. Looks up the full-revision
 * primary key and the offset (limit) of the target revision relative to it in
 * index_revisionID, then reconstructs the metadata via buildRevisionMetaData.
 */
public Revision getRevision ( final int revisionID ) throws WikiApiException { try { if ( revisionID < 1 ) { throw new IllegalArgumentException ( ) ; } int fullRevPK = - 1 ; int limit = 1 ; PreparedStatement statement = null ; ResultSet result = null ; try { statement = this . connection . prepareStatement ( "SELECT FullRevisionPK, RevisionPK " + "FROM index_revisionID " + "WHERE revisionID=? LIMIT 1" ) ; statement . setInt ( 1 , revisionID ) ; result = statement . executeQuery ( ) ; if ( result . next ( ) ) { fullRevPK = result . getInt ( 1 ) ; limit = ( result . getInt ( 2 ) - fullRevPK ) + 1 ; } else { throw new WikiPageNotFoundException ( "The revision with the ID " + revisionID + " was not found." ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return buildRevisionMetaData ( fullRevPK , limit ) ; } catch ( WikiPageNotFoundException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the by the id specified revision .
|
4,167
|
/**
 * Returns the revision identified by article ID and (chronological) revision
 * counter. The counter is first mapped through checkMapping, then resolved
 * against the article's full-revision index entry.
 */
public Revision getRevision ( final int articleID , final int revisionCounter ) throws WikiApiException { try { if ( articleID < 1 || revisionCounter < 1 ) { throw new IllegalArgumentException ( ) ; } int revisionIndex = checkMapping ( articleID , revisionCounter ) ; String fullRevisions , revCounters ; PreparedStatement statement = null ; ResultSet result = null ; try { statement = this . connection . prepareStatement ( "SELECT FullRevisionPKs, RevisionCounter FROM index_articleID_rc_ts WHERE ArticleID=? LIMIT 1" ) ; statement . setInt ( 1 , articleID ) ; result = statement . executeQuery ( ) ; if ( result . next ( ) ) { fullRevisions = result . getString ( 1 ) ; revCounters = result . getString ( 2 ) ; } else { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " was not found." ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return getReferencedRevision ( articleID , revisionIndex , fullRevisions , revCounters ) ; } catch ( WikiPageNotFoundException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the by the article ID and revisionCounter specified revision . Note that this method returns the revision in chronological order .
|
4,168
|
/**
 * Returns the latest revision of the article at or before the given
 * timestamp. First reads the article's full-revision PK range and first
 * appearance from index_articleID_rc_ts (rejecting timestamps before the
 * first appearance), then selects the newest matching revision counter from
 * the revisions table and resolves it via getReferencedRevision.
 */
public Revision getRevision ( final int articleID , final Timestamp time ) throws WikiApiException { try { PreparedStatement statement = null ; ResultSet result = null ; String fullRevisions ; String revisionCounters ; if ( articleID < 1 || time == null || time . getTime ( ) <= 0 ) { throw new IllegalArgumentException ( ) ; } int firstPK = - 1 , lastPK = - 1 ; try { statement = this . connection . prepareStatement ( "SELECT FullRevisionPKs, RevisionCounter," + " FirstAppearance " + "FROM index_articleID_rc_ts " + "WHERE ArticleID=? LIMIT 1" ) ; statement . setInt ( 1 , articleID ) ; result = statement . executeQuery ( ) ; if ( result . next ( ) ) { fullRevisions = result . getString ( 1 ) ; revisionCounters = result . getString ( 2 ) ; long firstDate = result . getLong ( 3 ) ; int max = fullRevisions . length ( ) ; int index = fullRevisions . indexOf ( ' ' ) ; if ( index == - 1 ) { index = max ; } firstPK = Integer . parseInt ( fullRevisions . substring ( 0 , index ) ) ; index = revisionCounters . lastIndexOf ( ' ' ) + 1 ; lastPK = firstPK + Integer . parseInt ( revisionCounters . substring ( index , revisionCounters . length ( ) ) ) ; if ( time . getTime ( ) < firstDate ) { throw new WikiPageNotFoundException ( "No revision before the " + "specified date [" + time + "]" ) ; } } else { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " was not found." ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } try { statement = this . connection . prepareStatement ( "SELECT RevisionCounter FROM revisions WHERE PrimaryKey >= ? AND PrimaryKey < ? AND Timestamp <= ? ORDER BY Timestamp DESC LIMIT 1" ) ; statement . setInt ( 1 , firstPK ) ; statement . setInt ( 2 , lastPK ) ; statement . setLong ( 3 , time . getTime ( ) ) ; result = statement . executeQuery ( ) ; if ( result . next ( ) ) { int revisionCount = result . 
getInt ( 1 ) ; return getReferencedRevision ( articleID , revisionCount , fullRevisions , revisionCounters ) ; } else { throw new WikiPageNotFoundException ( "The revision with the specified timestamp was not found." ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } } catch ( WikiPageNotFoundException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
Returns the by the article ID and timestamp specified revision . Note that the timestamp is not an unique identifier of a revision related to an article . The returned revision should be the first revision that can be found inside the database .
|
4,169
|
/**
 * Maps a chronological revision counter to the stored counter using the
 * article's mapping string from index_chronological. When no mapping row
 * exists, the counter is returned unchanged.
 */
protected int checkMapping ( final int articleID , final int revisionCounter ) throws SQLException { PreparedStatement statement = null ; ResultSet result = null ; try { statement = this . connection . prepareStatement ( "SELECT Mapping " + "FROM index_chronological " + "WHERE ArticleID=? LIMIT 1" ) ; statement . setInt ( 1 , articleID ) ; result = statement . executeQuery ( ) ; if ( result . next ( ) ) { String mapping = result . getString ( 1 ) ; return getMapping ( mapping , revisionCounter ) ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } return revisionCounter ; }
|
This method maps the chronological order to the revisionCounter .
|
4,170
|
/**
 * Scans the space-separated mapping string ("revC mapC revC mapC ...") for
 * the given revision counter and returns its mapped value; if the counter has
 * no entry, it is returned unchanged.
 */
private int getMapping ( final String mapping , final int revisionCounter ) { String tempA , tempB ; int length = 0 ; int revC = - 1 , mapC = - 1 ; int index , max = mapping . length ( ) ; while ( length < max && revC < revisionCounter ) { index = mapping . indexOf ( ' ' , length ) ; tempA = mapping . substring ( length , index ) ; length = index + 1 ; index = mapping . indexOf ( ' ' , length ) ; if ( index == - 1 ) { index = mapping . length ( ) ; } tempB = mapping . substring ( length , index ) ; length = index + 1 ; revC = Integer . parseInt ( tempA ) ; mapC = Integer . parseInt ( tempB ) ; } if ( revC == revisionCounter ) { return mapC ; } return revisionCounter ; }
|
This method returns the correct mapping of the given input .
|
4,171
|
/**
 * Walks the parallel space-separated lists of full-revision PKs and revision
 * counter ranges until the range covering revisionIndex is found, then builds
 * the revision metadata from that full revision plus the offset inside the
 * range. Throws WikiPageNotFoundException when the index exceeds the last range.
 */
private Revision getReferencedRevision ( final int articleID , final int revisionIndex , final String fullRevisions , final String revCounters ) throws WikiApiException { try { int fullRevPK = - 1 ; int limit = 1 ; String fullRev = null ; int revA = - 1 , revB = - 1 ; int lengthFR = 0 ; int lengthRC = 0 ; int index ; int max = fullRevisions . length ( ) ; while ( lengthFR < max && revB < revisionIndex ) { index = fullRevisions . indexOf ( ' ' , lengthFR ) ; if ( index == - 1 ) { index = max ; } fullRev = fullRevisions . substring ( lengthFR , index ) ; lengthFR = index + 1 ; index = revCounters . indexOf ( ' ' , lengthRC ) ; revA = Integer . parseInt ( revCounters . substring ( lengthRC , index ) ) ; lengthRC = index + 1 ; index = revCounters . indexOf ( ' ' , lengthRC ) ; if ( index == - 1 ) { index = revCounters . length ( ) ; } revB = Integer . parseInt ( revCounters . substring ( lengthRC , index ) ) ; lengthRC = index + 1 ; } if ( revisionIndex > revB ) { throw new WikiPageNotFoundException ( "The article with the ID " + articleID + " has no revision number " + revisionIndex ) ; } fullRevPK = Integer . parseInt ( fullRev ) ; limit = ( revisionIndex - revA ) + 1 ; return buildRevisionMetaData ( fullRevPK , limit ) ; } catch ( WikiPageNotFoundException e ) { throw e ; } catch ( Exception e ) { throw new WikiApiException ( e ) ; } }
|
This method identifies the correct full revision and retrieves the reference revision .
|
4,172
|
/**
 * Checks whether any non-primary index exists on the table; when indexName is
 * given, checks for that specific index (column 3 of SHOW INDEX is Key_name).
 * MySQL-specific (SHOW INDEX); the table name is interpolated, so callers
 * must pass only trusted constants.
 */
private boolean indexExists ( String table , String indexName ) throws SQLException { PreparedStatement statement = null ; ResultSet result = null ; try { statement = this . connection . prepareStatement ( "SHOW INDEX FROM " + table + " WHERE Key_name!= 'PRIMARY'" ) ; result = statement . executeQuery ( ) ; if ( result == null || ! result . next ( ) ) { return false ; } if ( indexName != null ) { result . first ( ) ; boolean specificIndexExists = false ; while ( result . next ( ) ) { if ( result . getString ( 3 ) . equals ( indexName ) ) { specificIndexExists = true ; } } return specificIndexExists ? true : false ; } else { return true ; } } finally { if ( statement != null ) { statement . close ( ) ; } if ( result != null ) { result . close ( ) ; } } }
|
Checks if an index with a specific name exists in a given table .
|
4,173
|
/**
 * Builds a pageID -> Hibernate-ID map for all categories. Hibernate IDs are
 * needed to load objects from the database while internal references use
 * pageIDs. Runs one HQL projection query inside its own transaction.
 */
public Map < Integer , Long > getIdMappingCategories ( ) { Map < Integer , Long > idMapping = new HashMap < Integer , Long > ( ) ; Session session = WikiHibernateUtil . getSessionFactory ( this . dbConfig ) . getCurrentSession ( ) ; session . beginTransaction ( ) ; Iterator results = session . createQuery ( "select cat.id, cat.pageId from Category as cat" ) . list ( ) . iterator ( ) ; while ( results . hasNext ( ) ) { Object [ ] row = ( Object [ ] ) results . next ( ) ; idMapping . put ( ( Integer ) row [ 1 ] , ( Long ) row [ 0 ] ) ; } session . getTransaction ( ) . commit ( ) ; return idMapping ; }
|
Hibernate IDs are needed to load an object from the database . Internal references are via pageIDs .
|
4,174
|
/** Starts a new Diff task part for the given revision task: bumps the part counter and creates a result task carrying the same header. */
private void init ( final Task < Revision > task ) { this . partCounter ++ ; this . result = new Task < Diff > ( task . getHeader ( ) , partCounter ) ; }
|
Initializes the processing of a RevisionTask using a new DiffTask .
|
4,175
|
/** Resets the per-article processing state for a new revision task: remembers the article ID and clears counters and the previous/current revision references. */
protected void initNewTask ( final int taskID ) { this . articleID = taskID ; this . partCounter = 0 ; this . revisionCounter = 0 ; this . revPrevious = null ; this . revCurrent = null ; }
|
Initializes the processing of a new RevisionTask .
|
4,176
|
/**
 * Builds a Diff that stores the revision as one uncompressed full-revision
 * part. Also caches the revision text in revCurrent (as char[]) for later
 * diffing, and records the UTF byte length in the codec data.
 */
private Diff generateFullRevision ( final Revision revision ) throws UnsupportedEncodingException { Diff diff = new Diff ( ) ; RevisionCodecData codecData = new RevisionCodecData ( ) ; part = new DiffPart ( DiffAction . FULL_REVISION_UNCOMPRESSED ) ; text = revision . getRevisionText ( ) ; revCurrent = text . toCharArray ( ) ; part . setText ( text ) ; codecData . checkBlocksizeL ( text . getBytes ( WIKIPEDIA_ENCODING ) . length ) ; diff . add ( part ) ; diff . setCodecData ( codecData ) ; return diff ; }
|
Generates a FullRevision .
|
4,177
|
/** Rebuilds the character -> occurrence-positions map (field 'positions') for input[start, end). */
private void scan ( final char [ ] input , final int start , final int end ) { this . positions = new HashMap < Character , ArrayList < Integer > > ( ) ; ArrayList < Integer > list ; char c ; for ( int i = start ; i < end ; i ++ ) { c = input [ i ] ; list = positions . get ( c ) ; if ( list == null ) { list = new ArrayList < Integer > ( ) ; positions . put ( c , list ) ; } list . add ( i ) ; } }
|
Scans the input and creates the character - > position mapping .
|
4,178
|
/**
 * Searches for the longest common substring between revA and revB starting at
 * revB[index], considering only candidate start positions in {@code list}
 * (the positions of revB[index] inside revA) and skipping characters already
 * claimed by earlier matches ({@code revABlocked}/{@code revBBlocked}).
 * If the best match exceeds VALUE_MINIMUM_LONGEST_COMMON_SUBSTRING, a
 * DiffBlock for each side is queued, the matched ranges are marked as
 * blocked, and the block counter is advanced.
 * Side effects: updates longestMatch_size/longestMatch_start, queueA, queueB,
 * blockCount and both blocked arrays.
 *
 * @param revA  characters of revision A
 * @param list  positions of revB[index] inside revA (candidate match starts)
 * @param revB  characters of revision B
 * @param index position in revB directly BEFORE the match to extend — note
 *              that extension starts at index + 1 on the revB side
 * @return true if a sufficiently long match was found and recorded
 */
private boolean findLongestMatch ( final char [ ] revA , final ArrayList < Integer > list , final char [ ] revB , final int index ) { int match ; longestMatch_size = - 1 ; int size = list . size ( ) ; int revAsize = revA . length ; int revBsize = revB . length ; int start , end , count ; for ( int i = 0 ; i < size ; i ++ ) { start = list . get ( i ) ; if ( ! revABlocked [ start ] && ! revBBlocked [ index + 1 ] ) { count = index + 1 ; end = start + 1 ; while ( end < revAsize && count < revBsize && revA [ end ] == revB [ count ] && ! revABlocked [ end ] && ! revBBlocked [ count ] ) { end ++ ; count ++ ; } match = end - start ; if ( match > longestMatch_size ) { longestMatch_size = match ; longestMatch_start = start ; } } } if ( longestMatch_size <= VALUE_MINIMUM_LONGEST_COMMON_SUBSTRING ) { return false ; } queueA . add ( new DiffBlock ( this . blockCount , longestMatch_start , longestMatch_start + longestMatch_size , index , index + longestMatch_size , true ) ) ; queueB . add ( new DiffBlock ( this . blockCount , longestMatch_start , longestMatch_start + longestMatch_size , index , index + longestMatch_size , false ) ) ; blockCount ++ ; for ( int i = 0 , j = longestMatch_start , k = index ; i < longestMatch_size ; i ++ , j ++ , k ++ ) { revABlocked [ j ] = true ; revBBlocked [ k ] = true ; } return true ; }
|
Searches for the longest common substring.
|
4,179
|
/**
 * Advances the keyword-tree cursor by one character: {@code current} is
 * replaced by its child for {@code c}. If no child exists, the tree is reset
 * via {@code reset()}.
 *
 * NOTE(review): after a failed lookup this still calls
 * {@code current.isKeyword()} — correctness relies on {@code reset()}
 * reassigning {@code current} to a non-null root node, otherwise this throws
 * an NPE. Also, on a mismatch the return value is the ROOT node's keyword
 * flag rather than an unconditional false — confirm this is intended.
 *
 * @param c next input character
 * @return whether the node reached (or the root after a reset) is a keyword
 */
public boolean check ( final char c ) { current = current . get ( c ) ; if ( current == null ) { reset ( ) ; } return current . isKeyword ( ) ; }
|
Checks whether the character is related to the currently used node . If the comparison fails , the keyword tree will be reset to its root node ; otherwise the related node will replace the current node .
|
4,180
|
/**
 * Records one future chrono-index entry. When the article changes, the
 * entries collected for the previous article are flushed to the buffer first
 * and a new collection list is started.
 *
 * @param articleID       id of the article the entry belongs to
 * @param revisionCounter counter of the revision within the article
 * @param timestamp       revision timestamp
 */
public void add(final int articleID, final int revisionCounter, final long timestamp) {
    if (this.articleID != articleID) {
        // Switching articles: persist what we collected so far (if anything).
        if (this.list != null) {
            addToBuffer();
        }
        this.articleID = articleID;
        this.list = new ArrayList<ChronoIndexData>();
    }
    this.list.add(new ChronoIndexData(timestamp, revisionCounter));
}
|
Adds the information for a new entry in the chrono index .
|
4,181
|
/**
 * Converts the collected per-article entries into the chrono-index mapping
 * and reverse mapping and appends them to the output buffer; the entry list
 * is consumed (emptied) in the process.
 *
 * Two sort passes are used: the first sort orders entries so that position i
 * should hold revision counter i — deviations are written to the reverse
 * mapping as "index counter" pairs; the entries are then re-sorted (the sort
 * flag is flipped) and deviations between counter and assigned index are
 * written to the forward mapping as "counter index" pairs.
 *
 * Output format depends on whether an SQL insert statement prefix is set:
 * SQL mode emits "(articleID,'mapping','reverseMapping')" tuples separated by
 * commas; otherwise a quoted CSV-style line is appended. If adding the value
 * would exceed MAX_ALLOWED_PACKET, the buffer is flushed via storeBuffer()
 * first. Articles whose order needs no correction (empty mapping) produce no
 * output at all.
 */
private void addToBuffer ( ) { if ( list != null && ! list . isEmpty ( ) ) { ChronoIndexData info ; Collections . sort ( list ) ; StringBuilder reverseMapping = new StringBuilder ( ) ; int size = list . size ( ) ; for ( int i = 1 ; i <= size ; i ++ ) { info = list . get ( i - 1 ) ; if ( info . getRevisionCounter ( ) != i ) { if ( reverseMapping . length ( ) > 0 ) { reverseMapping . append ( " " ) ; } reverseMapping . append ( i ) ; reverseMapping . append ( " " ) ; reverseMapping . append ( info . getRevisionCounter ( ) ) ; } info . setIndex ( i ) ; info . setSortFlag ( false ) ; } Collections . sort ( list ) ; StringBuilder mapping = new StringBuilder ( ) ; while ( ! list . isEmpty ( ) ) { info = list . remove ( 0 ) ; if ( info . getRevisionCounter ( ) != info . getIndex ( ) ) { if ( mapping . length ( ) > 0 ) { mapping . append ( " " ) ; } mapping . append ( info . getRevisionCounter ( ) ) ; mapping . append ( " " ) ; mapping . append ( info . getIndex ( ) ) ; } } if ( mapping . length ( ) > 0 ) { boolean sql = ! insertStatement . isEmpty ( ) ; String val = ( sql ? "(" : "" ) + articleID + ( sql ? ",'" : ",\"" ) + mapping . toString ( ) + ( sql ? "','" : "\",\"" ) + reverseMapping . toString ( ) + ( sql ? "')" : "\"" ) ; if ( buffer . length ( ) + val . length ( ) >= MAX_ALLOWED_PACKET ) { storeBuffer ( ) ; } if ( sql && buffer . length ( ) > insertStatement . length ( ) ) { buffer . append ( "," ) ; } buffer . append ( val ) ; if ( ! sql ) { buffer . append ( "\n" ) ; } } } }
|
Creates the mapping and the reverse mapping . The generated information will be added to the query buffer . This list will be cleared afterwards .
|
4,182
|
/**
 * Returns the shortest path from the given node to the root as a list of
 * pageIds; both endpoints are included. Returns null when no path exists.
 *
 * @param root pageId of the root node
 * @param node pageId of the start node
 * @return pageIds on the shortest path, or null if the node is unreachable
 * @throws WikiApiException on Wikipedia access errors during path expansion
 */
private List<Integer> getPathToRoot(int root, int node) throws WikiApiException {
    List<Integer> workingPath = new LinkedList<Integer>();
    List<Integer> bestPath = new ArrayList<Integer>();
    // expandPath fills bestPath with the shortest path it discovers.
    expandPath(root, node, workingPath, bestPath);
    return bestPath.isEmpty() ? null : bestPath;
}
|
Returns the shortest path from node to root as a list of pageIds of the nodes on the path . Node and root are included in the path node list .
|
4,183
|
/**
 * Gets the length of the shortest path between two category nodes, measured
 * in edges on the undirected view of the graph.
 *
 * @param node1 first category
 * @param node2 second category
 * @return number of edges on the shortest path; 0 for identical nodes;
 *         -1 if either node is missing from the graph or no path exists
 */
public int getPathLengthInEdges(Category node1, Category node2) {
    int id1 = node1.getPageId();
    int id2 = node2.getPageId();
    // Nodes that are not part of the graph cannot be connected.
    if (!this.graph.containsVertex(id1) || !this.graph.containsVertex(id2)) {
        return -1;
    }
    if (id1 == id2) {
        return 0;
    }
    List edgePath = DijkstraShortestPath.findPathBetween(undirectedGraph, id1, id2);
    return (edgePath == null) ? -1 : edgePath.size();
}
|
Gets the path length between two category nodes - measured in edges .
|
4,184
|
/**
 * Gets the length of the shortest path between two category nodes, measured
 * in intermediate nodes (i.e. one less than the edge count).
 *
 * @param node1 first category
 * @param node2 second category
 * @return node count of the shortest path; 0 for identical nodes; -1 if no
 *         path exists
 * @throws WikiApiException if the edge-based distance is an unexpected value
 */
public int getPathLengthInNodes(Category node1, Category node2) throws WikiApiException {
    final int edges = getPathLengthInEdges(node1, node2);
    if (edges == -1) {
        return -1;
    }
    if (edges == 0) {
        return 0;
    }
    if (edges > 0) {
        // A path of n edges passes through n - 1 nodes between the endpoints.
        return edges - 1;
    }
    throw new WikiApiException("Unknown return value.");
}
|
Gets the path length between two category nodes - measured in nodes .
|
4,185
|
/**
 * Computes (or loads) the map from every category pageId to its shortest
 * path to the root, then derives the graph depth and serializes the map.
 *
 * If the map is already in memory this is a no-op; if a serialized file named
 * "&lt;wikipediaId&gt;_&lt;rootPathMapFilename&gt;" exists it is deserialized
 * instead of recomputed. Efficient computation relies on the assumption that
 * every subpath of a shortest root path is itself a shortest path, so the
 * leaf nodes (longest initial paths, most reusable subpaths) are processed
 * first, followed by any non-leaf nodes not already covered. Categories that
 * still have no path afterwards are only logged.
 *
 * @throws WikiApiException on Wikipedia access errors
 */
public void createRootPathMap ( ) throws WikiApiException { if ( rootPathMap != null ) { return ; } File rootPathFile = new File ( wiki . getWikipediaId ( ) + "_" + this . rootPathMapFilename ) ; if ( rootPathFile . exists ( ) ) { logger . info ( "Loading saved rootPathMap ..." ) ; rootPathMap = deserializeMap ( rootPathFile ) ; logger . info ( "Done loading saved rootPathMap" ) ; return ; } logger . info ( "Computing rootPathMap" ) ; rootPathMap = new HashMap < Integer , List < Integer > > ( ) ; List < Integer > queue = new ArrayList < Integer > ( ) ; Set < Integer > leafNodes = this . __getLeafNodes ( ) ; queue . addAll ( leafNodes ) ; logger . info ( queue . size ( ) + " leaf nodes." ) ; fillRootPathMap ( queue ) ; queue . clear ( ) ; for ( Category cat : wiki . getCategories ( ) ) { if ( ! rootPathMap . containsKey ( cat . getPageId ( ) ) ) { queue . add ( cat . getPageId ( ) ) ; } } logger . info ( queue . size ( ) + " non leaf nodes not on a shortest leaf-node to root path." ) ; fillRootPathMap ( queue ) ; for ( Category cat : wiki . getCategories ( ) ) { if ( ! rootPathMap . containsKey ( cat . getPageId ( ) ) ) { logger . info ( "no path for " + cat . getPageId ( ) ) ; } } this . depth = getDepthFromRootPathMap ( ) ; logger . info ( "Setting depth of category graph: " + this . depth ) ; logger . info ( "Serializing rootPathMap" ) ; this . serializeMap ( rootPathMap , rootPathFile ) ; }
|
Computes the paths from each category node to the root . Computing n paths will take some time . Thus efficient computing is based on the assumption that all subpaths in the shortest path to the root are also shortest paths for the corresponding nodes . Starting with the leaf nodes gives the longest initial paths with most subpaths .
|
4,186
|
/**
 * Deletes the serialized root path map file created by createRootPathMap().
 *
 * Bug fix: the previous implementation built the name as
 * rootPathMapFilename + "_" + language + "_" + version, while
 * createRootPathMap() writes the file as
 * wikipediaId + "_" + rootPathMapFilename — so the cached file was never
 * actually removed. The name is now constructed identically to the writer.
 *
 * @throws WikiApiException on Wikipedia access errors while building the name
 */
public void deleteRootPathMap() throws WikiApiException {
    File rootPathFile = new File(wiki.getWikipediaId() + "_" + this.rootPathMapFilename);
    // why: a silently-failed delete leaves a stale cache that
    // createRootPathMap() would happily reload next time.
    if (!rootPathFile.delete() && rootPathFile.exists()) {
        logger.warn("Could not delete root path map file " + rootPathFile.getAbsolutePath());
    }
}
|
Deletes the root path map file .
|
4,187
|
/**
 * Returns the degree distribution of the graph (degree -> node count),
 * treating the graph as undirected. The distribution is computed lazily
 * together with the other graph parameters on first access.
 *
 * @return the degree distribution map
 */
public Map<Integer, Integer> getDegreeDistribution() {
    if (degreeDistribution != null) {
        return degreeDistribution;
    }
    // First access: computing one graph parameter computes them all.
    logger.debug("Calling setGraphParameters");
    setGraphParameters();
    return degreeDistribution;
}
|
Computes the degree distribution . The degree of a node is the number of edges that it is connected with . The graph is treated as an undirected graph . Computing graph parameters requires touching all node pairs . Therefore if one is called the others are computed as well and stored for later retrieval .
|
4,188
|
/**
 * Counts the edges that exist among the neighbors of a node (i.e. how many
 * neighbor pairs are directly connected), on the undirected graph view.
 *
 * @param node pageId of the node whose neighborhood is examined
 * @return number of edges between distinct neighbor pairs
 */
private int getNumberOfNeighborConnections(int node) {
    Set<Integer> neighbors = getNeighbors(node);
    if (neighbors.isEmpty()) {
        return 0;
    }
    Object[] ordered = neighbors.toArray();
    Arrays.sort(ordered);
    int connections = 0;
    int total = neighbors.size();
    // Check each unordered pair exactly once.
    for (int outer = 0; outer < total; outer++) {
        int outerNode = (Integer) ordered[outer];
        for (int inner = outer + 1; inner < total; inner++) {
            int innerNode = (Integer) ordered[inner];
            if (undirectedGraph.containsEdge(innerNode, outerNode)) {
                connections++;
            }
        }
    }
    return connections;
}
|
Get the number of connections that exist between the neighbors of a node .
|
4,189
|
/**
 * Collects the neighbors of the given node on the undirected graph view:
 * every endpoint of an incident edge except the node itself.
 *
 * @param node pageId of the node
 * @return set of neighboring pageIds (never contains the node itself)
 */
protected Set<Integer> getNeighbors(int node) {
    Set<Integer> neighbors = new HashSet<Integer>();
    for (DefaultEdge edge : undirectedGraph.edgesOf(node)) {
        int source = undirectedGraph.getEdgeSource(edge);
        int target = undirectedGraph.getEdgeTarget(edge);
        if (source != node) {
            neighbors.add(source);
        }
        if (target != node) {
            neighbors.add(target);
        }
    }
    return neighbors;
}
|
Get the neighbors of a given node . The category graph is treated as an undirected graph .
|
4,190
|
/**
 * Computes and stores the global graph parameters in one pass over all
 * nodes: average degree, degree distribution, cluster coefficient, average
 * shortest path length, and diameter (longest shortest path). The graph is
 * treated as undirected. Do not call this from the constructor — it touches
 * all node pairs and may run for a long time; the parameter getters invoke
 * it lazily on first access.
 *
 * For each node: its degree feeds the degree sum and distribution; nodes with
 * degree > 1 contribute numberOfNeighborConnections / (d * (d - 1)) to the
 * cluster coefficient sum; computeShortestPathLenghts accumulates the path
 * length sum and tracks the maximum, with wasSource ensuring every pair is
 * counted only once. The average shortest path length divides by the number
 * of node pairs n * (n - 1) / 2 (0 if the graph has fewer than 2 nodes).
 */
private void setGraphParameters ( ) { double maxPathLength = 0.0 ; double shortestPathLengthSum = 0.0 ; double degreeSum = 0.0 ; double clusterCoefficientSum = 0.0 ; Set < Integer > nodes = undirectedGraph . vertexSet ( ) ; Set < Integer > wasSource = new HashSet < Integer > ( ) ; int progress = 0 ; for ( int node : nodes ) { progress ++ ; ApiUtilities . printProgressInfo ( progress , nodes . size ( ) , 100 , ApiUtilities . ProgressInfoMode . TEXT , "Getting graph parameters" ) ; int nodeDegree = undirectedGraph . degreeOf ( node ) ; degreeSum += nodeDegree ; updateDegreeDistribution ( nodeDegree ) ; if ( undirectedGraph . degreeOf ( node ) > 1 ) { double numberOfNeighborConnections = getNumberOfNeighborConnections ( node ) ; clusterCoefficientSum += ( numberOfNeighborConnections / ( nodeDegree * ( nodeDegree - 1 ) ) ) ; } double [ ] returnValues = computeShortestPathLenghts ( node , shortestPathLengthSum , maxPathLength , wasSource ) ; shortestPathLengthSum = returnValues [ 0 ] ; maxPathLength = returnValues [ 1 ] ; wasSource . add ( node ) ; } if ( nodes . size ( ) > 1 ) { this . averageShortestPathLength = shortestPathLengthSum / ( nodes . size ( ) * ( nodes . size ( ) - 1 ) / 2 ) ; } else { this . averageShortestPathLength = 0 ; } this . diameter = maxPathLength ; this . averageDegree = degreeSum / nodes . size ( ) ; this . clusterCoefficient = clusterCoefficientSum / nodes . size ( ) ; }
|
Computes and sets the diameter , the average degree , and the average shortest path length of the graph . Do not call this in the constructor . May run a while . It is called in the getters if parameters are not yet initialized when retrieved .
|
4,191
|
/**
 * Computes the depth of the category graph, i.e. the maximum shortest-path
 * length starting from the root node. Returns 0.0 (with an error log) when
 * no root is configured or the root is not part of this graph.
 *
 * @return the graph depth, or 0.0 if it cannot be determined
 * @throws WikiApiException on Wikipedia access errors
 */
private double computeDepth() throws WikiApiException {
    Category root = wiki.getMetaData().getMainCategory();
    if (root == null) {
        logger.error("There is no root node for this wiki. Check the parameter that provides the name of the root node.");
        return 0.0;
    }
    int rootId = root.getPageId();
    if (!graph.containsVertex(rootId)) {
        logger.error("The root node is not part of this graph. Cannot compute depth of this graph. Setting depth to 0.0");
        return 0.0;
    }
    // Index 1 of the result array carries the longest shortest path found.
    double[] pathStats = computeShortestPathLenghts(rootId, 0.0, 0.0, new HashSet<Integer>());
    return pathStats[1];
}
|
Computes the depth of the category graph i . e . the maximum path length starting with the root node .
|
4,192
|
/**
 * Serializes the given map to the given file. Failures are logged rather
 * than propagated.
 *
 * @param map  map to serialize
 * @param file destination file
 */
private void serializeMap(Map<?, ?> map, File file) {
    try (ObjectOutputStream out =
            new ObjectOutputStream(new BufferedOutputStream(new FileOutputStream(file)))) {
        out.writeObject(map);
    }
    catch (Exception e) {
        // Best-effort persistence: log and carry on.
        logger.error(e.getLocalizedMessage(), e);
    }
}
|
Serialize a Map .
|
4,193
|
/**
 * Deserializes a map from the given file. Failures are logged and signalled
 * by a null return value.
 *
 * @param file file containing a serialized Map
 * @return the deserialized map, or null if reading failed
 */
private Map deserializeMap(File file) {
    try (ObjectInputStream in =
            new ObjectInputStream(new BufferedInputStream(new FileInputStream(file)))) {
        return (Map<?, ?>) in.readObject();
    }
    catch (Exception e) {
        // Treat an unreadable/corrupt file the same as a missing one.
        logger.error(e.getLocalizedMessage(), e);
        return null;
    }
}
|
Deserializes a map .
|
4,194
|
/**
 * Serializes the graph to the given destination.
 *
 * @param destination path to write the serialized graph to
 * @throws WikiApiException if writing fails
 */
public void saveGraph(String destination) throws WikiApiException {
    try {
        GraphSerialization.saveGraph(graph, destination);
    }
    catch (IOException ioException) {
        // why: callers of this API only handle WikiApiException.
        throw new WikiApiException(ioException);
    }
}
|
Serializes the graph to the given destination .
|
4,195
|
/**
 * Sets the sections of a ParsedPage by adding each one to the super section
 * in list order.
 *
 * @param sections sections to attach
 */
public void setSections(List<Section> sections) {
    for (int i = 0; i < sections.size(); i++) {
        superSection.addSection(sections.get(i));
    }
}
|
Sets the Sections of a ParsedPage .
|
4,196
|
/**
 * Synchronizes the output panel's widgets with the controller state.
 *
 * flagA = SQL database output is DISABLED — the file-output widgets (path,
 * compression choices, multiple-files option) are only enabled then.
 * flagB = multiple output files are requested; the size-limit widgets are
 * additionally gated on no compression being selected, and 7Zip is only
 * offered when the controller reports 7Zip support. Selection state of the
 * compression radio buttons mirrors the controller's OutputCompressionEnum.
 */
public void validate ( ) { boolean flagA = ! controller . isEnableSQLDatabaseOutput ( ) ; boolean flagB = controller . isMultipleOutputFiles ( ) ; OutputCompressionEnum oce = controller . getOutputCompression ( ) ; enableZipEncodingCompression . setSelected ( controller . isZipCompressionEnabled ( ) ) ; disableOutputCompression . setSelected ( oce == OutputCompressionEnum . None ) ; enableBZip2OutputCompression . setSelected ( oce == OutputCompressionEnum . BZip2 ) ; activateDataFileOutput . setSelected ( controller . isEnableDataFileOutput ( ) ) ; outputLabel . setEnabled ( flagA ) ; outputPathField . setEnabled ( flagA ) ; enableZipEncodingCompression . setEnabled ( flagA ) ; outputCompression . setEnabled ( flagA ) ; disableOutputCompression . setEnabled ( flagA ) ; enable7ZipOutputCompression . setEnabled ( flagA && controller . is7ZipEnabled ( ) ) ; enable7ZipOutputCompression . setSelected ( oce == OutputCompressionEnum . SevenZip ) ; enableBZip2OutputCompression . setEnabled ( flagA ) ; enableMultipleOutputFiles . setEnabled ( flagA && ( oce == OutputCompressionEnum . None ) ) ; enableMultipleOutputFiles . setSelected ( flagB ) ; outputSizeLimitLabel . setEnabled ( flagA && flagB && ( oce == OutputCompressionEnum . None ) ) ; outputSizeLimitField . setEnabled ( flagA && flagB && ( oce == OutputCompressionEnum . None ) ) ; }
|
A call of this method should validate the status of the panels components .
|
4,197
|
/**
 * Copies the half-open interval [start, end) of the character array into a
 * new String.
 *
 * Replaces a manual StringBuilder loop with the equivalent single-step
 * String(char[], offset, count) constructor. Note: for an invalid range
 * (end < start, or indices out of bounds) this now fails fast with an
 * IndexOutOfBoundsException instead of silently misbehaving.
 *
 * @param array source character array
 * @param start index of the first character to copy (inclusive)
 * @param end   index after the last character to copy (exclusive)
 * @return the characters in [start, end) as a String
 */
private String copy(final char[] array, final int start, final int end) {
    return new String(array, start, end - start);
}
|
Copies the specified interval of characters from the array .
|
4,198
|
/**
 * Emits an INSERT diff operation for the revB range described by curB and
 * appends the inserted text to the reconstructed version buffer.
 *
 * @param revB characters of revision B
 * @param curB block describing the inserted range in revision B
 * @throws UnsupportedEncodingException if WIKIPEDIA_ENCODING is unsupported
 */
private void insert(final char[] revB, final DiffBlock curB) throws UnsupportedEncodingException {
    String insertedText = copy(revB, curB.getRevBStart(), curB.getRevBEnd());
    DiffPart op = new DiffPart(DiffAction.INSERT);
    // The insert position is the current end of the reconstructed version.
    op.setStart(version.length());
    codecData.checkBlocksizeS(version.length());
    op.setText(insertedText);
    codecData.checkBlocksizeL(insertedText.getBytes(WIKIPEDIA_ENCODING).length);
    diff.add(op);
    version.append(insertedText);
}
|
Creates an insert operation .
|
4,199
|
/**
 * Emits a DELETE diff operation for the revA range described by curA. The
 * reconstructed version buffer is not modified — deleted text never appears
 * in the target version.
 *
 * @param curA block describing the removed range in revision A
 */
private void delete(final DiffBlock curA) {
    DiffPart op = new DiffPart(DiffAction.DELETE);
    // Deletion is anchored at the current end of the reconstructed version.
    op.setStart(version.length());
    codecData.checkBlocksizeS(version.length());
    op.setLength(curA.getRevAEnd() - curA.getRevAStart());
    codecData.checkBlocksizeE(op.getLength());
    diff.add(op);
}
|
Creates a delete operation .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.