idx
int64
0
41.2k
question
stringlengths
74
4.21k
target
stringlengths
5
888
36,000
public FileSystem getProxiedFileSystem ( State properties , AuthType authType , String authPath , String uri , final Configuration conf ) throws IOException , InterruptedException , URISyntaxException { Preconditions . checkArgument ( StringUtils . isNotBlank ( properties . getProp ( ConfigurationKeys . FS_PROXY_AS_USE...
Getter for proxiedFs using the passed parameters to create an instance of a proxiedFs .
36,001
private static Optional < Token < ? > > getTokenFromSeqFile ( String authPath , String proxyUserName ) throws IOException { try ( Closer closer = Closer . create ( ) ) { FileSystem localFs = FileSystem . getLocal ( new Configuration ( ) ) ; SequenceFile . Reader tokenReader = closer . register ( new SequenceFile . Read...
Get token from the token sequence file .
36,002
public static Duration getLeadTimeDurationFromConfig ( State state ) { String leadTimeProp = state . getProp ( DATE_PARTITIONED_SOURCE_PARTITION_LEAD_TIME ) ; if ( leadTimeProp == null || leadTimeProp . length ( ) == 0 ) { return DEFAULT_PARTITIONED_SOURCE_PARTITION_LEAD_TIME ; } int leadTime = Integer . parseInt ( lea...
Retrieve the lead time duration from the LEAD_TIME and LEAD_TIME granularity config settings .
36,003
public void purge ( ) throws IOException { this . datasetOwner = getOwner ( ) ; State state = new State ( this . state ) ; this . datasetOwnerFs = ProxyUtils . getOwnerFs ( state , this . datasetOwner ) ; try ( HiveProxyQueryExecutor queryExecutor = ProxyUtils . getQueryExecutor ( state , this . datasetOwner ) ) { if (...
This method is responsible for actual purging . - It first creates a staging table partition with the same schema as of original table partition . - Staging table partition is then populated by original table left outer joined with compliance id table .
36,004
public void launch ( ) throws IOException , YarnException { this . eventBus . register ( this ) ; String clusterName = this . config . getString ( GobblinClusterConfigurationKeys . HELIX_CLUSTER_NAME_KEY ) ; HelixUtils . createGobblinHelixCluster ( this . config . getString ( GobblinClusterConfigurationKeys . ZK_CONNEC...
Launch a new Gobblin instance on Yarn .
36,005
public List < HivePartitionDataset > findDatasets ( ) throws IOException { Preconditions . checkArgument ( this . state . contains ( ComplianceConfigurationKeys . RESTORE_DATASET ) , "Missing required property " + ComplianceConfigurationKeys . RESTORE_DATASET ) ; HivePartitionDataset hivePartitionDataset = HivePartitio...
Will return a Singleton list of HivePartitionDataset to be restored .
36,006
public void pushMessages ( List < byte [ ] > messages ) { List < KeyedMessage < String , byte [ ] > > keyedMessages = Lists . transform ( messages , new Function < byte [ ] , KeyedMessage < String , byte [ ] > > ( ) { public KeyedMessage < String , byte [ ] > apply ( byte [ ] bytes ) { return new KeyedMessage < String ...
Push all mbyte array messages to the Kafka topic .
36,007
protected ProducerCloseable < String , byte [ ] > createProducer ( ProducerConfig config ) { return this . closer . register ( new ProducerCloseable < String , byte [ ] > ( config ) ) ; }
Actually creates the Kafka producer .
36,008
private void acquirePermits ( long permits ) throws InterruptedException { long startMs = System . currentTimeMillis ( ) ; limiter . acquirePermits ( permits ) ; long permitAcquisitionTime = System . currentTimeMillis ( ) - startMs ; if ( throttledTimer . isPresent ( ) ) { Instrumented . updateTimer ( throttledTimer , ...
Acquire permit along with emitting metrics if enabled .
36,009
public S getSchemaByKey ( K key ) throws SchemaRegistryException { try { return cachedSchemasByKeys . get ( key ) ; } catch ( ExecutionException e ) { throw new SchemaRegistryException ( String . format ( "Schema with key %s cannot be retrieved" , key ) , e ) ; } }
Get schema from schema registry by key .
36,010
public static void generateDumpScript ( Path dumpScript , FileSystem fs , String heapFileName , String chmod ) throws IOException { if ( fs . exists ( dumpScript ) ) { LOG . info ( "Heap dump script already exists: " + dumpScript ) ; return ; } try ( BufferedWriter scriptWriter = new BufferedWriter ( new OutputStreamWr...
Generate the dumpScript which is used when OOM error is thrown during task execution . The current content dumpScript puts the . prof files to the DUMP_FOLDER within the same directory of the dumpScript .
36,011
public static void submit ( Optional < EventSubmitter > submitter , String name ) { if ( submitter . isPresent ( ) ) { submitter . get ( ) . submit ( name ) ; } }
Calls submit on submitter if present .
36,012
public static String getJvmInputArguments ( ) { RuntimeMXBean runtimeMxBean = ManagementFactory . getRuntimeMXBean ( ) ; List < String > arguments = runtimeMxBean . getInputArguments ( ) ; return String . format ( "JVM Input Arguments: %s" , JOINER . join ( arguments ) ) ; }
Gets the input arguments passed to the JVM .
36,013
public static String formatJvmArguments ( Optional < String > jvmArguments ) { if ( jvmArguments . isPresent ( ) ) { return PORT_UTILS . replacePortTokens ( jvmArguments . get ( ) ) ; } return StringUtils . EMPTY ; }
Formats the specified jvm arguments such that any tokens are replaced with concrete values ;
36,014
public void initialize ( ) { String table = Preconditions . checkNotNull ( this . state . getProp ( ForkOperatorUtils . getPropertyNameForBranch ( JdbcPublisher . JDBC_PUBLISHER_FINAL_TABLE_NAME , this . branches , this . branchId ) ) ) ; String db = Preconditions . checkNotNull ( this . state . getProp ( ForkOperatorU...
AvroToJdbcEntryConverter list of date columns existing in the table . As we don t want each converter making a connection against database to get the same information . Here ConverterInitializer will retrieve it and store it into WorkUnit so that AvroToJdbcEntryConverter will use it later .
36,015
protected void startUp ( ) { log . info ( "Starting the " + getClass ( ) . getSimpleName ( ) ) ; log . info ( "Polling git with interval {} " , this . pollingInterval ) ; this . scheduledExecutor . scheduleAtFixedRate ( new Runnable ( ) { public void run ( ) { try { if ( shouldPollGit ( ) ) { processGitConfigChanges ( ...
Start the service .
36,016
void processGitConfigChangesHelper ( List < DiffEntry > changes ) throws IOException { for ( DiffEntry change : changes ) { switch ( change . getChangeType ( ) ) { case ADD : case MODIFY : addChange ( change ) ; break ; case DELETE : removeChange ( change ) ; break ; case RENAME : removeChange ( change ) ; addChange ( ...
A helper method where actual processing of the list of changes since the last refresh of the repository takes place and the changes applied .
36,017
public void addAll ( State otherState ) { Properties diffCommonProps = new Properties ( ) ; diffCommonProps . putAll ( Maps . difference ( this . commonProperties , otherState . commonProperties ) . entriesOnlyOnRight ( ) ) ; addAll ( diffCommonProps ) ; addAll ( otherState . specProperties ) ; }
Populates this instance with properties of the other instance .
36,018
public String getProp ( String key ) { if ( this . specProperties . containsKey ( key ) ) { return this . specProperties . getProperty ( key ) ; } return this . commonProperties . getProperty ( key ) ; }
Get the value of a property .
36,019
public List < String > getPropAsList ( String key , String def ) { return LIST_SPLITTER . splitToList ( getProp ( key , def ) ) ; }
Get the value of a property as a list of strings using the given default value if the property is not set .
36,020
public long getPropAsLong ( String key , long def ) { return Long . parseLong ( getProp ( key , String . valueOf ( def ) ) ) ; }
Get the value of a property as a long integer using the given default value if the property is not set .
36,021
public int getPropAsInt ( String key , int def ) { return Integer . parseInt ( getProp ( key , String . valueOf ( def ) ) ) ; }
Get the value of a property as an integer using the given default value if the property is not set .
36,022
public double getPropAsDouble ( String key , double def ) { return Double . parseDouble ( getProp ( key , String . valueOf ( def ) ) ) ; }
Get the value of a property as a double using the given default value if the property is not set .
36,023
public boolean getPropAsBoolean ( String key , boolean def ) { return Boolean . parseBoolean ( getProp ( key , String . valueOf ( def ) ) ) ; }
Get the value of a property as a boolean using the given default value if the property is not set .
36,024
public void removeProp ( String key ) { this . specProperties . remove ( key ) ; if ( this . commonProperties . containsKey ( key ) ) { Properties commonPropsCopy = new Properties ( ) ; commonPropsCopy . putAll ( this . commonProperties ) ; commonPropsCopy . remove ( key ) ; this . commonProperties = commonPropsCopy ; ...
Remove a property if it exists .
36,025
public void removePropsWithPrefix ( String prefix ) { this . specProperties . entrySet ( ) . removeIf ( entry -> ( ( String ) entry . getKey ( ) ) . startsWith ( prefix ) ) ; Properties newCommonProperties = null ; for ( Object key : this . commonProperties . keySet ( ) ) { if ( ( ( String ) key ) . startsWith ( prefix...
Remove all properties with a certain keyPrefix
36,026
private String getDbTableName ( String schemaName ) { Preconditions . checkArgument ( schemaName . matches ( ".+_.+_.+" ) ) ; return schemaName . replaceFirst ( "_" , "." ) . substring ( 0 , schemaName . lastIndexOf ( '_' ) ) ; }
Translate schema name to dbname . tablename to use in path
36,027
public static Collection < WorkUnitState > mergeAllSplitWorkUnits ( FileSystem fs , Collection < WorkUnitState > workUnits ) throws IOException { ListMultimap < CopyableFile , WorkUnitState > splitWorkUnitsMap = ArrayListMultimap . create ( ) ; for ( WorkUnitState workUnit : workUnits ) { if ( isSplitWorkUnit ( workUni...
Finds all split work units in the input collection and merges the file parts into the expected output files .
36,028
private boolean shouldStickToTheSameNode ( int containerExitStatus ) { switch ( containerExitStatus ) { case ContainerExitStatus . DISKS_FAILED : return false ; case ContainerExitStatus . ABORTED : return false ; default : return this . containerHostAffinityEnabled ; } }
Check the exit status of a completed container and see if the replacement container should try to be started on the same node . Some exit status indicates a disk or node failure and in such cases the replacement container should try to be started on a different node .
36,029
private void handleContainerCompletion ( ContainerStatus containerStatus ) { Map . Entry < Container , String > completedContainerEntry = this . containerMap . remove ( containerStatus . getContainerId ( ) ) ; String completedInstanceName = completedContainerEntry . getValue ( ) ; LOGGER . info ( String . format ( "Con...
Handle the completion of a container . A new container will be requested to replace the one that just exited . Depending on the exit status and if container host affinity is enabled the new container may or may not try to be started on the same node .
36,030
public static Builder builder ( URI catalogURI , Properties jobProps ) { String name = JobState . getJobNameFromProps ( jobProps ) ; String group = JobState . getJobGroupFromProps ( jobProps ) ; if ( null == group ) { group = "default" ; } try { URI jobURI = new URI ( catalogURI . getScheme ( ) , catalogURI . getAuthor...
Creates a builder for the JobSpec based on values in a job properties config .
36,031
public void sendNotification ( final Notification notification ) { ContextAwareTimer . Context timer = this . notificationTimer . time ( ) ; if ( ! this . notificationTargets . isEmpty ( ) ) { for ( final Map . Entry < UUID , Function < Notification , Void > > entry : this . notificationTargets . entrySet ( ) ) { try {...
Send a notification to all targets of this context and to the parent of this context .
36,032
public RecordWriter getRecordWriter ( TaskAttemptContext taskAttemptContext ) throws IOException { Configuration conf = taskAttemptContext . getConfiguration ( ) ; String extension = "." + conf . get ( COMPACTION_OUTPUT_EXTENSION , "orc" ) ; Path filename = getDefaultWorkFile ( taskAttemptContext , extension ) ; Writer...
Required for extension since super method hard - coded file extension as . orc . To keep flexibility of extension name we made it configuration driven .
36,033
public static FileBasedJobLockFactory create ( Config factoryConfig , Configuration hadoopConf , Optional < Logger > log ) throws IOException { FileSystem fs = factoryConfig . hasPath ( FS_URI_CONFIG ) ? FileSystem . get ( URI . create ( factoryConfig . getString ( FS_URI_CONFIG ) ) , hadoopConf ) : getDefaultFileSyste...
Create a new instance using the specified factory and hadoop configurations .
36,034
boolean isLocked ( Path lockFile ) throws JobLockException { try { return this . fs . exists ( lockFile ) ; } catch ( IOException e ) { throw new JobLockException ( e ) ; } }
Check if the lock is locked .
36,035
public long getRecordCount ( Collection < Path > paths ) { long count = 0 ; for ( Path path : paths ) { count += getRecordCount ( path ) ; } return count ; }
Get record count for a list of paths .
36,036
public void launch ( ) throws IOException , InterruptedException { this . eventBus . register ( this ) ; HelixUtils . createGobblinHelixCluster ( this . zkConnectionString , this . helixClusterName , false ) ; LOGGER . info ( "Created Helix cluster " + this . helixClusterName ) ; connectHelixManager ( ) ; this . cluste...
Launch a new Gobblin cluster on AWS .
36,037
protected HttpOperation generateHttpOperation ( GenericRecord inputRecord , State state ) { Map < String , String > keyAndValue = new HashMap < > ( ) ; Optional < Iterable < String > > keys = getKeys ( state ) ; HttpOperation operation ; if ( keys . isPresent ( ) ) { for ( String key : keys . get ( ) ) { String value =...
Extract user defined keys by looking at gobblin . converter . http . keys If keys are defined extract key - value pair from inputRecord and set it to HttpOperation If keys are not defined generate HttpOperation by HttpUtils . toHttpOperation
36,038
public static Path getJobStateFilePath ( boolean usingStateStore , Path appWorkPath , String jobId ) { final Path jobStateFilePath ; if ( usingStateStore ) { jobStateFilePath = new Path ( appWorkPath , GobblinClusterConfigurationKeys . JOB_STATE_DIR_NAME + Path . SEPARATOR + jobId + Path . SEPARATOR + jobId + "." + Abs...
Generate the path to the job . state file
36,039
private String loadExistingMetadata ( Path metadataFilename , int branchId ) { try { FileSystem fsForBranch = writerFileSystemByBranches . get ( branchId ) ; if ( ! fsForBranch . exists ( metadataFilename ) ) { return null ; } FSDataInputStream existingMetadata = writerFileSystemByBranches . get ( branchId ) . open ( m...
Read in existing metadata as a UTF8 string .
36,040
protected DatasetDescriptor createDestinationDescriptor ( WorkUnitState state , int branchId ) { Path publisherOutputDir = getPublisherOutputDir ( state , branchId ) ; FileSystem fs = this . publisherFileSystemByBranches . get ( branchId ) ; DatasetDescriptor destination = new DatasetDescriptor ( fs . getScheme ( ) , p...
Create destination dataset descriptor
36,041
public void publishMetadata ( Collection < ? extends WorkUnitState > states ) throws IOException { Set < String > partitions = new HashSet < > ( ) ; mergeMetadataAndCollectPartitionNames ( states , partitions ) ; partitions . removeIf ( Objects :: isNull ) ; WorkUnitState anyState = states . iterator ( ) . next ( ) ; f...
Merge all of the metadata output from each work - unit and publish the merged record .
36,042
private void publishMetadata ( String metadataValue , int branchId , Path metadataOutputPath ) throws IOException { try { if ( metadataOutputPath == null ) { LOG . info ( "Metadata output path not set for branch " + String . valueOf ( branchId ) + ", not publishing." ) ; return ; } if ( metadataValue == null ) { LOG . ...
Publish metadata to a set of paths
36,043
public Collection < URI > getImports ( URI configKeyUri , boolean recursive ) throws ConfigStoreFactoryDoesNotExistsException , ConfigStoreCreationException , VersionDoesNotExistException { return getImports ( configKeyUri , recursive , Optional . < Config > absent ( ) ) ; }
Get the import links of the input URI .
36,044
@ SuppressWarnings ( "unchecked" ) private ConfigStoreFactory < ConfigStore > getConfigStoreFactory ( URI configKeyUri ) throws ConfigStoreFactoryDoesNotExistsException { @ SuppressWarnings ( "rawtypes" ) ConfigStoreFactory csf = this . configStoreFactoryRegister . getConfigStoreFactory ( configKeyUri . getScheme ( ) )...
use serviceLoader to load configStoreFactories
36,045
private void seekNext ( ) { if ( ! needSeek ) { return ; } if ( this . currentIterator != null && this . currentIterator . hasNext ( ) ) { needSeek = false ; return ; } nextWu = null ; this . currentIterator = null ; while ( nextWu == null && workUnits . hasNext ( ) ) { nextWu = workUnits . next ( ) ; if ( nextWu insta...
Seek to the next available work unit skipping all empty work units
36,046
public void clean ( ) throws IOException { if ( this . isDatasetBlacklisted ) { this . log . info ( "Dataset blacklisted. Cleanup skipped for " + datasetRoot ( ) ) ; return ; } boolean atLeastOneFailureSeen = false ; for ( VersionFinderAndPolicy < T > versionFinderAndPolicy : getVersionFindersAndPolicies ( ) ) { Versio...
Method to perform the Retention operations for this dataset .
36,047
public Iterable < JsonObject > convertRecord ( JsonArray outputSchema , String inputRecord , WorkUnitState workUnit ) throws DataConversionException { try { String strDelimiter = workUnit . getProp ( ConfigurationKeys . CONVERTER_CSV_TO_JSON_DELIMITER ) ; if ( Strings . isNullOrEmpty ( strDelimiter ) ) { throw new Ille...
Takes in a record with format String and splits the data based on SOURCE_SCHEMA_DELIMITER Uses the inputSchema and the split record to convert the record to a JsonObject
36,048
@ SuppressWarnings ( "unchecked" ) public T compareAll ( ) { this . compareInputFormat ( ) . compareOutputFormat ( ) . compareIsCompressed ( ) . compareIsStoredAsSubDirs ( ) . compareNumBuckets ( ) . compareBucketCols ( ) . compareRawLocation ( ) . compareParameters ( ) ; return ( T ) this ; }
Compare all parameters .
36,049
public void inc ( E e , long n ) { if ( counters != null && counters . containsKey ( e ) ) { counters . get ( e ) . inc ( n ) ; } }
Increment the counter associated with enum value passed .
36,050
public long getCount ( E e ) { if ( counters . containsKey ( e ) ) { return counters . get ( e ) . getCount ( ) ; } else { return 0l ; } }
Get count for counter associated with enum value passed .
36,051
public void addOrAlterPartition ( HiveTable table , HivePartition partition ) throws IOException { if ( ! addPartitionIfNotExists ( table , partition ) ) { alterPartition ( table , partition ) ; } }
Add a partition to a table if not exists or alter a partition if exists .
36,052
public Schema getSchema ( ) { if ( this . workUnit . contains ( ConfigurationKeys . SOURCE_SCHEMA ) ) { return new Schema . Parser ( ) . parse ( this . workUnit . getProp ( ConfigurationKeys . SOURCE_SCHEMA ) ) ; } AvroFsHelper hfsHelper = ( AvroFsHelper ) this . fsHelper ; if ( this . filesToPull . isEmpty ( ) ) { ret...
Assumption is that all files in the input directory have the same schema
36,053
public static String getMachedLookbackTime ( String datasetName , String datasetsAndLookBacks , String sysDefaultLookback ) { String defaultLookback = sysDefaultLookback ; for ( String entry : Splitter . on ( ";" ) . trimResults ( ) . omitEmptyStrings ( ) . splitToList ( datasetsAndLookBacks ) ) { List < String > datas...
Find the correct lookback time for a given dataset .
36,054
private boolean validateTemplateURI ( URI flowURI ) { if ( ! this . sysConfig . hasPath ( ServiceConfigKeys . TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY ) ) { log . error ( "Missing config " + ServiceConfigKeys . TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY ) ; return false ; } if ( ! flowURI . getScheme ( ) . equals ( F...
Determine if an URI of a jobTemplate or a FlowTemplate is valid .
36,055
public QueryBasedHivePublishEntity generatePublishQueries ( ) throws DataConversionException { QueryBasedHivePublishEntity publishEntity = new QueryBasedHivePublishEntity ( ) ; List < String > publishQueries = publishEntity . getPublishQueries ( ) ; Map < String , String > publishDirectories = publishEntity . getPublis...
Returns a QueryBasedHivePublishEntity which includes publish level queries and cleanup commands .
36,056
private ApacheHttpRequest < GenericRecord > buildWriteRequest ( BufferedRecord < GenericRecord > record ) { if ( record == null ) { return null ; } ApacheHttpRequest < GenericRecord > request = new ApacheHttpRequest < > ( ) ; HttpOperation httpOperation = HttpUtils . toHttpOperation ( record . getRecord ( ) ) ; URI uri...
Build a write request from a single record
36,057
@ CliObjectOption ( description = "Specifies files should be updated if they're different in the source." ) public EmbeddedGobblinDistcp update ( ) { this . setConfiguration ( RecursiveCopyableDataset . UPDATE_KEY , Boolean . toString ( true ) ) ; return this ; }
Specifies that files in the target should be updated if they have changed in the source . Equivalent to - update option in Hadoop distcp .
36,058
@ CliObjectOption ( description = "Delete files in target that don't exist on source." ) public EmbeddedGobblinDistcp delete ( ) { this . setConfiguration ( RecursiveCopyableDataset . DELETE_KEY , Boolean . toString ( true ) ) ; return this ; }
Specifies that files in the target that don t exist in the source should be deleted . Equivalent to - delete option in Hadoop distcp .
36,059
public EmbeddedGobblin setTemplate ( String templateURI ) throws URISyntaxException , SpecNotFoundException , JobTemplate . TemplateException { return super . setTemplate ( templateURI ) ; }
Remove template from CLI
36,060
public static void addFile ( State state , String file ) { state . setProp ( ADD_FILES , state . getProp ( ADD_FILES , "" ) + "," + file ) ; }
Add the input file to the Hive session before running the task .
36,061
public static void addJar ( State state , String jar ) { state . setProp ( ADD_JARS , state . getProp ( ADD_JARS , "" ) + "," + jar ) ; }
Add the input jar to the Hive session before running the task .
36,062
public static void addSetupQuery ( State state , String query ) { state . setProp ( SETUP_QUERIES , state . getProp ( SETUP_QUERIES , "" ) + ";" + query ) ; }
Run the specified setup query on the Hive session before running the task .
36,063
public static String getQuotedString ( String st ) { Preconditions . checkNotNull ( st ) ; String quotedString = "" ; if ( ! st . startsWith ( SINGLE_QUOTE ) ) { quotedString += SINGLE_QUOTE ; } quotedString += st ; if ( ! st . endsWith ( SINGLE_QUOTE ) ) { quotedString += SINGLE_QUOTE ; } return quotedString ; }
Add single quotes to the string if not present . TestString will be converted to TestString
36,064
public static boolean isUnixTimeStamp ( String timeStamp ) { if ( timeStamp . length ( ) != ComplianceConfigurationKeys . TIME_STAMP_LENGTH ) { return false ; } try { Long . parseLong ( timeStamp ) ; return true ; } catch ( NumberFormatException e ) { return false ; } }
Check if a given string is a valid unixTimeStamp
36,065
public void stopMetricsReporting ( ) { LOGGER . info ( "Metrics reporting will be stopped: GobblinMetrics {}" , this . toString ( ) ) ; if ( ! this . metricsReportingStarted ) { LOGGER . warn ( "Metric reporting has not started yet" ) ; return ; } if ( this . jmxReporter . isPresent ( ) ) { this . jmxReporter . get ( )...
Stop metric reporting .
36,066
public static SpecExecutor createDummySpecExecutor ( URI uri ) { Properties properties = new Properties ( ) ; properties . setProperty ( ConfigurationKeys . SPECEXECUTOR_INSTANCE_URI_KEY , uri . toString ( ) ) ; return new InMemorySpecExecutor ( ConfigFactory . parseProperties ( properties ) ) ; }
A creator that create a SpecExecutor only specifying URI for uniqueness .
36,067
private static long getRecordTimestamp ( Optional < Object > writerPartitionColumnValue ) { return writerPartitionColumnValue . orNull ( ) instanceof Long ? ( Long ) writerPartitionColumnValue . get ( ) : System . currentTimeMillis ( ) ; }
Check if the partition column value is present and is a Long object . Otherwise use current system time .
36,068
private Optional < Object > getWriterPartitionColumnValue ( GenericRecord record ) { if ( ! this . partitionColumns . isPresent ( ) ) { return Optional . absent ( ) ; } for ( String partitionColumn : this . partitionColumns . get ( ) ) { Optional < Object > fieldValue = AvroUtils . getFieldValue ( record , partitionCol...
Retrieve the value of the partition column field specified by this . partitionColumns
36,069
public synchronized void addWeakListener ( L listener ) { Preconditions . checkNotNull ( listener ) ; _log . info ( "Adding a weak listener " + listener ) ; _autoListeners . put ( listener , null ) ; }
Only weak references are stored for weak listeners . They will be removed from the dispatcher automatically once the listener objects are GCed . Note that weak listeners cannot be removed explicitly .
36,070
private static List < String > getDependencies ( Config config ) { return config . hasPath ( ConfigurationKeys . JOB_DEPENDENCIES ) ? Arrays . asList ( config . getString ( ConfigurationKeys . JOB_DEPENDENCIES ) . split ( "," ) ) : new ArrayList < > ( ) ; }
Get job dependencies of a given job from its config .
36,071
public static List < Object > getDecoratorLineage ( Object obj ) { List < Object > lineage = Lists . newArrayList ( obj ) ; Object currentObject = obj ; while ( currentObject instanceof Decorator ) { currentObject = ( ( Decorator ) currentObject ) . getDecoratedObject ( ) ; lineage . add ( currentObject ) ; } return Li...
Finds the decorator lineage of the given object .
36,072
private DateTime getCompactionTimestamp ( ) throws IOException { DateTimeZone timeZone = DateTimeZone . forID ( this . dataset . jobProps ( ) . getProp ( MRCompactor . COMPACTION_TIMEZONE , MRCompactor . DEFAULT_COMPACTION_TIMEZONE ) ) ; if ( ! this . recompactFromDestPaths ) { return new DateTime ( timeZone ) ; } Set ...
For regular compactions compaction timestamp is the time the compaction job starts .
36,073
private void submitSlaEvent ( Job job ) { try { CompactionSlaEventHelper . getEventSubmitterBuilder ( this . dataset , Optional . of ( job ) , this . fs ) . eventSubmitter ( this . eventSubmitter ) . eventName ( CompactionSlaEventHelper . COMPACTION_COMPLETED_EVENT_NAME ) . additionalMetadata ( CompactionSlaEventHelper...
Submit an event when compaction MR job completes
36,074
private void submitRecordsCountsEvent ( ) { long lateOutputRecordCount = this . datasetHelper . getLateOutputRecordCount ( ) ; long outputRecordCount = this . datasetHelper . getOutputRecordCount ( ) ; try { CompactionSlaEventHelper . getEventSubmitterBuilder ( this . dataset , Optional . < Job > absent ( ) , this . fs...
Submit an event reporting late record counts and non - late record counts .
36,075
public RecordStreamWithMetadata < D , S > processStream ( RecordStreamWithMetadata < D , S > inputStream , WorkUnitState state ) { Flowable < StreamEntity < D > > filteredStream = inputStream . getRecordStream ( ) . filter ( r -> { if ( r instanceof ControlMessage ) { getMessageHandler ( ) . handleMessage ( ( ControlMe...
Process the stream and drop any records that fail the quality check .
36,076
public void afterCheck ( Result result , long startTimeNanos ) { switch ( result ) { case FAILED : Instrumented . markMeter ( this . failedRecordsMeter ) ; break ; case PASSED : Instrumented . markMeter ( this . passedRecordsMeter ) ; break ; default : } Instrumented . updateTimer ( this . policyTimer , System . nanoTi...
Called after check is run .
36,077
public void addSerDeProperties ( Path path , HiveRegistrationUnit hiveUnit ) throws IOException { hiveUnit . setSerDeType ( this . serDeWrapper . getSerDe ( ) . getClass ( ) . getName ( ) ) ; hiveUnit . setInputFormat ( this . serDeWrapper . getInputFormatClassName ( ) ) ; hiveUnit . setOutputFormat ( this . serDeWrapp...
Add ORC SerDe attributes into HiveUnit
36,078
protected void addSchemaPropertiesHelper ( Path path , HiveRegistrationUnit hiveUnit ) throws IOException { TypeInfo schema = getSchemaFromLatestFile ( path , this . fs ) ; if ( schema instanceof StructTypeInfo ) { StructTypeInfo structTypeInfo = ( StructTypeInfo ) schema ; hiveUnit . setSerDeProp ( SCHEMA_LITERAL , sc...
Extensible if there s other source - of - truth for fetching schema instead of interacting with HDFS .
36,079
public static < T > Retryer < T > newInstance ( Config config ) { config = config . withFallback ( DEFAULTS ) ; RetryType type = RetryType . valueOf ( config . getString ( RETRY_TYPE ) . toUpperCase ( ) ) ; switch ( type ) { case EXPONENTIAL : return newExponentialRetryer ( config ) ; case FIXED : return newFixedRetrye...
Creates new instance of retryer based on the config . Accepted config keys are defined in RetryerFactory as static member variable . You can use State along with ConfigBuilder and config prefix to build config .
36,080
public static PasswordManager getInstance ( State state ) { try { return CACHED_INSTANCES . get ( new CachedInstanceKey ( state ) ) ; } catch ( ExecutionException e ) { throw new RuntimeException ( "Unable to get an instance of PasswordManager" , e ) ; } }
Get an instance . The location of the master password file is provided via encrypt . key . loc .
36,081
public static PasswordManager getInstance ( Path masterPwdLoc ) { State state = new State ( ) ; state . setProp ( ConfigurationKeys . ENCRYPT_KEY_LOC , masterPwdLoc . toString ( ) ) ; state . setProp ( ConfigurationKeys . ENCRYPT_KEY_FS_URI , masterPwdLoc . toUri ( ) ) ; try { return CACHED_INSTANCES . get ( new Cached...
Get an instance . The master password file is given by masterPwdLoc .
36,082
public String encryptPassword ( String plain ) { Preconditions . checkArgument ( this . encryptors . size ( ) > 0 , "A master password needs to be provided for encrypting passwords." ) ; try { return this . encryptors . get ( 0 ) . encrypt ( plain ) ; } catch ( Exception e ) { throw new RuntimeException ( "Failed to en...
Encrypt a password . A master password must have been provided in the constructor .
36,083
public String decryptPassword ( String encrypted ) { Preconditions . checkArgument ( this . encryptors . size ( ) > 0 , "A master password needs to be provided for decrypting passwords." ) ; for ( TextEncryptor encryptor : encryptors ) { try { return encryptor . decrypt ( encrypted ) ; } catch ( Exception e ) { LOG . w...
Decrypt an encrypted password . A master password file must have been provided in the constructor .
36,084
private JobConfig . Builder createJobBuilder ( Properties jobProps ) { String planningId = getPlanningJobId ( jobProps ) ; Map < String , TaskConfig > taskConfigMap = Maps . newHashMap ( ) ; Map < String , String > rawConfigMap = Maps . newHashMap ( ) ; for ( String key : jobProps . stringPropertyNames ( ) ) { rawConfi...
Create a job config builder which has a single task that wraps the original jobProps .
36,085
private void submitJobToHelix ( String jobName , String jobId , JobConfig . Builder jobConfigBuilder ) throws Exception { TaskDriver taskDriver = new TaskDriver ( this . planningJobHelixManager ) ; HelixUtils . submitJobToWorkFlow ( jobConfigBuilder , jobName , jobId , taskDriver , this . planningJobHelixManager , this...
Submit a planning job to helix so that it can launched from a remote node .
36,086
public static List < FileStatus > listMostNestedPathRecursively ( FileSystem fs , Path path ) throws IOException { return listMostNestedPathRecursively ( fs , path , NO_OP_PATH_FILTER ) ; }
Method to list out all files or directory if no file exists under a specified path .
36,087
public static FileStatus getAnyNonHiddenFile ( FileSystem fs , Path path ) throws IOException { HiddenFilter hiddenFilter = new HiddenFilter ( ) ; FileStatus root = fs . getFileStatus ( path ) ; if ( ! root . isDirectory ( ) ) { return hiddenFilter . accept ( path ) ? root : null ; } Stack < FileStatus > folders = new ...
Get any data file which is not hidden or a directory from the given path
36,088
public FlowStatus get ( ComplexResourceKey < FlowStatusId , EmptyRecord > key ) { String flowGroup = key . getKey ( ) . getFlowGroup ( ) ; String flowName = key . getKey ( ) . getFlowName ( ) ; long flowExecutionId = key . getKey ( ) . getFlowExecutionId ( ) ; LOG . info ( "Get called with flowGroup " + flowGroup + " f...
Retrieve the FlowStatus with the given key
36,089
static ExecutionStatus updatedFlowExecutionStatus ( ExecutionStatus jobExecutionStatus , ExecutionStatus currentFlowExecutionStatus ) { if ( currentFlowExecutionStatus == ExecutionStatus . FAILED || jobExecutionStatus == ExecutionStatus . FAILED ) { return ExecutionStatus . FAILED ; } if ( currentFlowExecutionStatus ==...
Determines the new flow status based on the current flow status and new job status
36,090
public static String calculateEdgeIdentity ( ServiceNode sourceNode , ServiceNode targetNode , SpecExecutor specExecutorInstance ) { return sourceNode . getNodeName ( ) + "-" + specExecutorInstance . getUri ( ) + "-" + targetNode . getNodeName ( ) ; }
A naive implementation of edge identity calculation .
36,091
private URI getBaseURI ( URI configKey ) throws URISyntaxException { return new URI ( configKey . getScheme ( ) , configKey . getAuthority ( ) , null , configKey . getQuery ( ) , configKey . getFragment ( ) ) ; }
Base URI for a config store should be root of the zip file so change path part of URI to be null
36,092
public void stop ( Map < String , String > additionalMetadata ) { if ( this . stopped ) { return ; } this . stopped = true ; long endTime = System . currentTimeMillis ( ) ; long duration = endTime - this . startTime ; Map < String , String > finalMetadata = Maps . newHashMap ( ) ; finalMetadata . putAll ( additionalMet...
Stop the timer and submit the event along with the additional metadata specified . If the timer was already stopped before this is a no - op .
36,093
public synchronized static RecordAccessor getRecordAccessorForObject ( Object obj ) { for ( RecordAccessorProvider p : recordAccessorProviders ) { RecordAccessor accessor = p . recordAccessorForObject ( obj ) ; if ( accessor != null ) { return accessor ; } } throw new IllegalArgumentException ( "Can't build accessor fo...
Get a RecordAccessor for a given object . Throws IllegalArgumentException if none can be built .
36,094
private Set < Path > getNewDataInFolder ( Path inputFolder , Path outputFolder ) throws IOException { Set < Path > newFiles = Sets . newHashSet ( ) ; if ( ! this . fs . exists ( inputFolder ) || ! this . fs . exists ( outputFolder ) ) { return newFiles ; } DateTime lastCompactionTime = new DateTime ( MRCompactor . read...
Check if inputFolder contains any files which have modification times which are more recent than the last compaction time as stored within outputFolder ; return any files which do . An empty list will be returned if all files are older than the last compaction time .
36,095
public static String generateStagingCTASStatement ( HiveDatasetFinder . DbAndTable outputDbAndTable , String sourceQuery , StorageFormat storageFormat , String outputTableLocation ) { Preconditions . checkArgument ( ! Strings . isNullOrEmpty ( outputDbAndTable . getDb ( ) ) && ! Strings . isNullOrEmpty ( outputDbAndTab...
Generates a CTAS statement to dump the results of a query into a new table .
36,096
public static String generateTableCopy ( String inputTblName , String outputTblName , String inputDbName , String outputDbName , Optional < Map < String , String > > optionalPartitionDMLInfo ) { Preconditions . checkArgument ( StringUtils . isNotBlank ( inputTblName ) ) ; Preconditions . checkArgument ( StringUtils . i...
Fills data from input table into output table .
36,097
public static void populatePartitionInfo ( HiveProcessingEntity conversionEntity , Map < String , String > partitionsDDLInfo , Map < String , String > partitionsDMLInfo ) { String partitionsInfoString = null ; String partitionsTypeString = null ; if ( conversionEntity . getPartition ( ) . isPresent ( ) ) { partitionsIn...
It fills partitionsDDLInfo and partitionsDMLInfo with the partition information
36,098
public static void createStagingDirectory ( FileSystem fs , String destination , HiveProcessingEntity conversionEntity , WorkUnitState workUnit ) { Path destinationPath = new Path ( destination ) ; try { FsPermission permission ; String group = null ; if ( conversionEntity . getTable ( ) . getDataLocation ( ) != null )...
Creates a staging directory with the permission as in source directory .
36,099
public static Pair < Optional < Table > , Optional < List < Partition > > > getDestinationTableMeta ( String dbName , String tableName , Properties props ) { Optional < Table > table = Optional . < Table > absent ( ) ; Optional < List < Partition > > partitions = Optional . < List < Partition > > absent ( ) ; try { Hiv...
Returns a pair of Hive table and its partitions