text
stringlengths
63
450k
// Identifier-anonymized sample (TYPE_n/METHOD_n/VAR_n are machine-normalized names).
// Structurally: builds an empty String-to-String map (VAR_5), obtains a component from
// TYPE_10.METHOD_2 using VAR_3's METHOD_3() result plus that map, wraps the input
// collection VAR_2 together with VAR_4.METHOD_4() into a TYPE_12, feeds the wrapper to
// METHOD_1 on the component, and assembles the final TYPE_1 from the results.
// NOTE(review): original identifiers are unrecoverable from this view — do not infer domain semantics.
public static TYPE_1 METHOD_1 ( String VAR_1 , TYPE_2 < TYPE_3 > VAR_2 , TYPE_4 VAR_3 , TYPE_5 VAR_4 ) throws TYPE_6 { TYPE_7 < String , String > VAR_5 = new TYPE_8 < > ( ) ; TYPE_9 VAR_6 = TYPE_10 . METHOD_2 ( VAR_3 . METHOD_3 ( ) , VAR_5 ) ; final TYPE_2 < TYPE_11 > VAR_7 = new TYPE_12 ( VAR_2 , VAR_4 . METHOD_4 ( ) ) ; TYPE_13 VAR_8 = VAR_6 . METHOD_1 ( VAR_7 ) ; return new TYPE_1 ( VAR_1 , VAR_8 , VAR_4 . METHOD_5 ( ) , VAR_4 . METHOD_6 ( ) , VAR_5 , VAR_4 ) ; }
/**
 * Ensures the MNIST data files are present locally, downloading and unpacking
 * them on first use, and caches the resulting directory in {@code fileDir}.
 *
 * Base directory is {@code $HOME/LOCAL_DIR_NAME}; despite the inline comment
 * about tmp dirs, user.home is used precisely so the data survives restarts.
 * Both the training-images archive and the training-labels archive are
 * downloaded only when missing, but NOTE(review): both archives are
 * re-extracted on every call even when the files already existed — confirm
 * this repeated unzip is intended.
 *
 * @return the directory containing the extracted MNIST files
 * @throws IOException if the base directory cannot be created or a download fails
 */
public File downloadAndUntar ( ) throws IOException { if ( fileDir != null ) { return fileDir ; } // mac gives unique tmp each run and we want to store this persist // this data across restarts File tmpDir = new File ( System . getProperty ( "user.home" ) ) ; File baseDir = new File ( tmpDir , LOCAL_DIR_NAME ) ; if ( ! ( baseDir . isDirectory ( ) || baseDir . mkdir ( ) ) ) { throw new IOException ( "Could not mkdir " + baseDir ) ; } log . info ( "Downloading mnist..." ) ; // getFromOrigin training records File tarFile = new File ( baseDir , trainingFilesFilename ) ; if ( ! tarFile . isFile ( ) ) { FileUtils . copyURLToFile ( new URL ( trainingFilesURL ) , tarFile ) ; } ArchiveUtils . unzipFileTo ( tarFile . getAbsolutePath ( ) , baseDir . getAbsolutePath ( ) ) ; // getFromOrigin training records File labels = new File ( baseDir , trainingFileLabelsFilename ) ; if ( ! labels . isFile ( ) ) { FileUtils . copyURLToFile ( new URL ( trainingFileLabelsURL ) , labels ) ; } ArchiveUtils . unzipFileTo ( labels . getAbsolutePath ( ) , baseDir . getAbsolutePath ( ) ) ; fileDir = baseDir ; return fileDir ; }
/**
 * Orders keys so that entries in the default group sort before entries in any
 * other group; within the same "default-ness" class, keys compare by group
 * name and then by entry name.
 *
 * @param o the key to compare against
 * @return a negative value, zero, or a positive value per {@link Comparable}
 */
public int compareTo(final Key<T> o) {
    final boolean thisIsDefault = m_sGroup.equals(DEFAULT_GROUP);
    final boolean otherIsDefault = o.m_sGroup.equals(DEFAULT_GROUP);
    if (thisIsDefault != otherIsDefault) {
        // exactly one side is in the default group; that side sorts first
        return thisIsDefault ? -1 : 1;
    }
    final int byGroup = m_sGroup.compareTo(o.getGroup());
    return byGroup != 0 ? byGroup : m_sName.compareTo(o.getName());
}
/**
 * Copies trained weights from a Keras HDF5 archive into the already-built
 * {@code KerasLayer} map.
 *
 * Handles several archive layouts: layer names containing '/' (worked around
 * by splitting on the slash, since HDF5 treats '/' as a group separator),
 * TensorFlow-backend nested groups (detected via the ":N" suffix in the
 * "weight_names" attribute), Keras 1 vs Keras 2 dataset paths, and the
 * Theano-backend bidirectional special case (forward/backward sub-groups).
 * Bidirectional layers under Keras 2 read each parameter twice (forward and
 * backward variants). After importing, every layer that declares parameters
 * but received no weights triggers an exception.
 *
 * @param weightsArchive archive holding the weights
 * @param weightsRoot    root group inside the archive, or null
 * @param layers         layer-name to KerasLayer map to populate
 * @param kerasVersion   1 or 2; selects the dataset path convention
 * @param backend        Keras backend name (e.g. "theano", "tensorflow")
 * @throws InvalidKerasConfigurationException if weights exist for an unknown
 *         layer, a parameter-count mismatch is found, or a parameterized
 *         layer has no weights
 * @throws UnsupportedKerasConfigurationException per declared contract
 */
public static void importWeights ( Hdf5Archive weightsArchive , String weightsRoot , Map < String , KerasLayer > layers , int kerasVersion , String backend ) throws InvalidKerasConfigurationException , UnsupportedKerasConfigurationException { // check to ensure naming scheme doesn't include forward slash boolean includesSlash = false ; for ( String layerName : layers . keySet ( ) ) { if ( layerName . contains ( "/" ) ) includesSlash = true ; } synchronized ( KerasModelUtils . class ) { List < String > layerGroups ; if ( ! includesSlash ) { layerGroups = weightsRoot != null ? weightsArchive . getGroups ( weightsRoot ) : weightsArchive . getGroups ( ) ; } else { layerGroups = new ArrayList <> ( layers . keySet ( ) ) ; } /* Set weights in KerasLayer for each entry in weights map. */ for ( String layerName : layerGroups ) { List < String > layerParamNames ; // there's a bug where if a layer name contains a forward slash, the first fragment must be appended // to the name of the dataset; it appears h5 interprets the forward slash as a data group String [ ] layerFragments = layerName . split ( "/" ) ; // Find nested groups when using Tensorflow String rootPrefix = weightsRoot != null ? weightsRoot + "/" : "" ; List < String > attributeStrParts = new ArrayList <> ( ) ; String attributeStr = weightsArchive . readAttributeAsString ( "weight_names" , rootPrefix + layerName ) ; String attributeJoinStr ; Matcher attributeMatcher = Pattern . compile ( ":\\d+" ) . matcher ( attributeStr ) ; Boolean foundTfGroups = attributeMatcher . find ( ) ; if ( foundTfGroups ) { for ( String part : attributeStr . split ( "/" ) ) { part = part . trim ( ) ; if ( part . length ( ) == 0 ) break ; Matcher tfSuffixMatcher = Pattern . compile ( ":\\d+" ) . matcher ( part ) ; if ( tfSuffixMatcher . find ( ) ) break ; attributeStrParts . add ( part ) ; } attributeJoinStr = StringUtils . 
join ( attributeStrParts , "/" ) ; } else { attributeJoinStr = layerFragments [ 0 ] ; } String baseAttributes = layerName + "/" + attributeJoinStr ; if ( layerFragments . length > 1 ) { try { layerParamNames = weightsArchive . getDataSets ( rootPrefix + baseAttributes ) ; } catch ( Exception e ) { layerParamNames = weightsArchive . getDataSets ( rootPrefix + layerName ) ; } } else { if ( foundTfGroups ) { layerParamNames = weightsArchive . getDataSets ( rootPrefix + baseAttributes ) ; } else { if ( kerasVersion == 2 ) { if ( backend . equals ( "theano" ) && layerName . contains ( "bidirectional" ) ) { for ( String part : attributeStr . split ( "/" ) ) { if ( part . contains ( "forward" ) ) baseAttributes = baseAttributes + "/" + part ; } } if ( layers . get ( layerName ) . getNumParams ( ) > 0 ) { try { layerParamNames = weightsArchive . getDataSets ( rootPrefix + baseAttributes ) ; } catch ( Exception e ) { log . warn ( "No HDF5 group with weights found for layer with name " + layerName + ", continuing import." ) ; layerParamNames = Collections . emptyList ( ) ; } } else { layerParamNames = weightsArchive . getDataSets ( rootPrefix + layerName ) ; } } else { layerParamNames = weightsArchive . getDataSets ( rootPrefix + layerName ) ; } } } if ( layerParamNames . isEmpty ( ) ) continue ; if ( ! layers . containsKey ( layerName ) ) throw new InvalidKerasConfigurationException ( "Found weights for layer not in model (named " + layerName + ")" ) ; KerasLayer layer = layers . get ( layerName ) ; if ( layerParamNames . size ( ) != layer . getNumParams ( ) ) if ( kerasVersion == 2 && layer instanceof KerasBidirectional && 2 * layerParamNames . size ( ) != layer . getNumParams ( ) ) throw new InvalidKerasConfigurationException ( "Found " + layerParamNames . size ( ) + " weights for layer with " + layer . 
getNumParams ( ) + " trainable params (named " + layerName + ")" ) ; Map < String , INDArray > weights = new HashMap <> ( ) ; for ( String layerParamName : layerParamNames ) { String paramName = KerasModelUtils . findParameterName ( layerParamName , layerFragments ) ; INDArray paramValue ; if ( kerasVersion == 2 && layer instanceof KerasBidirectional ) { String backwardAttributes = baseAttributes . replace ( "forward" , "backward" ) ; INDArray forwardParamValue = weightsArchive . readDataSet ( layerParamName , rootPrefix + baseAttributes ) ; INDArray backwardParamValue = weightsArchive . readDataSet ( layerParamName , rootPrefix + backwardAttributes ) ; weights . put ( "forward_" + paramName , forwardParamValue ) ; weights . put ( "backward_" + paramName , backwardParamValue ) ; } else { if ( foundTfGroups ) { paramValue = weightsArchive . readDataSet ( layerParamName , rootPrefix + baseAttributes ) ; } else { if ( layerFragments . length > 1 ) { paramValue = weightsArchive . readDataSet ( layerFragments [ 0 ] + "/" + layerParamName , rootPrefix , layerName ) ; } else { if ( kerasVersion == 2 ) { paramValue = weightsArchive . readDataSet ( layerParamName , rootPrefix + baseAttributes ) ; } else { paramValue = weightsArchive . readDataSet ( layerParamName , rootPrefix , layerName ) ; } } } weights . put ( paramName , paramValue ) ; } } layer . setWeights ( weights ) ; } /* Look for layers in model with no corresponding entries in weights map. */ Set < String > layerNames = new HashSet <> ( layers . keySet ( ) ) ; layerNames . removeAll ( layerGroups ) ; for ( String layerName : layerNames ) { if ( layers . get ( layerName ) . getNumParams ( ) > 0 ) throw new InvalidKerasConfigurationException ( "Could not find weights required for layer " + layerName ) ; } } }
// Identifier-anonymized sample. Structurally a matcher-based parser: TYPE_3.METHOD_2
// produces a matcher-like object from VAR_1; when METHOD_3() (presumably a match test —
// confirm) fails, null is returned; otherwise three capture groups (index 1, INT_1, INT_2)
// are extracted and combined into a new TYPE_1, with the first group converted via
// TYPE_4.METHOD_1 before use.
public static TYPE_1 METHOD_1 ( String VAR_1 ) { TYPE_2 VAR_2 = TYPE_3 . METHOD_2 ( VAR_1 ) ; if ( ! VAR_2 . METHOD_3 ( ) ) { return null ; } String state = VAR_2 . METHOD_4 ( 1 ) ; String VAR_3 = VAR_2 . METHOD_4 ( INT_1 ) ; String VAR_4 = VAR_2 . METHOD_4 ( INT_2 ) ; return new TYPE_1 ( VAR_3 , TYPE_4 . METHOD_1 ( state ) , VAR_4 ) ; }
public String getColName ( ) { // CursorAdapter requires lowercase _id col if ( EntityModel . ID_COL . equals ( colName ) ) { return EntityModel . ID_COL ; } else { return colName . toUpperCase ( ) ; } }
// Identifier-anonymized sample: an unconditional "not supported" stub — every
// invocation throws TYPE_4 with message STRING_1; the parameters are never read.
ANNOTATION_1 public TYPE_1 METHOD_1 ( TYPE_2 VAR_1 , String method , TYPE_3 VAR_2 , TYPE_3 to ) { throw new TYPE_4 ( STRING_1 ) ; }
/**
 * Opens the schedule wizard for the scheduler job carried by the current
 * action context: locates the enclosing EntityBrowserPanel, builds a
 * ScheduleWizard for the job (tagged with the "wizardScheduler" CSS class),
 * and forwards the panel's workspace to it.
 *
 * NOTE(review): all exceptions are swallowed with printStackTrace() — the user
 * gets no feedback on failure; consider logging via LOG and surfacing an error.
 *
 * @param target the Wicket AJAX request target used to refresh the workspace
 */
public void executeAction ( AjaxRequestTarget target ) { Entity entity = getActionContext ( ) . getEntity ( ) ; try { SchedulerJob job = ( SchedulerJob ) entity ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "job = " + job ) ; } EntityBrowserPanel panel = findParent ( EntityBrowserPanel . class ) ; ScheduleWizard wizard = new ScheduleWizard ( "work" , job ) ; wizard . add ( AttributeModifier . append ( "class" , "wizardScheduler" ) ) ; panel . forwardWorkspace ( wizard , target ) ; //setResponsePage(new ScheduleWizardPage(job)); } catch ( Exception e ) { e . printStackTrace ( ) ; } }
/**
 * Returns a lightweight proxy view over the [start, end] region of this
 * sequence; no data is copied.
 *
 * @param start first position of the view
 * @param end   last position of the view
 * @return a {@link SequenceView} backed by this sequence
 */
@Override
public SequenceView<C> getSubSequence(Integer start, Integer end) {
    final SequenceProxyView<C> view = new SequenceProxyView<C>(this, start, end);
    return view;
}
/**
 * Returns a copy of the range [fromIndex, toIndex) of {@code a} with duplicate
 * values removed, delegating to {@code N.removeDuplicates}.
 *
 * NOTE(review): the trailing {@code false} flag's meaning (presumably
 * "input is not sorted") is taken on trust — confirm against N.removeDuplicates.
 */
public static boolean [ ] distinct ( final boolean [ ] a , final int fromIndex , final int toIndex ) { return N . removeDuplicates ( a , fromIndex , toIndex , false ) ; }
/**
 * Materializes a shared symbol table from the struct the given IonReader is
 * positioned at (or on the next value when {@code isOnStruct} is false).
 *
 * Field dispatch is by SID (falling back to text lookup for user-defined
 * readers with unknown SIDs): 'version' is read from an INT, 'name' from a
 * STRING, and 'symbols' from a LIST whose non-string or empty-string elements
 * are stored as null, per the Ion spec. A missing/invalid version defaults
 * to 1; a missing or empty name is a malformed table and raises IonException.
 * See the inline TODOs: duplicate 'symbols'/'imports' fields are merged, and
 * SID-based dispatch can be fooled by a previous local symbol table.
 *
 * @param reader     reader positioned at (or before) the symbol-table struct
 * @param isOnStruct whether the reader is already on the struct value
 * @return an immutable SharedSymbolTable built from the parsed fields
 * @throws IonException if the value is not a struct or the name is missing/empty
 */
static SymbolTable newSharedSymbolTable ( IonReader reader , boolean isOnStruct ) { if ( ! isOnStruct ) { IonType t = reader . next ( ) ; if ( t != IonType . STRUCT ) { throw new IonException ( "invalid symbol table image passed " + "into reader, " + t + " encountered when a " + "struct was expected" ) ; } } String name = null ; int version = - 1 ; List < String > symbolsList = new ArrayList < String > ( ) ; reader . stepIn ( ) ; IonType fieldType = null ; while ( ( fieldType = reader . next ( ) ) != null ) { if ( reader . isNullValue ( ) ) continue ; SymbolToken symTok = reader . getFieldNameSymbol ( ) ; int sid = symTok . getSid ( ) ; if ( sid == SymbolTable . UNKNOWN_SYMBOL_ID ) { // This is a user-defined IonReader or a pure DOM, fall // back to text final String fieldName = reader . getFieldName ( ) ; sid = getSidForSymbolTableField ( fieldName ) ; } // TODO amzn/ion-java/issues/35 If there's more than one 'symbols' or 'imports' // field, they will be merged together. // TODO amzn/ion-java/issues/36 Switching over SIDs doesn't cover the case // where the relevant field names are defined by a prev LST; // the prev LST could have 'symbols' defined locally with a // different SID! switch ( sid ) { case VERSION_SID : if ( fieldType == IonType . INT ) { version = reader . intValue ( ) ; } break ; case NAME_SID : if ( fieldType == IonType . STRING ) { name = reader . stringValue ( ) ; } break ; case SYMBOLS_SID : // As per the Spec, other field types are treated as // empty lists if ( fieldType == IonType . LIST ) { reader . stepIn ( ) ; { IonType t ; while ( ( t = reader . next ( ) ) != null ) { String text = null ; if ( t == IonType . STRING && ! reader . isNullValue ( ) ) { // As per the Spec, if any element of // the list is the empty string or any // other type, treat it as null text = reader . stringValue ( ) ; if ( text . length ( ) == 0 ) text = null ; } symbolsList . add ( text ) ; } } reader . stepOut ( ) ; } break ; default : break ; } } reader . 
stepOut ( ) ; if ( name == null || name . length ( ) == 0 ) { String message = "shared symbol table is malformed: field 'name' " + "must be a non-empty string." ; throw new IonException ( message ) ; } // As per the Spec, if 'version' field is missing or not at // least 1, treat it as 1. version = ( version < 1 ) ? 1 : version ; Map < String , Integer > symbolsMap = null ; if ( ! symbolsList . isEmpty ( ) ) { symbolsMap = new HashMap < String , Integer > ( ) ; transferNonExistingSymbols ( symbolsList , symbolsMap ) ; } else { // Empty Map is more efficient than an empty HashMap symbolsMap = Collections . emptyMap ( ) ; } // We have all necessary data, pass it over to the private constructor. return new SharedSymbolTable ( name , version , symbolsList , symbolsMap ) ; }
/**
 * Appends this buffer's full contents to {@code sb}, choosing whichever
 * internal representation is currently active, in priority order:
 * a cached result String, a cached result char array, a shared slice of the
 * original input buffer (mInputStart >= 0), or the list of stored segments
 * followed by the current working segment.
 *
 * @param sb target to append into
 */
public void contentsToStringBuilder ( StringBuilder sb ) { if ( mResultString != null ) { sb . append ( mResultString ) ; } else if ( mResultArray != null ) { sb . append ( mResultArray ) ; } else if ( mInputStart >= 0 ) { // shared array if ( mInputLen > 0 ) { sb . append ( mInputBuffer , mInputStart , mInputLen ) ; } } else { // First stored segments if ( mSegments != null ) { for ( int i = 0 , len = mSegments . size ( ) ; i < len ; ++ i ) { char [ ] curr = mSegments . get ( i ) ; sb . append ( curr , 0 , curr . length ) ; } } // And finally, current segment: sb . append ( mCurrentSegment , 0 , mCurrentSize ) ; } }
/**
 * Builds the web URL for a pull request:
 * {@code <base>/git/<projectKey>/<repoName>/pullRequests/<number>}.
 *
 * @param project     owning project (supplies the key)
 * @param repository  repository the PR belongs to
 * @param pullRequest the pull request (supplies the number)
 * @return the absolute pull-request URL
 */
@Override
public String getPullRequestUrl(Project project, Repository repository, PullRequest pullRequest) {
    final StringBuilder url = new StringBuilder(configure.getWebAppBaseURL());
    url.append("/git/")
       .append(project.getProjectKey())
       .append("/")
       .append(repository.getName())
       .append("/pullRequests/")
       .append(pullRequest.getNumber());
    return url.toString();
}
/**
 * Marshalls a RegisterDefaultPatchBaselineRequest into an HTTP request using
 * the SDK protocol factory (standard AWS-generated marshaller shape):
 * validates the argument, creates a protocol marshaller bound to the
 * operation, starts marshalling, delegates field encoding to the singleton
 * request marshaller, and finishes. Any failure is wrapped in a
 * SdkClientException that preserves the cause.
 *
 * @param registerDefaultPatchBaselineRequest request to marshall; must not be null
 * @return the marshalled request
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public Request < RegisterDefaultPatchBaselineRequest > marshall ( RegisterDefaultPatchBaselineRequest registerDefaultPatchBaselineRequest ) { if ( registerDefaultPatchBaselineRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { final ProtocolRequestMarshaller < RegisterDefaultPatchBaselineRequest > protocolMarshaller = protocolFactory . createProtocolMarshaller ( SDK_OPERATION_BINDING , registerDefaultPatchBaselineRequest ) ; protocolMarshaller . startMarshalling ( ) ; RegisterDefaultPatchBaselineRequestMarshaller . getInstance ( ) . marshall ( registerDefaultPatchBaselineRequest , protocolMarshaller ) ; return protocolMarshaller . finishMarshalling ( ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } }
/**
 * Prepares iteration over a distributed set's items.
 *
 * Computes the partition that owns {@code setName}; returns false when the
 * partition owner is not yet known (caller should retry). When this member
 * owns the partition, a snapshot of the set's items is copied into a local
 * list and an iterator over it is kept; on non-owner members the method
 * returns true without creating an iterator (this member simply has nothing
 * to emit).
 *
 * @param nodeEngine node engine (implementation cast to NodeEngineImpl)
 * @return false if the partition owner is unknown, true otherwise
 */
@ Override public boolean open ( NodeEngine nodeEngine ) { NodeEngineImpl nei = ( NodeEngineImpl ) nodeEngine ; ss = nei . getSerializationService ( ) ; Address thisAddress = nei . getThisAddress ( ) ; InternalPartitionService ps = nei . getPartitionService ( ) ; Data data = ss . toData ( setName , StringAndPartitionAwarePartitioningStrategy . INSTANCE ) ; int partitionId = ps . getPartitionId ( data ) ; Address partitionOwner = ps . getPartitionOwner ( partitionId ) ; if ( partitionOwner == null ) { return false ; } if ( thisAddress . equals ( partitionOwner ) ) { SetService setService = nei . getService ( SetService . SERVICE_NAME ) ; SetContainer setContainer = setService . getOrCreateContainer ( setName , false ) ; List < CollectionItem > items = new ArrayList < CollectionItem > ( setContainer . getCollection ( ) ) ; iterator = items . iterator ( ) ; } return true ; }
/**
 * Sets the parameter's type, which must be one of SUPPORTED_TYPES.
 *
 * @param value the type class; must be non-null and supported
 * @return this builder, for chaining
 * @throws IllegalArgumentException if {@code value} is null or unsupported
 */
public ParameterBuilder<T> type(Class<T> value) {
    final boolean supported = value != null && SUPPORTED_TYPES.contains(value);
    if (!supported) {
        throw new IllegalArgumentException("Invalid type: " + value);
    }
    this.type = value;
    return this;
}
// Identifier-anonymized sample. Structurally a byte-array-to-string formatter:
// iterates the array, inserting VAR_1 as a separator between elements, masking each
// byte with INT_1 and left-padding with "0" when the masked value is below INT_2,
// then converting via TYPE_2.toString(value, INT_3). The shape matches a hex-dump
// helper (mask 0xFF, pad below 0x10, radix 16), but the constants are anonymized —
// confirm before relying on that reading.
public static String METHOD_1 ( byte [ ] array , String VAR_1 ) { assert array != null ; TYPE_1 buffer = new TYPE_1 ( ) ; for ( int i = 0 ; i < array . length ; i ++ ) { if ( i != 0 ) { buffer . append ( VAR_1 ) ; } if ( ( array [ i ] & INT_1 ) < INT_2 ) { buffer . append ( "0" ) ; } buffer . append ( TYPE_2 . toString ( array [ i ] & INT_1 , INT_3 ) ) ; } return buffer . toString ( ) ; }
/**
 * Converts the textual {@code defaultValue} to an instance of the Java type
 * named by {@code javaTypeName}.
 *
 * A null defaultValue yields null. List/Map types ignore the text and return
 * the shared empty collection. Any unrecognized (or null) type name raises a
 * RuntimeException describing the failed conversion.
 *
 * @return the converted default value, or null when none is configured
 */
public Object getDefaultValue() {
    if (defaultValue == null) {
        return null;
    }
    if (javaTypeName != null) {
        switch (javaTypeName) {
            case "java.lang.String":
                return defaultValue;
            case "java.lang.Boolean":
                return Boolean.valueOf(defaultValue);
            case "java.lang.Long":
                return Long.parseLong(defaultValue);
            case "java.lang.Integer":
                return Integer.parseInt(defaultValue);
            case "java.lang.Double":
                return Double.parseDouble(defaultValue);
            case "java.lang.Float":
                return Float.parseFloat(defaultValue);
            case "java.util.List":
                return Collections.EMPTY_LIST;
            case "java.util.Map":
                return Collections.EMPTY_MAP;
            default:
                break; // fall through to the conversion error below
        }
    }
    throw new RuntimeException("Unable to convert " + javaName + " default value " + defaultValue + " to type " + javaTypeName);
}
// Identifier-anonymized sample. Structurally a three-way request handler: when
// METHOD_2 matches, logs and redirects/forwards VAR_2 (cast to TYPE_8) to a derived
// location string; when METHOD_6 matches, logs and kicks off a TYPE_9 processor with
// two string parameters; otherwise delegates to VAR_3.METHOD_1 (filter-chain-like
// pass-through — identifiers anonymized, confirm).
ANNOTATION_1 public void METHOD_1 ( TYPE_1 VAR_1 , TYPE_2 VAR_2 , TYPE_3 VAR_3 ) throws TYPE_4 , TYPE_5 { if ( METHOD_2 ( ( TYPE_6 ) VAR_1 ) ) { TYPE_7 . METHOD_3 ( STRING_1 , VAR_1 ) ; ( ( TYPE_8 ) VAR_2 ) . METHOD_4 ( VAR_4 . METHOD_5 ( ) + STRING_2 ) ; } else if ( METHOD_6 ( ( TYPE_6 ) VAR_1 ) ) { TYPE_7 . info ( STRING_3 , VAR_1 ) ; String VAR_5 = STRING_4 ; String VAR_6 = METHOD_7 ( ) ; new TYPE_9 ( ( TYPE_6 ) VAR_1 , ( TYPE_8 ) VAR_2 , VAR_5 , VAR_6 ) . METHOD_8 ( ) ; } else { VAR_3 . METHOD_1 ( VAR_1 , VAR_2 ) ; } }
/**
 * Convenience overload: merges {@code vars} into a {@link Writer} by wrapping
 * it in a {@code WriterOut} bound to this engine and delegating to the
 * primary {@code merge(Vars, Out)} variant.
 *
 * @param vars   template variables
 * @param writer destination writer
 * @return the merge context produced by the delegate
 */
public Context merge ( final Vars vars , final Writer writer ) { return merge ( vars , new WriterOut ( writer , engine ) ) ; }
// Identifier-anonymized sample. Structurally: loads a buffer, validates its size,
// scans backwards over the last (INT_2 * INT_3) bytes for a 32-bit signature
// (TYPE_4.VAR_8); on a hit, positions past the marker and reads a fixed-layout
// record (four short-ish fields, two int-ish fields, one more field) into TYPE_4.
// It then seeks to an offset from that record, verifies a fixed marker string and a
// pair of cross-checked length fields, and finally extracts a payload. The shape
// resembles end-of-archive / trailer parsing (e.g. zip EOCD), but identifiers are
// anonymized — do not rely on that reading without confirming.
protected TYPE_1 METHOD_1 ( ) throws TYPE_2 { TYPE_1 buffer = METHOD_2 ( ) . METHOD_3 ( VAR_1 . VAR_2 ) ; int VAR_3 = buffer . VAR_4 ( ) ; if ( VAR_3 < INT_1 ) { throw new TYPE_3 ( STRING_1 ) ; } int VAR_5 = INT_2 * INT_3 ; TYPE_4 VAR_6 = null ; for ( int i = VAR_3 - INT_1 ; i > TYPE_5 . METHOD_4 ( 0 , VAR_3 - VAR_5 ) ; i -- ) { int VAR_7 = buffer . getInt ( i ) ; if ( VAR_7 == TYPE_4 . VAR_8 ) { TYPE_6 . position ( buffer , i + INT_4 ) ; VAR_6 = new TYPE_4 ( ) ; VAR_6 . METHOD_5 ( TYPE_6 . METHOD_6 ( buffer ) ) ; VAR_6 . METHOD_7 ( TYPE_6 . METHOD_6 ( buffer ) ) ; VAR_6 . METHOD_8 ( TYPE_6 . METHOD_6 ( buffer ) ) ; VAR_6 . METHOD_9 ( TYPE_6 . METHOD_6 ( buffer ) ) ; VAR_6 . METHOD_10 ( TYPE_6 . METHOD_11 ( buffer ) ) ; VAR_6 . METHOD_12 ( TYPE_6 . METHOD_11 ( buffer ) ) ; VAR_6 . METHOD_13 ( TYPE_6 . METHOD_6 ( buffer ) ) ; } } if ( VAR_6 == null ) { return null ; } int VAR_9 = INT_5 ; long VAR_10 = VAR_6 . METHOD_14 ( ) ; TYPE_6 . position ( buffer , VAR_10 - VAR_9 ) ; String VAR_11 = TYPE_6 . METHOD_15 ( buffer , VAR_9 ) ; if ( ! VAR_11 . equals ( VAR_12 . VAR_13 ) ) { return null ; } TYPE_6 . position ( buffer , VAR_10 - INT_6 ) ; int VAR_14 = TYPE_7 . METHOD_16 ( buffer . VAR_15 ( ) ) ; TYPE_6 . position ( buffer , VAR_10 - VAR_14 - INT_7 ) ; long VAR_16 = TYPE_7 . METHOD_17 ( buffer . VAR_15 ( ) ) ; if ( VAR_14 != VAR_16 ) { return null ; } return TYPE_6 . METHOD_18 ( buffer , VAR_14 - VAR_9 ) ; }
// Identifier-anonymized sample. Structurally a map builder: for each string in field
// VAR_3 that passes the TYPE_4.METHOD_2 predicate, iterates the elements produced by
// METHOD_3 and accumulates them into the result map via METHOD_4.
private TYPE_1 < ? extends String , ? extends TYPE_2 > METHOD_1 ( ) { TYPE_1 < String , TYPE_2 > VAR_1 = new TYPE_3 < > ( ) ; for ( String VAR_2 : VAR_3 ) { if ( TYPE_4 . METHOD_2 ( VAR_2 ) ) { for ( TYPE_5 < ? extends TYPE_6 < ? > > VAR_4 : METHOD_3 ( VAR_2 ) ) { METHOD_4 ( VAR_1 , VAR_2 , VAR_4 ) ; } } } return VAR_1 ; }
// Identifier-anonymized sample: a null-guarded result factory — returns an error/
// failure TYPE_1 (built from VAR_3 and message STRING_1) when VAR_2 is null,
// otherwise a success/default TYPE_1 via METHOD_3. VAR_1 is never read.
ANNOTATION_1 public TYPE_1 METHOD_1 ( TYPE_2 VAR_1 , TYPE_3 VAR_2 ) { if ( VAR_2 == null ) { return TYPE_1 . METHOD_2 ( VAR_3 , STRING_1 ) ; } return TYPE_1 . METHOD_3 ( ) ; }
// Identifier-anonymized sample. Structurally an idempotent shutdown/cleanup routine:
// the VAR_1 flag guards against double execution; nested try/finally blocks guarantee
// that VAR_3 (and, when present, VAR_4) are closed even if VAR_2.METHOD_2 fails, and
// the outer finally always logs (VAR_5.METHOD_4) and attempts a final VAR_7.METHOD_5,
// swallowing its TYPE_1 failure into the logger rather than propagating.
void METHOD_1 ( ) { try { if ( ! VAR_1 ) { VAR_1 = true ; try { VAR_2 . METHOD_2 ( this ) ; } finally { try { VAR_3 . METHOD_3 ( ) ; } finally { if ( VAR_4 != null ) { VAR_4 . METHOD_3 ( ) ; } } } } } finally { VAR_5 . METHOD_4 ( STRING_1 + VAR_6 ) ; try { VAR_7 . METHOD_5 ( ) ; } catch ( TYPE_1 VAR_8 ) { VAR_5 . METHOD_6 ( VAR_8 ) ; } } }
// Identifier-anonymized sample: a copy-with-replacement factory — constructs a new
// TYPE_1 from this object's existing fields, substituting only the supplied map VAR_1
// for the last constructor argument.
public TYPE_1 METHOD_1 ( TYPE_2 < String , TYPE_3 > VAR_1 ) { return new TYPE_1 ( VAR_2 , parameters , VAR_3 , VAR_4 , VAR_5 , VAR_6 , VAR_1 ) ; }
/**
 * Unmarshalls a JobInput from the JSON token stream (standard AWS-generated
 * unmarshaller shape): walks tokens at {@code originalDepth + 1}, setting each
 * recognized field (Key, FrameRate, Resolution, AspectRatio, Interlaced,
 * Container as strings; Encryption, TimeSpan, InputCaptions,
 * DetectedProperties via their dedicated unmarshallers), and stops when the
 * matching END_OBJECT/END_ARRAY for the starting depth is reached.
 *
 * @param context JSON unmarshaller context positioned at the JobInput value
 * @return the populated JobInput, or null when the value is JSON null
 * @throws Exception propagated from the underlying token stream
 */
public JobInput unmarshall ( JsonUnmarshallerContext context ) throws Exception { JobInput jobInput = new JobInput ( ) ; int originalDepth = context . getCurrentDepth ( ) ; String currentParentElement = context . getCurrentParentElement ( ) ; int targetDepth = originalDepth + 1 ; JsonToken token = context . getCurrentToken ( ) ; if ( token == null ) token = context . nextToken ( ) ; if ( token == VALUE_NULL ) { return null ; } while ( true ) { if ( token == null ) break ; if ( token == FIELD_NAME || token == START_OBJECT ) { if ( context . testExpression ( "Key" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setKey ( context . getUnmarshaller ( String . class ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "FrameRate" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setFrameRate ( context . getUnmarshaller ( String . class ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "Resolution" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setResolution ( context . getUnmarshaller ( String . class ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "AspectRatio" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setAspectRatio ( context . getUnmarshaller ( String . class ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "Interlaced" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setInterlaced ( context . getUnmarshaller ( String . class ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "Container" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setContainer ( context . getUnmarshaller ( String . class ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "Encryption" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setEncryption ( EncryptionJsonUnmarshaller . getInstance ( ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "TimeSpan" , targetDepth ) ) { context . nextToken ( ) ; jobInput . 
setTimeSpan ( TimeSpanJsonUnmarshaller . getInstance ( ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "InputCaptions" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setInputCaptions ( InputCaptionsJsonUnmarshaller . getInstance ( ) . unmarshall ( context ) ) ; } if ( context . testExpression ( "DetectedProperties" , targetDepth ) ) { context . nextToken ( ) ; jobInput . setDetectedProperties ( DetectedPropertiesJsonUnmarshaller . getInstance ( ) . unmarshall ( context ) ) ; } } else if ( token == END_ARRAY || token == END_OBJECT ) { if ( context . getLastParsedParentElement ( ) == null || context . getLastParsedParentElement ( ) . equals ( currentParentElement ) ) { if ( context . getCurrentDepth ( ) <= originalDepth ) break ; } } token = context . nextToken ( ) ; } return jobInput ; }
/**
 * Loose equality check: true only when {@code obj} is a String or an
 * IdVersionString whose string form matches this object's string form.
 *
 * @param obj candidate to compare; null yields false
 * @return whether the two objects render to the same string
 */
public boolean sameAs(Object obj) {
    if (obj == null) {
        return false;
    }
    final boolean comparableType = obj instanceof String || obj instanceof IdVersionString;
    return comparableType && this.toString().equals(obj.toString());
}
// Identifier-anonymized sample: a bounds-checked relative lookup — computes an
// offset position (VAR_3 + i) and returns the String at that position in VAR_4's
// backing collection, or null when out of range.
// NOTE(review): the lower-bound check is (VAR_2 > 0), which excludes position 0 —
// possible off-by-one (>= 0 would include it); confirm against the original code.
private final String METHOD_1 ( int i ) { String VAR_1 ; int VAR_2 = VAR_3 + i ; if ( ( VAR_2 > 0 ) && ( VAR_2 < VAR_4 . METHOD_2 ( ) ) ) { VAR_1 = ( String ) VAR_4 . VAR_5 . METHOD_3 ( VAR_2 ) ; } else { VAR_1 = null ; } return VAR_1 ; }
/**
 * Guards system packages against user-defined entity types: if the entity
 * type lives in a system package but is not itself a registered system
 * entity type, validation fails.
 *
 * Entity types with no package are always allowed.
 *
 * @param entityType the entity type being added
 * @throws MolgenisValidationException when a non-system entity type targets a system package
 */
void validatePackage ( EntityType entityType ) { Package pack = entityType . getPackage ( ) ; if ( pack != null && isSystemPackage ( pack ) && ! systemEntityTypeRegistry . hasSystemEntityType ( entityType . getId ( ) ) ) { throw new MolgenisValidationException ( new ConstraintViolation ( format ( "Adding entity [%s] to system package [%s] is not allowed" , entityType . getId ( ) , pack . getId ( ) ) ) ) ; } }
/**
 * Interceptor hook that unconditionally halts request processing by
 * returning false (no handler is ever invoked through this interceptor).
 *
 * NOTE(review): blocking every request is unusual — confirm this is a
 * deliberate deny-all/placeholder rather than an unfinished implementation.
 */
@ Override public boolean preHandle ( HttpServletRequest request , HttpServletResponse response , Object handler ) throws Exception { return false ; }
String [ ] getStations ( StationList stations , Double lon , Double lat , Double north , Double south , Double east , Double west ) { if ( lat != null && lon != null ) { // Pull nearest station StationList . Station nearest = stations . getNearest ( lon , lat ) ; if ( nearest == null ) { throw new UnsupportedOperationException ( "No stations " + "available to search for nearest." ) ; } return new String [ ] { nearest . getStid ( ) } ; } else if ( north != null && south != null && east != null && west != null ) { // Pull all stations within box List < StationList . Station > inBox = stations . getStations ( east , west , north , south ) ; List < String > stIds = new ArrayList <> ( inBox . size ( ) ) ; for ( StationList . Station s : inBox ) { stIds . add ( s . getStid ( ) ) ; } return stIds . toArray ( new String [ stIds . size ( ) ] ) ; } else { throw new UnsupportedOperationException ( "Either station, " + "a lat/lon point, or a box defined by north, " + "south, east, and west parameters must be provided." ) ; } }
/**
 * Implements the shell's "alias" command.
 *
 * With no arguments (or the explicit print flag) it lists all registered
 * aliases; with exactly one argument it defines/parses that alias. Any output
 * from the alias manager is echoed to the invocation. More than one argument
 * is silently ignored (matches the command's historical behavior).
 *
 * @return always {@link CommandResult#SUCCESS}
 */
@Override
public CommandResult execute(CommandInvocation commandInvocation) throws CommandException, InterruptedException {
    if (print || arguments == null || arguments.isEmpty()) {
        // no argument: dump every registered alias
        String out = manager.printAllAliases();
        if (out != null && !out.isEmpty())
            commandInvocation.println(out);
    }
    else if (arguments.size() == 1) {
        // single argument: define (or re-parse) one alias
        String out = manager.parseAlias("alias " + arguments.get(0));
        if (out != null && !out.isEmpty())
            commandInvocation.println(out);
    }
    return CommandResult.SUCCESS;
}
// Identifier-anonymized sample. Structurally a CLI entry point: requires at least one
// argument (a path), loads a document/model from it, then walks a three-level
// hierarchy (TYPE_10 -> TYPE_11 -> TYPE_12), printing a header per top-level element,
// a formatted line per leaf (id plus two properties), and a footer per element,
// flushing the output stream at the end.
public static void main ( String [ ] args ) throws TYPE_1 , TYPE_2 , TYPE_3 { if ( args.length < 1 ) { throw new TYPE_4 ( STRING_1 ) ; } TYPE_5 path = TYPE_6 . get ( args [ 0 ] ) ; TYPE_7 VAR_1 = TYPE_8 . METHOD_1 ( path ) ; TYPE_9 < TYPE_10 > VAR_2 = VAR_1 . METHOD_2 ( ) . METHOD_3 ( ) . METHOD_4 ( ) ; for ( TYPE_10 VAR_3 : VAR_2 ) { VAR_4 . out . METHOD_5 ( STRING_2 , VAR_3 . getName ( ) , VAR_3 . METHOD_6 ( ) ) ; TYPE_9 < TYPE_11 > VAR_5 = VAR_3 . METHOD_7 ( ) ; for ( TYPE_11 VAR_6 : VAR_5 ) { TYPE_9 < TYPE_12 > VAR_7 = VAR_6 . METHOD_8 ( ) ; for ( TYPE_12 VAR_8 : VAR_7 ) { VAR_4 . out . METHOD_5 ( STRING_3 , VAR_8 . getId ( ) , VAR_8 . METHOD_9 ( ) , VAR_8 . METHOD_10 ( ) ) ; } } VAR_4 . out . METHOD_5 ( STRING_4 , VAR_3 . getName ( ) ) ; } VAR_4 . out . METHOD_11 ( ) ; }
// Identifier-anonymized sample: a stream-style pipeline — maps the elements of VAR_1
// through TYPE_2::METHOD_4, filters them by matching VAR_2 via t.VAR_3(VAR_2), and
// reduces with a terminal METHOD_6 (shape suggests findFirst/collect — anonymized,
// cannot confirm).
public static TYPE_1 < TYPE_2 > METHOD_1 ( final TYPE_3 < TYPE_4 > VAR_1 , final String VAR_2 ) { return VAR_1 . METHOD_2 ( ) . METHOD_3 ( TYPE_2 : : METHOD_4 ) . METHOD_5 ( t - > t . VAR_3 ( VAR_2 ) ) . METHOD_6 ( ) ; }
/**
 * Asserts a table-level invariant: when {@code condition} is false, raises an
 * AchillesInvalidTableException whose message is {@code message} formatted
 * with {@code args}.
 *
 * @param condition invariant that must hold
 * @param message   format string for the failure message
 * @param args      format arguments
 */
public static void validateTableTrue(boolean condition, String message, Object... args) {
    if (condition) {
        return; // invariant holds — nothing to do
    }
    throw new AchillesInvalidTableException(format(message, args));
}
// Identifier-anonymized sample. Structurally a request-dispatch handler: resolves a
// handler (TYPE_5) for the target, builds a command/task (TYPE_6) from the request
// pair, and executes it. Construction failures (TYPE_7 | TYPE_8) and execution
// failures (TYPE_9) are each logged with the target in the message and reported back
// through distinct error responders (METHOD_4 vs METHOD_5) — the cause object VAR_8
// is preserved in both paths.
ANNOTATION_1 ( STRING_1 ) ANNOTATION_2 protected void METHOD_1 ( final String target , final TYPE_1 VAR_1 , final TYPE_2 VAR_2 , final TYPE_3 response ) { final TYPE_4 VAR_3 = VAR_4 . get ( target ) ; final TYPE_5 VAR_5 = ( TYPE_5 ) VAR_6 . METHOD_2 ( VAR_3 ) ; final TYPE_6 < ? , ? > VAR_7 ; try { VAR_7 = METHOD_3 ( VAR_5 , VAR_1 , VAR_2 , response ) ; } catch ( TYPE_7 | TYPE_8 VAR_8 ) { VAR_9 . error ( String . format ( STRING_2 , target ) , VAR_8 ) ; METHOD_4 ( VAR_1 , response , VAR_8 ) ; return ; } try { VAR_5 . execute ( VAR_7 ) ; } catch ( final TYPE_9 VAR_8 ) { VAR_9 . error ( String . format ( STRING_3 , target , VAR_3 ) , VAR_8 ) ; METHOD_5 ( VAR_1 , response , VAR_8 ) ; } }
// Identifier-anonymized sample. Structurally a power-set-style construction: the
// input is snapshotted into an indexed array, its size is capped at INT_1, and the
// result enumerates 1 << size index values; each value is mapped to the subset of
// array elements whose bit is set in it (TYPE_12.get(value, index) reads bit `index`).
// The 2^n/bit-test shape strongly suggests "all subsets of the input", but the
// identifiers are anonymized — confirm before relying on that reading.
public static < TYPE_1 > TYPE_2 < TYPE_2 < TYPE_1 > > METHOD_1 ( final TYPE_2 < TYPE_1 > VAR_1 ) { final TYPE_3 < TYPE_1 > array = TYPE_4 . METHOD_1 ( VAR_1 ) ; TYPE_5 . METHOD_2 ( array . size ( ) <= INT_1 , STRING_1 ) ; return TYPE_6 . METHOD_1 ( TYPE_7 . get ( 1 < < array . size ( ) ) , new TYPE_8 < TYPE_9 , TYPE_2 < TYPE_1 > > ( ) { ANNOTATION_1 public TYPE_2 < TYPE_1 > METHOD_3 ( final TYPE_9 VAR_2 ) { return TYPE_6 . METHOD_1 ( TYPE_10 . METHOD_1 ( TYPE_7 . get ( array . size ( ) ) , new TYPE_11 < TYPE_9 > ( ) { ANNOTATION_1 public boolean METHOD_4 ( TYPE_9 index ) { return TYPE_12 . get ( VAR_2 , index ) ; } } ) , new TYPE_8 < TYPE_9 , TYPE_1 > ( ) { ANNOTATION_1 public TYPE_1 METHOD_3 ( TYPE_9 index ) { return array . get ( index ) ; } } ) ; } } ) ; }
/**
 * Core format-string interpreter (java.util.Formatter shape): parses
 * {@code format} into FormatString tokens and prints each with the proper
 * argument.
 *
 * Argument selection follows the format-spec index kinds:
 * index -2 = fixed text / "%n" / "%%" (no argument);
 * index -1 = relative index '<' (reuses the last argument);
 * index  0 = ordinary (next positional argument);
 * index &gt;0 = explicit "n$" index.
 * A reference past the end of {@code args} raises
 * MissingFormatArgumentException; IOExceptions from the sink are not thrown
 * but recorded in {@code lastException} (retrievable via ioException()).
 *
 * @param l      locale applied to each conversion (may be null)
 * @param format the format string
 * @param args   arguments referenced by the format specifiers (may be null)
 * @return this formatter, for chaining
 */
public Formatter format ( Locale l , String format , Object ... args ) { ensureOpen ( ) ; // index of last argument referenced int last = - 1 ; // last ordinary index int lasto = - 1 ; FormatString [ ] fsa = parse ( format ) ; for ( int i = 0 ; i < fsa . length ; i ++ ) { FormatString fs = fsa [ i ] ; int index = fs . index ( ) ; try { switch ( index ) { case - 2 : // fixed string, "%n", or "%%" fs . print ( null , l ) ; break ; case - 1 : // relative index if ( last < 0 || ( args != null && last > args . length - 1 ) ) throw new MissingFormatArgumentException ( fs . toString ( ) ) ; fs . print ( ( args == null ? null : args [ last ] ) , l ) ; break ; case 0 : // ordinary index lasto ++ ; last = lasto ; if ( args != null && lasto > args . length - 1 ) throw new MissingFormatArgumentException ( fs . toString ( ) ) ; fs . print ( ( args == null ? null : args [ lasto ] ) , l ) ; break ; default : // explicit index last = index - 1 ; if ( args != null && last > args . length - 1 ) throw new MissingFormatArgumentException ( fs . toString ( ) ) ; fs . print ( ( args == null ? null : args [ last ] ) , l ) ; break ; } } catch ( IOException x ) { lastException = x ; } } return this ; }
// Identifier-anonymized sample: formats a start and an end value extracted from
// VAR_1 (via METHOD_3/METHOD_5 then METHOD_4) with TYPE_2.METHOD_2, and stores the
// pair — joined with separator STRING_1 — under a key built from prefix VAR_3 + id.
// Shape suggests persisting a time range keyed by id; identifiers anonymized.
public void METHOD_1 ( String id , TYPE_1 VAR_1 ) { String start = TYPE_2 . METHOD_2 ( VAR_1 . METHOD_3 ( ) . METHOD_4 ( ) ) ; String end = TYPE_2 . METHOD_2 ( VAR_1 . METHOD_5 ( ) . METHOD_4 ( ) ) ; VAR_2 . METHOD_6 ( VAR_3 + id , start + STRING_1 + end ) ; }
// Identifier-anonymized sample. Structurally a large protobuf/builder-style assembly:
// creates a value object from field VAR_4, starts a TYPE_1 builder named from
// VAR_8.name, populates a nested TYPE_4 builder with name/value pairs from
// VAR_8.VAR_14 (also collecting existing names into set VAR_10, which is then unused
// — NOTE(review): possible dead code, confirm), chains several METHOD_7/METHOD_8
// attribute attachments plus a range built from the first and last elements of
// VAR_23, derives a string VAR_26 from the two parameters and a computed count, adds
// a list of flagged TYPE_9.VAR_28 entries from VAR_8.VAR_31, optionally attaches
// VAR_8.VAR_33 when non-empty, and returns the built TYPE_1.
public TYPE_1 METHOD_1 ( TYPE_2 VAR_1 , TYPE_2 VAR_2 ) { String VAR_3 = this . VAR_4 ; TYPE_3 VAR_5 = TYPE_3 . METHOD_2 ( ) . setValue ( VAR_3 ) . build ( ) ; TYPE_1 . VAR_6 VAR_7 = TYPE_1 . METHOD_2 ( ) . METHOD_3 ( VAR_8 . name ) . METHOD_4 ( VAR_5 ) ; TYPE_4 . VAR_6 VAR_9 = TYPE_4 . METHOD_2 ( ) ; TYPE_5 < String > VAR_10 = new TYPE_6 < > ( ) ; for ( TYPE_4 . VAR_11 VAR_12 : VAR_9 . METHOD_5 ( ) ) { VAR_10 . add ( VAR_12 . getName ( ) ) ; } for ( TYPE_7 VAR_13 : VAR_8 . VAR_14 ) { VAR_9 . METHOD_6 ( TYPE_4 . VAR_11 . METHOD_2 ( ) . METHOD_3 ( VAR_13 . name ) . setValue ( VAR_13 . value ) ) ; } VAR_7 . METHOD_7 ( METHOD_8 ( VAR_15 . VAR_16 , VAR_8 . VAR_17 ) ) . METHOD_7 ( METHOD_8 ( VAR_15 . VAR_18 , VAR_8 . VAR_19 ) ) . METHOD_7 ( METHOD_8 ( VAR_15 . VAR_20 , VAR_8 . VAR_21 ) ) . METHOD_7 ( METHOD_9 ( VAR_15 . VAR_22 , this . VAR_23 . get ( 0 ) , this . VAR_23 . get ( this . VAR_23 . size ( ) - 1 ) ) ) . METHOD_10 ( this . VAR_24 . METHOD_11 ( ) ) ; int VAR_25 = TYPE_8 . METHOD_12 ( VAR_3 ) ; String VAR_26 = METHOD_13 ( VAR_1 , VAR_2 , VAR_25 ) ; TYPE_9 . VAR_6 VAR_27 = TYPE_9 . METHOD_2 ( ) ; TYPE_10 < TYPE_9 . VAR_28 > VAR_29 = new TYPE_11 < > ( ) ; for ( String VAR_30 : VAR_8 . VAR_31 ) { VAR_29 . add ( TYPE_9 . VAR_28 . METHOD_2 ( ) . setValue ( VAR_30 ) . METHOD_14 ( true ) . build ( ) ) ; } VAR_27 . setValue ( VAR_26 ) . METHOD_15 ( VAR_8 . VAR_32 ) . METHOD_16 ( VAR_9 ) . METHOD_17 ( VAR_29 ) ; if ( ! VAR_8 . VAR_33 . isEmpty ( ) ) { VAR_27 . METHOD_18 ( VAR_8 . VAR_33 ) ; } VAR_7 . METHOD_19 ( VAR_27 ) ; return VAR_7 . build ( ) ; }
/**
 * Stores compiled class bytes under {@code resourceName} in the dialect
 * runtime data.
 *
 * Failures are converted into a JavaDialectError on the shared error list
 * rather than propagated; the recorded message now includes the underlying
 * exception so the cause is not lost (previously only the resource name was
 * recorded and the stack trace went to stderr).
 *
 * @param resourceName resource path the bytes are stored under
 * @param clazzData    compiled class bytes
 */
public void write ( final String resourceName , final byte [ ] clazzData ) {
    try {
        this . javaDialectRuntimeData . write ( resourceName , clazzData ) ;
    } catch ( final Exception e ) {
        e . printStackTrace ( ) ;
        // include the cause in the recorded error instead of dropping it
        this . errors . add ( new JavaDialectError ( "PackageStore was unable to write resourceName='" + resourceName + "': " + e ) ) ;
    }
}
/**
 * Adds a key/value pair to the report's "custom" section, creating the
 * section lazily on first use.
 *
 * If the value cannot be stored directly (JSONObject rejects infinite/NaN
 * doubles), it is retried as its String form; remaining JSON failures are
 * considered impossible and only printed.
 *
 * @param key   custom field name
 * @param value custom field value (stringified if not JSON-storable)
 */
public void putCustom ( String key , Object value ) { if ( custom == null ) { custom = new JSONObject ( ) ; try { report . put ( "custom" , custom ) ; } catch ( JSONException ex ) { ex . printStackTrace ( ) ; // not expected } } try { custom . put ( key , value ) ; } catch ( JSONException ex ) { try { // should only happen if value is infinite or NaN custom . put ( key , String . valueOf ( value ) ) ; } catch ( JSONException ex1 ) { ex1 . printStackTrace ( ) ; // not expected } } }
public synchronized int getPGArrayElement ( int oid ) throws SQLException { if ( oid == Oid . UNSPECIFIED ) { return Oid . UNSPECIFIED ; } Integer pgType = pgArrayToPgType . get ( oid ) ; if ( pgType != null ) { return pgType ; } if ( getArrayElementOidStatement == null ) { String sql ; sql = "SELECT e.oid, n.nspname = ANY(current_schemas(true)), n.nspname, e.typname " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_type e ON t.typelem = e.oid " + "JOIN pg_catalog.pg_namespace n ON t.typnamespace = n.oid WHERE t.oid = ?" ; getArrayElementOidStatement = conn . prepareStatement ( sql ) ; } getArrayElementOidStatement . setInt ( 1 , oid ) ; // Go through BaseStatement to avoid transaction start. if ( ! ( ( BaseStatement ) getArrayElementOidStatement ) . executeWithFlags ( QueryExecutor . QUERY_SUPPRESS_BEGIN ) ) { throw new PSQLException ( GT . tr ( "No results were returned by the query." ) , PSQLState . NO_DATA ) ; } ResultSet rs = getArrayElementOidStatement . getResultSet ( ) ; if ( ! rs . next ( ) ) { throw new PSQLException ( GT . tr ( "No results were returned by the query." ) , PSQLState . NO_DATA ) ; } pgType = ( int ) rs . getLong ( 1 ) ; boolean onPath = rs . getBoolean ( 2 ) ; String schema = rs . getString ( 3 ) ; String name = rs . getString ( 4 ) ; pgArrayToPgType . put ( oid , pgType ) ; pgNameToOid . put ( schema + "." + name , pgType ) ; String fullName = "\"" + schema + "\".\"" + name + "\"" ; pgNameToOid . put ( fullName , pgType ) ; if ( onPath && name . equals ( name . toLowerCase ( ) ) ) { oidToPgName . put ( pgType , name ) ; pgNameToOid . put ( name , pgType ) ; } else { oidToPgName . put ( pgType , fullName ) ; } rs . close ( ) ; return pgType ; }
/**
 * Convenience overload: builds the order-note client without any
 * responseFields filter (all fields are returned).
 */
public static MozuClient<com.mozu.api.contracts.commerceruntime.orders.OrderNote> getOrderNoteClient(String orderId, String noteId) throws Exception {
    final String responseFields = null;
    return getOrderNoteClient(orderId, noteId, responseFields);
}
/**
 * Executes an outline scenario row by delegating entirely to the runner of
 * its underlying basic scenario node; the runner's result is passed through.
 */
@Override
protected boolean execute(OutlineScenarioRowNode node, RootNodeExecutionContext context) {
    return basicScenarioNodeRunner.run(node.getBasicScenarioNode(), context);
}
private void doUpdateDatasource ( ) throws PageException { int allow = ( getBoolV ( "allowed_select" , false ) ? DataSource . ALLOW_SELECT : 0 ) + ( getBoolV ( "allowed_insert" , false ) ? DataSource . ALLOW_INSERT : 0 ) + ( getBoolV ( "allowed_update" , false ) ? DataSource . ALLOW_UPDATE : 0 ) + ( getBoolV ( "allowed_delete" , false ) ? DataSource . ALLOW_DELETE : 0 ) + ( getBoolV ( "allowed_alter" , false ) ? DataSource . ALLOW_ALTER : 0 ) + ( getBoolV ( "allowed_drop" , false ) ? DataSource . ALLOW_DROP : 0 ) + ( getBoolV ( "allowed_revoke" , false ) ? DataSource . ALLOW_REVOKE : 0 ) + ( getBoolV ( "allowed_grant" , false ) ? DataSource . ALLOW_GRANT : 0 ) + ( getBoolV ( "allowed_create" , false ) ? DataSource . ALLOW_CREATE : 0 ) ; if ( allow == 0 ) allow = DataSource . ALLOW_ALL ; String cn = getString ( "admin" , action , "classname" ) ; if ( "com.microsoft.jdbc.sqlserver.SQLServerDriver" . equals ( cn ) ) { cn = "com.microsoft.sqlserver.jdbc.SQLServerDriver" ; } ClassDefinition cd = new ClassDefinitionImpl ( cn , getString ( "bundleName" , null ) , getString ( "bundleVersion" , null ) , config . getIdentification ( ) ) ; // customParameterSyntax Struct sct = getStruct ( "customParameterSyntax" , null ) ; ParamSyntax ps = ( sct != null && sct . containsKey ( "delimiter" ) && sct . containsKey ( "separator" ) ) ? ParamSyntax . toParamSyntax ( sct ) : ParamSyntax . 
DEFAULT ; // boolean literalTimestampWithTSOffset = getBoolV ( "literalTimestampWithTSOffset" , false ) ; boolean alwaysSetTimeout = getBoolV ( "alwaysSetTimeout" , false ) ; String id = getString ( "id" , null ) ; String dsn = getString ( "admin" , action , "dsn" ) ; String name = getString ( "admin" , action , "name" ) ; String newName = getString ( "admin" , action , "newName" ) ; String username = getString ( "admin" , action , "dbusername" ) ; String password = getString ( "admin" , action , "dbpassword" ) ; String host = getString ( "host" , "" ) ; String timezone = getString ( "timezone" , "" ) ; String database = getString ( "database" , "" ) ; int port = getInt ( "port" , - 1 ) ; int connLimit = getInt ( "connectionLimit" , - 1 ) ; int connTimeout = getInt ( "connectionTimeout" , - 1 ) ; long metaCacheTimeout = getLong ( "metaCacheTimeout" , 60000 ) ; boolean blob = getBoolV ( "blob" , false ) ; boolean clob = getBoolV ( "clob" , false ) ; boolean validate = getBoolV ( "validate" , false ) ; boolean storage = getBoolV ( "storage" , false ) ; boolean verify = getBoolV ( "verify" , true ) ; Struct custom = getStruct ( "custom" , new StructImpl ( ) ) ; String dbdriver = getString ( "dbdriver" , "" ) ; // config.getDatasourceConnectionPool().remove(name); DataSource ds = null ; try { ds = new DataSourceImpl ( config , name , cd , host , dsn , database , port , username , password , null , connLimit , connTimeout , metaCacheTimeout , blob , clob , allow , custom , false , validate , storage , null , dbdriver , ps , literalTimestampWithTSOffset , alwaysSetTimeout , config . getLog ( "application" ) ) ; } catch ( Exception e ) { throw Caster . toPageException ( e ) ; } if ( verify ) _doVerifyDatasource ( ds , username , password ) ; // print.out("limit:"+connLimit); admin . 
updateDataSource ( id , name , newName , cd , dsn , username , password , host , database , port , connLimit , connTimeout , metaCacheTimeout , blob , clob , allow , validate , storage , timezone , custom , dbdriver , ps , literalTimestampWithTSOffset , alwaysSetTimeout ) ; store ( ) ; adminSync . broadcast ( attributes , config ) ; }
/**
 * Finds the rows produced by the given native SQL query within the supplied
 * transaction; pure pass-through to the delegate server's extended API.
 */
@Override
public List<SqlRow> findList(SqlQuery query, Transaction transaction) {
    return delegate.extended().findList(query, transaction);
}
// NOTE(review): identifiers are anonymized placeholders (TYPE_n/METHOD_n/VAR_n/
// STRING_n); only the visible structure is documented — semantics unverified.
// Structure: maps a lower-cased operation name onto one of four TYPE_3 constants,
// invokes METHOD_2 with that constant plus the remaining arguments, rethrows the
// result if it is itself a TYPE_2 throwable, otherwise casts and returns it.
// An unrecognized operation leaves `op` null — presumably handled by METHOD_2;
// TODO confirm.
ANNOTATION_1 public TYPE_1 METHOD_1 ( final String operation , final String VAR_1 , final String ... keys ) throws TYPE_2 { TYPE_3 op = null ; if ( operation . toLowerCase ( ) . equals ( STRING_1 ) ) { op = TYPE_3 . VAR_2 ; } else if ( operation . toLowerCase ( ) . equals ( STRING_2 ) ) { op = TYPE_3 . VAR_3 ; } else if ( operation . toLowerCase ( ) . equals ( STRING_3 ) ) { op = TYPE_3 . VAR_4 ; } else if ( operation . toLowerCase ( ) . equals ( STRING_4 ) ) { op = TYPE_3 . VAR_5 ; } TYPE_4 VAR_6 = METHOD_2 ( STRING_5 , op , VAR_1 , keys ) ; if ( VAR_6 instanceof TYPE_2 ) { throw ( TYPE_2 ) VAR_6 ; } return ( TYPE_1 ) VAR_6 ; }
/**
 * Sets the response content to the full byte array.
 *
 * @param aContent the content bytes; must not be {@code null}
 * @return this response, for chaining
 */
@Nonnull
public final UnifiedResponse setContent(@Nonnull final byte[] aContent) {
    ValueEnforcer.notNull(aContent, "Content");
    // Shorthand for the ranged overload: offset 0, whole array.
    final int nOfs = 0;
    return setContent(aContent, nOfs, aContent.length);
}
// NOTE(review): identifiers are anonymized placeholders (TYPE_n/METHOD_n/VAR_n/
// STRING_n/INT_n); only the visible structure is documented.
// Structure: builds a connection-like object VAR_5 from host STRING_1 and port
// INT_1, wraps it in VAR_6, spawns a background thread that sleeps, logs, and
// invokes VAR_6.METHOD_4() (swallowing TYPE_1 via METHOD_5), then calls the
// presumably blocking VAR_5.METHOD_1() on the main thread — TODO confirm.
public static void main ( final String [ ] args ) throws TYPE_1 { final String VAR_1 = STRING_1 ; final int VAR_2 = INT_1 ; final String VAR_3 = STRING_2 ; final TYPE_2 VAR_4 = new TYPE_2 ( ) ; final TYPE_3 VAR_5 = new TYPE_3 ( VAR_4 , VAR_1 , VAR_2 ) ; final TYPE_4 VAR_6 = new TYPE_4 ( VAR_5 ) ; new TYPE_5 ( new TYPE_6 ( ) { ANNOTATION_1 public void METHOD_1 ( ) { try { TYPE_5 . METHOD_2 ( INT_2 ) ; VAR_7 . VAR_8 . METHOD_3 ( STRING_3 ) ; VAR_6 . METHOD_4 ( ) ; } catch ( final TYPE_1 VAR_9 ) { VAR_9 . METHOD_5 ( ) ; } } } ) . start ( ) ; VAR_5 . METHOD_1 ( ) ; }
public CodeableConcept getNotDoneReason ( ) { if ( this . notDoneReason == null ) if ( Configuration . errorOnAutoCreate ( ) ) throw new Error ( "Attempt to auto-create Communication.notDoneReason" ) ; else if ( Configuration . doAutoCreate ( ) ) this . notDoneReason = new CodeableConcept ( ) ; // cc return this . notDoneReason ; }
// NOTE(review): anonymized placeholder identifiers; structure only.
// Validation passthrough: returns VAR_1 unchanged when METHOD_2 accepts it,
// otherwise throws TYPE_1 with a message formatted from VAR_1 and VAR_2.
public static String METHOD_1 ( String VAR_1 ) { if ( ! METHOD_2 ( VAR_1 ) ) { throw new TYPE_1 ( format ( STRING_1 , VAR_1 , VAR_2 ) ) ; } return VAR_1 ; }
// NOTE(review): anonymized placeholder identifiers; structure only.
// Null-safe transform: prefixes STRING_1 when the first character satisfies
// TYPE_1.METHOD_2, then encodes via TYPE_2.METHOD_4 with STRING_2 (presumably
// a charset name — TODO confirm) and applies a replaceAll; on TYPE_3 failure
// it logs the error and returns the (possibly prefixed) input unchanged.
public static String METHOD_1 ( String VAR_1 ) { if ( VAR_1 == null ) return null ; if ( TYPE_1 . METHOD_2 ( VAR_1 . METHOD_3 ( 0 ) ) ) VAR_1 = STRING_1 + VAR_1 ; try { return TYPE_2 . METHOD_4 ( VAR_1 , STRING_2 ) . replaceAll ( STRING_3 , STRING_4 ) ; } catch ( TYPE_3 VAR_2 ) { TYPE_4 . METHOD_5 ( ) . error ( null , STRING_5 , VAR_2 , STRING_2 ) ; return VAR_1 ; } }
// NOTE(review): anonymized placeholder identifiers; structure only.
// Builds a fresh TYPE_2 response object, populates it via METHOD_2 using data
// from VAR_1.METHOD_3(), and returns its string form.
public String METHOD_1 ( ) throws TYPE_1 { TYPE_2 response = new TYPE_2 ( ) ; METHOD_2 ( response , VAR_1 . METHOD_3 ( ) ) ; return response . toString ( ) ; }
/**
 * Queues a single tap on the given element.
 */
public TouchActions singleTap(WebElement onElement) {
    // Legacy JSON-wire path: only available when the driver exposes a TouchScreen.
    if (touchScreen != null) {
        action.addAction(new SingleTapAction(touchScreen, (Locatable) onElement));
    }
    // W3C actions path: a tap is pointer-down followed by pointer-up (button 0).
    // NOTE(review): these ticks are appended unconditionally, even when the
    // legacy action was also queued — confirm the double dispatch is intended.
    tick(touchPointer.createPointerDown(0));
    tick(touchPointer.createPointerUp(0));
    return this;
}
// NOTE(review): anonymized placeholder identifiers; structure only.
// Resets the shared buffer, appends "name STRING_1 VAR_3[VAR_1] STRING_2",
// lets METHOD_2 append a representation of `type`, appends STRING_3, then
// records the assembled line in the `text` list.
ANNOTATION_1 public void METHOD_1 ( final int VAR_1 , final String type ) { buf . VAR_2 ( 0 ) ; buf . append ( name ) . append ( STRING_1 ) . append ( VAR_3 [ VAR_1 ] ) . append ( STRING_2 ) ; METHOD_2 ( type ) ; buf . append ( STRING_3 ) ; text . add ( buf . toString ( ) ) ; }
/**
 * Builds an async scan over the token range [start, end] of the placement's
 * delta table for row migration.  Both bounds are inclusive so that records
 * at a re-split boundary are not lost.
 *
 * @throws IllegalArgumentException if start >= end (Cassandra would wrap the
 *         token range, which is never wanted here)
 */
private Iterator<Iterable<Row>> migrationScan(DeltaPlacement placement, ByteBufferRange keyRange, ReadConsistency consistency) {
    ByteBuffer startToken = keyRange.getStart();
    ByteBuffer endToken = keyRange.getEnd();
    // Note: if Cassandra is asked to perform a token range query where start >= end it will wrap
    // around which is absolutely *not* what we want.
    checkArgument(AstyanaxStorage.compareKeys(startToken, endToken) < 0, "Cannot migrate rows which loop from maximum- to minimum-token");
    TableDDL tableDDL = placement.getDeltaTableDDL();
    // Our query needs to be inclusive on both sides so that we ensure that we get all records in the event of a re-split
    Statement statement = selectFrom(tableDDL).where(gte(token(tableDDL.getRowKeyColumnName()), startToken)).and(lte(token(tableDDL.getRowKeyColumnName()), endToken)).setConsistencyLevel(SorConsistencies.toCql(consistency));
    return deltaQueryAsync(placement, statement, false, "Failed to scan (for migration) token range [%s, %s] for %s", ByteBufferUtil.bytesToHex(startToken), ByteBufferUtil.bytesToHex(endToken), "multiple tables");
}
// Collects every rendering parameter into listParams.
// NOTE(review): a LinkedList preserves insertion order, and downstream
// consumers may depend on this exact ordering — do not reorder the adds
// without checking callers.
protected void buildList ( ) { listParams = new LinkedList <> ( ) ; listParams . add ( renderAllFaces ) ; listParams . add ( useBlockBounds ) ; listParams . add ( renderBounds ) ; listParams . add ( useCustomTexture ) ; listParams . add ( applyTexture ) ; listParams . add ( icon ) ; listParams . add ( iconProvider ) ; listParams . add ( useWorldSensitiveIcon ) ; listParams . add ( useTexture ) ; listParams . add ( interpolateUV ) ; listParams . add ( rotateIcon ) ; listParams . add ( calculateAOColor ) ; listParams . add ( calculateBrightness ) ; listParams . add ( usePerVertexColor ) ; listParams . add ( usePerVertexAlpha ) ; listParams . add ( usePerVertexBrightness ) ; listParams . add ( useEnvironmentBrightness ) ; listParams . add ( useNormals ) ; listParams . add ( colorMultiplier ) ; listParams . add ( colorFactor ) ; listParams . add ( brightness ) ; listParams . add ( alpha ) ; listParams . add ( direction ) ; listParams . add ( textureSide ) ; listParams . add ( aoMatrix ) ; listParams . add ( flipU ) ; listParams . add ( flipV ) ; listParams . add ( deductParameters ) ; }
/**
 * Parses the selector string into key/value pairs.  Chunks separated by
 * " AND" are parsed individually; a selector without " AND" is one pair.
 * Values are expected to be quoted — the surrounding quote characters are
 * stripped.
 *
 * @return a map of selector keys to their unquoted values
 */
public Map<String, String> toKeyValueMap() {
    Map<String, String> valueMap = new HashMap<String, String>();
    if (selectorString.contains(" AND")) {
        for (String chunk : selectorString.split(" AND")) {
            addKeyValue(valueMap, chunk);
        }
    } else {
        addKeyValue(valueMap, selectorString);
    }
    return valueMap;
}

/**
 * Parses a single {@code key='value'} fragment and stores it into the map.
 * Extracted to remove the duplicated token-splitting logic that previously
 * appeared in both branches of {@link #toKeyValueMap()}.
 */
private void addKeyValue(Map<String, String> valueMap, String chunk) {
    String[] tokens = escapeEqualsFromXpathNodeTest(chunk).split("=");
    String value = tokens[1].trim();
    // Strip the surrounding quote characters from the literal value.
    valueMap.put(unescapeEqualsFromXpathNodeTest(tokens[0].trim()), value.substring(1, value.length() - 1));
}
/**
 * Returns the INFORMATION_SCHEMA ASSERTIONS table, creating the (empty)
 * table definition with its columns and primary key on first access.
 */
Table ASSERTIONS() {
    Table t = sysTables[ASSERTIONS];
    if (t == null) {
        t = createBlankTable(sysTableHsqlNames[ASSERTIONS]);
        addColumn(t, "CONSTRAINT_CATALOG", SQL_IDENTIFIER);
        addColumn(t, "CONSTRAINT_SCHEMA", SQL_IDENTIFIER);
        addColumn(t, "CONSTRAINT_NAME", SQL_IDENTIFIER); // not null
        addColumn(t, "IS_DEFERRABLE", YES_OR_NO);
        addColumn(t, "INITIALLY_DEFERRED", YES_OR_NO);
        HsqlName name = HsqlNameManager.newInfoSchemaObjectName(sysTableHsqlNames[ASSERTIONS].name, false, SchemaObject.INDEX);
        t.createPrimaryKey(name, new int[] { 0, 1, 2 }, false);
        return t;
    }
    // Column-index constants; unused here because no rows are ever populated —
    // presumably kept for symmetry with sibling system-table builders that
    // fill rows. TODO confirm.
    final int constraint_catalog = 0;
    final int constraint_schema = 1;
    final int constraint_name = 2;
    final int is_deferrable = 3;
    final int initially_deferred = 4;
    return t;
}
// NOTE(review): anonymized placeholder identifiers; structure only.
// Fluent appender: concatenates the existing text (pre-processed by METHOD_2
// with STRING_1) with the argument and returns this for chaining.
ANNOTATION_1 public TYPE_1 METHOD_1 ( ANNOTATION_2 final String text ) { this . text = METHOD_2 ( this . text , STRING_1 ) + text ; return this ; }
// NOTE(review): anonymized placeholder identifiers; structure only.
// Recursive traversal of an element tree: builds dotted key paths (STRING_1 is
// presumably the separator — TODO confirm), skips paths listed in VAR_5,
// recurses into child elements, and for leaf text nodes that pass METHOD_4 /
// METHOD_5 adds the trimmed value into the multimap-like VAR_1 (with an
// optional debug log via VAR_6).
ANNOTATION_1 ( "unchecked" ) protected void METHOD_1 ( TYPE_1 element , String key , TYPE_2 < String , String > VAR_1 ) { for ( TYPE_3 < TYPE_4 > VAR_2 = element . VAR_3 ( ) ; VAR_2 . METHOD_2 ( ) ; ) { TYPE_4 node = VAR_2 . METHOD_3 ( ) ; if ( node instanceof TYPE_1 ) { TYPE_5 VAR_4 = new TYPE_5 ( key ) ; if ( VAR_4 . length ( ) > 0 ) { VAR_4 . append ( STRING_1 ) ; } VAR_4 . append ( node . getName ( ) ) ; if ( TYPE_6 . isEmpty ( VAR_5 ) || ! VAR_5 . contains ( VAR_4 . toString ( ) ) ) { METHOD_1 ( ( TYPE_1 ) node , VAR_4 . toString ( ) , VAR_1 ) ; } } else { String value = node . getText ( ) ; if ( TYPE_7 . METHOD_4 ( value ) && METHOD_5 ( key ) ) { if ( VAR_6 . METHOD_6 ( ) ) { VAR_6 . METHOD_7 ( String . format ( STRING_2 , value , key ) ) ; } VAR_1 . add ( key , TYPE_7 . trim ( value ) ) ; } } } }
// NOTE(review): anonymized placeholder identifiers; structure only.
// Single-recipient convenience overload: wraps `to` into a one-element array
// and delegates to the array-based variant.
public static TYPE_1 METHOD_1 ( String to , String VAR_1 , String text ) { return METHOD_1 ( new String [ ] { to } , VAR_1 , text ) ; }
/**
 * Lists the deployment targets applicable to the given application: a target
 * with no hints file is global; otherwise its hints file must name the
 * application (or, for applications, its template).
 */
@Override
public List<TargetWrapperDescriptor> listPossibleTargets(AbstractApplication app) {
    // Find the matching targets based on registered hints
    String key = new InstanceContext(app).toString();
    String tplKey = null;
    if (app instanceof Application)
        tplKey = new InstanceContext(((Application) app).getTemplate()).toString();
    List<File> targetDirectories = new ArrayList<>();
    File dir = new File(this.configurationMngr.getWorkingDirectory(), ConfigurationUtils.TARGETS);
    for (File f : Utils.listDirectories(dir)) {
        // If there is no hint for this target, then it is global.
        // We can list it.
        File hintsFile = new File(f, TARGETS_HINTS_FILE);
        if (!hintsFile.exists()) {
            targetDirectories.add(f);
            continue;
        }
        // Otherwise, the key must exist in the file
        Properties props = Utils.readPropertiesFileQuietly(hintsFile, this.logger);
        if (props.containsKey(key))
            targetDirectories.add(f);
        else if (tplKey != null && props.containsKey(tplKey))
            targetDirectories.add(f);
    }
    // Build the result
    return buildList(targetDirectories, app);
}
/**
 * Attaches an album image frame.  Silently ignored unless both the image
 * bytes and the MIME type are non-empty.
 */
@Override
public void setAlbumImage(byte[] albumImage, String mimeType, byte imageType, String imageDescription) {
    final boolean hasImage = albumImage != null && albumImage.length > 0;
    final boolean hasMimeType = mimeType != null && mimeType.length() > 0;
    if (!hasImage || !hasMimeType) {
        return; // nothing to attach
    }
    invalidateDataLength();
    final EncodedText description = (imageDescription == null) ? null : new EncodedText(imageDescription);
    final ID3v2PictureFrameData frameData = new ID3v2PictureFrameData(useFrameUnsynchronisation(), mimeType, imageType, description, albumImage);
    addFrame(createFrame(ID_IMAGE, frameData.toBytes()), true);
}
/**
 * Decides whether local files for the topic/partition should be uploaded,
 * then reconciles with the offset committed in ZooKeeper: upload when offsets
 * match, delete local files when someone committed past us, trim local files
 * (and re-check) when someone committed behind us.
 */
protected void checkTopicPartition(TopicPartition topicPartition, boolean forceUpload) throws Exception {
    boolean shouldUpload;
    if (mDeterministicUploadPolicyTracker != null) {
        // A deterministic policy overrides the size/age heuristics below.
        shouldUpload = mDeterministicUploadPolicyTracker.shouldUpload(topicPartition);
    } else {
        final long size = mFileRegistry.getSize(topicPartition);
        final long modificationAgeSec = mFileRegistry.getModificationAgeSec(topicPartition);
        LOG.debug("size: " + size + " modificationAge: " + modificationAgeSec);
        shouldUpload = forceUpload || size >= mConfig.getMaxFileSizeBytes() || modificationAgeSec >= mConfig.getMaxFileAgeSeconds() || isRequiredToUploadAtTime(topicPartition);
    }
    if (shouldUpload) {
        long newOffsetCount = mZookeeperConnector.getCommittedOffsetCount(topicPartition);
        long oldOffsetCount = mOffsetTracker.setCommittedOffsetCount(topicPartition, newOffsetCount);
        long lastSeenOffset = mOffsetTracker.getLastSeenOffset(topicPartition);
        if (oldOffsetCount == newOffsetCount) {
            LOG.debug("Uploading for: " + topicPartition);
            uploadFiles(topicPartition);
        } else if (newOffsetCount > lastSeenOffset) { // && oldOffset < newOffset
            LOG.debug("last seen offset {} is lower than committed offset count {}. Deleting files in topic {} partition {}", lastSeenOffset, newOffsetCount, topicPartition.getTopic(), topicPartition.getPartition());
            // There was a rebalancing event and someone committed an offset beyond that of the
            // current message. We need to delete the local file.
            mFileRegistry.deleteTopicPartition(topicPartition);
            if (mDeterministicUploadPolicyTracker != null) {
                mDeterministicUploadPolicyTracker.reset(topicPartition);
            }
        } else { // oldOffsetCount < newOffsetCount <= lastSeenOffset
            LOG.debug("previous committed offset count {} is lower than committed offset {} is lower than or equal to last seen offset {}. " + "Trimming files in topic {} partition {}", oldOffsetCount, newOffsetCount, lastSeenOffset, topicPartition.getTopic(), topicPartition.getPartition());
            // There was a rebalancing event and someone committed an offset lower than that
            // of the current message. We need to trim local files.
            trimFiles(topicPartition, newOffsetCount);
            // We might still be at the right place to upload. (In fact, we always trim the first time
            // we hit the upload condition because oldOffsetCount starts at -1, but this is usually a no-op trim.)
            // Check again! This is especially important if this was an "upload in graceful shutdown".
            checkTopicPartition(topicPartition, forceUpload);
        }
    }
}
/**
 * Returns a thread factory that wraps the JDK default factory so every
 * created thread gets a recognizable, sequentially numbered name.
 */
protected ThreadFactory getThreadFactory() {
    return new ThreadFactory() {
        private final ThreadFactory delegate = Executors.defaultThreadFactory();
        private final AtomicInteger counter = new AtomicInteger(1);

        @Override
        public Thread newThread(final Runnable task) {
            final Thread thread = delegate.newThread(task);
            thread.setName(THREAD_PREFIX + counter.getAndIncrement());
            return thread;
        }
    };
}
/**
 * Generates the Java source snippet that computes the serialized size of a
 * protobuf field: list fields and OBJECT fields go through CodedConstant
 * helpers, everything else through CodedOutputStream.compute*Size.
 *
 * @param field  field meta information (used for enum special-casing)
 * @param order  the protobuf field number
 * @param type   the protobuf field type
 * @param isList whether the field is repeated
 * @param debug  whether to emit debug-enabled size computation
 * @param path   optional output path embedded into the generated code
 * @return a Java expression string terminated by ";\n"
 */
public static String getMappedTypeSize(FieldInfo field, int order, FieldType type, boolean isList, boolean debug, File path) {
    String fieldName = getFieldName(order);
    String spath = "null";
    if (path != null) {
        // The absolute path is embedded inside a generated Java string literal.
        // FIX: the original replace(' ', ' ') was a no-op; Windows separators
        // ('\') must be converted to '/' or the generated literal contains
        // invalid escape sequences and fails to compile.
        spath = "new java.io.File(\"" + path.getAbsolutePath().replace('\\', '/') + "\")";
    }
    if (isList) {
        String typeString = type.getType().toUpperCase();
        return "CodedConstant.computeListSize(" + order + "," + fieldName + ", FieldType." + typeString + "," + Boolean.valueOf(debug) + "," + spath + ");\n";
    }
    if (type == FieldType.OBJECT) {
        String typeString = type.getType().toUpperCase();
        return "CodedConstant.computeSize(" + order + "," + fieldName + ", FieldType." + typeString + "," + Boolean.valueOf(debug) + "," + spath + ");\n";
    }
    // STRING and BYTES both map onto the "bytes" size computation.
    String t = type.getType();
    if (type == FieldType.STRING || type == FieldType.BYTES) {
        t = "bytes";
    }
    t = capitalize(t);
    boolean enumSpecial = false;
    if (type == FieldType.ENUM) {
        if (EnumReadable.class.isAssignableFrom(field.getField().getType())) {
            // EnumReadable enums serialize their value(); nested class names
            // must use '.' instead of '$' in generated source.
            String clsName = field.getField().getType().getName().replaceAll("\\$", ".");
            fieldName = "((" + clsName + ") " + fieldName + ").value()";
            enumSpecial = true;
        }
    }
    if (!enumSpecial) {
        fieldName = fieldName + type.getToPrimitiveType();
    }
    return "com.google.protobuf.CodedOutputStream.compute" + t + "Size(" + order + "," + fieldName + ");\n";
}
/**
 * Tests whether the given expression is an early-exit expression.
 */
@Override
protected boolean isEarlyExit(XExpression expr) {
    // This function is redefined in order to take care about the SARL early exit statements
    // that are not Java early exit statements.
    // In this case, a Java "return" statement must be applied.
    if (this.isOnJavaEarlyExit) {
        // One-shot flag: consume it and answer for Java semantics only.
        this.isOnJavaEarlyExit = false;
        return this.earlyExit.isEarlyExitInJava(expr);
    }
    return this.earlyExit.isEarlyExit(expr);
}
/**
 * Static factory: builds a DictionaryMaker pre-populated with every item
 * loaded from the given path.
 */
public static DictionaryMaker load(String path) {
    final DictionaryMaker maker = new DictionaryMaker();
    maker.addAll(DictionaryMaker.loadAsItemList(path));
    return maker;
}
/**
 * Demo of the raw CCEX trade API: places a small DASH/BTC limit order, lists
 * open orders, cancels the order and lists open orders again.  The sleeps
 * give the exchange time to register each change before querying.
 */
private static void raw(CCEXTradeServiceRaw tradeService) throws IOException {
    CurrencyPair pair = new CurrencyPair("DASH", "BTC");
    LimitOrder limitOrder = new LimitOrder.Builder(OrderType.BID, pair).limitPrice(new BigDecimal("0.00001000")).originalAmount(new BigDecimal("100")).build();
    try {
        String uuid = tradeService.placeCCEXLimitOrder(limitOrder);
        System.out.println("Order successfully placed. ID=" + uuid);
        Thread.sleep(7000); // wait for order to propagate
        System.out.println();
        System.out.println(tradeService.getCCEXOpenOrders());
        System.out.println("Attempting to cancel order " + uuid);
        boolean cancelled = tradeService.cancelCCEXLimitOrder(uuid);
        if (cancelled) {
            System.out.println("Order successfully canceled.");
        } else {
            System.out.println("Order not successfully canceled.");
        }
        Thread.sleep(7000); // wait for cancellation to propagate
        System.out.println();
        System.out.println(tradeService.getCCEXOpenOrders());
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Translates a VDM set bind into its IR form: converts the pattern and the
 * set expression with their respective visitors, then assembles the node.
 */
@Override
public SBindIR caseASetBind(ASetBind node, IRInfo question) throws AnalysisException {
    SPatternIR patternIr = node.getPattern().apply(question.getPatternVisitor(), question);
    SExpIR setIr = node.getSet().apply(question.getExpVisitor(), question);

    ASetBindIR result = new ASetBindIR();
    result.setPattern(patternIr);
    result.setSet(setIr);
    return result;
}
/**
 * Static factory using two-phase construction: a bare comparator is created
 * first and then initialized with the resource, context and property name.
 */
private static CmsPropertyResourceComparator create(CmsResource resource, CmsObject cms, String property) {
    final CmsPropertyResourceComparator comparator = new CmsPropertyResourceComparator(null, null, false);
    comparator.init(resource, cms, property);
    return comparator;
}
/**
 * Decides whether an array must fall back to the generic representation:
 * estimates the fixed-width layout size in 8-byte words and returns true
 * when that estimate would exceed the addressable limit.
 */
public static boolean shouldUseGenericArrayData(int elementSize, long length) {
    final long headerInBytes = calculateHeaderPortionInBytes(length);
    final long valueRegionInBytes = elementSize * length;
    final long wordCount = (headerInBytes + valueRegionInBytes + 7) / 8;
    return wordCount > Integer.MAX_VALUE / 8;
}
/**
 * Creates a new cache backed by a freshly created storage engine.  If cache
 * construction fails, the engine is destroyed before rethrowing so it is
 * never leaked.
 */
@Override
public PersistentReadWriteLockedOffHeapClockCache<K, V> newInstance() {
    final PersistentStorageEngine<? super K, ? super V> engine = storageEngineFactory.newInstance();
    try {
        return new PersistentReadWriteLockedOffHeapClockCache<>(tableSource, engine, tableSize, bootstrap);
    } catch (RuntimeException e) {
        engine.destroy();
        throw e;
    }
}
/**
 * Decodes a TCAP SecurityContext from the stream.  The value is a choice of
 * an OID form or an integer form, selected by the context-specific tag;
 * exactly one of {@code objectSecurityId} / {@code integerSecurityId} ends up
 * set (both are reset to null first).
 *
 * @throws ParseException wrapping any structural, IO or ASN.1 error as a
 *         BadlyStructuredDialoguePortion abort
 */
@Override
public void decode(AsnInputStream ais) throws ParseException {
    integerSecurityId = null;
    objectSecurityId = null;
    try {
        // The field must be a primitive, context-specific tagged value.
        if (!ais.isTagPrimitive() || ais.getTagClass() != Tag.CLASS_CONTEXT_SPECIFIC)
            throw new ParseException(PAbortCause.BadlyStructuredDialoguePortion, "Error decoding SecurityContext: bad tagClass or not primitive, found tagClass=" + ais.getTagClass());
        switch (ais.getTag()) {
            case SecurityContext._TAG_SECURITY_CONTEXT_OID:
                this.objectSecurityId = ais.readObjectIdentifier();
                break;
            case SecurityContext._TAG_SECURITY_CONTEXT_INTEGER:
                this.integerSecurityId = ais.readInteger();
                break;
            default:
                throw new ParseException(PAbortCause.BadlyStructuredDialoguePortion, "Error decoding SecurityContext: bad tag, found tag=" + ais.getTag());
        }
    } catch (IOException e) {
        throw new ParseException(PAbortCause.BadlyStructuredDialoguePortion, "IOException while decoding SecurityContext: " + e.getMessage(), e);
    } catch (AsnException e) {
        throw new ParseException(PAbortCause.BadlyStructuredDialoguePortion, "AsnException while decoding SecurityContext: " + e.getMessage(), e);
    }
}
/**
 * Recursively collects the names of files under {@code dirFile} that match
 * the given suffix, with paths rooted at "/".
 */
public static List<String> listFile(File dirFile, final String suffix) throws IOException {
    final List<String> result = new ArrayList<String>();
    addListFile(result, "/", dirFile, suffix);
    return result;
}
/**
 * Initializes the UI: applies the superclass initialization first, then
 * registers the {@code scoped} input component with the builder.
 */
@Override
public void initializeUI(UIBuilder builder) throws Exception {
    super.initializeUI(builder);
    builder.add(scoped);
}
// NOTE(review): anonymized placeholder identifiers; structure only.
// Formats an int[] header into a summary string: two fixed fields, a count
// (VAR_1[INT_2] / INT_2), then up to VAR_5 pairs starting at index VAR_7 in
// steps of INT_2, marking the pair whose ordinal equals VAR_2 with STRING_6,
// and appending a truncation suffix when more pairs exist than were printed.
public static String METHOD_1 ( int [ ] VAR_1 , int VAR_2 ) { assert ( VAR_1 != null ) ; assert ( VAR_1 . length >= INT_1 ) ; TYPE_1 VAR_3 = new TYPE_1 ( ) ; VAR_3 . append ( STRING_1 ) . append ( VAR_1 [ 0 ] ) ; VAR_3 . append ( STRING_2 ) . append ( VAR_1 [ 1 ] ) ; VAR_3 . append ( STRING_3 ) . append ( VAR_1 [ INT_2 ] / INT_2 ) ; int VAR_4 = TYPE_2 . min ( VAR_1 [ INT_2 ] , VAR_5 ) ; int VAR_6 = 0 ; for ( int i = VAR_7 ; i < VAR_7 + VAR_4 ; i += INT_2 ) { VAR_3 . append ( STRING_4 ) . append ( VAR_1 [ i ] ) ; VAR_3 . append ( STRING_5 ) . append ( VAR_1 [ i + 1 ] ) ; if ( VAR_6 == VAR_2 ) { VAR_3 . append ( STRING_6 ) ; } VAR_6 ++ ; } if ( VAR_1 [ INT_2 ] > VAR_5 ) { VAR_3 . append ( STRING_7 ) ; if ( VAR_2 == VAR_8 . VAR_9 ) { VAR_3 . append ( STRING_8 + STRING_9 + STRING_10 ) ; } } return VAR_3 . toString ( ) ; }
// NOTE(review): anonymized placeholder identifiers; structure only.
// Filter: starts from the full collection METHOD_3() and keeps only elements
// that match every entry of the criteria map VAR_1 — for each key, at least
// one of the requested values must be present in the element's own values
// (METHOD_6).  Uses labeled continues: VAR_6 advances to the next criterion
// on a match, VAR_4 rejects the element when a criterion has no match.
// A null/empty criteria map returns the unfiltered collection.
ANNOTATION_1 public TYPE_1 < TYPE_2 > METHOD_1 ( final TYPE_3 < TYPE_4 , TYPE_1 < String > > VAR_1 ) throws TYPE_5 , TYPE_6 { if ( VAR_1 == null || METHOD_2 ( VAR_1 ) ) { return METHOD_3 ( ) ; } TYPE_1 < TYPE_2 > VAR_2 = METHOD_3 ( ) ; TYPE_1 < TYPE_2 > VAR_3 = new TYPE_7 < TYPE_2 > ( ) ; VAR_4 : for ( TYPE_2 VAR_5 : VAR_2 ) { VAR_6 : for ( TYPE_8 < TYPE_4 , TYPE_1 < String > > VAR_7 : VAR_1 . METHOD_4 ( ) ) { TYPE_4 VAR_8 = VAR_7 . METHOD_5 ( ) ; TYPE_1 < String > values = VAR_7 . getValue ( ) ; TYPE_1 < String > VAR_9 = METHOD_6 ( VAR_8 , VAR_5 ) ; if ( values != null && values . size ( ) != 0 ) { for ( String VAR_10 : values ) { if ( VAR_9 . contains ( VAR_10 ) ) { continue VAR_6 ; } } continue VAR_4 ; } } VAR_3 . add ( VAR_5 ) ; } return VAR_3 ; }
/**
 * Releases the underlying native (SWIG-wrapped) object.  The native memory is
 * freed only when this wrapper owns it; the pointer is zeroed so repeated
 * calls are safe, and the superclass cleanup always runs afterwards.
 * Synchronized so concurrent deletes cannot double-free.
 */
public synchronized void delete() {
    if (swigCPtr != 0) {
        if (swigCMemOwn) {
            // Drop ownership before freeing, so a failure cannot cause a second free.
            swigCMemOwn = false;
            libtorrent_jni.delete_session_error_alert(swigCPtr);
        }
        swigCPtr = 0;
    }
    super.delete();
}
/**
 * Reports whether this session has listeners of the requested type (BINDING
 * or ACTIVATION), lazily loading the session's application data on the first
 * match so the listeners can actually be invoked later.
 */
@Override
public boolean getSwappableListeners(short requestedListener) {
    short thisListenerFlag = getListenerFlag();
    boolean rc = false;
    // check session's listenrCnt to see if it has any of the type we want
    // input listener is either BINDING or ACTIVATION, so if the session has both, its a match
    if (thisListenerFlag == requestedListener || thisListenerFlag == HTTP_SESSION_BINDING_AND_ACTIVATION_LISTENER) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(this, tc, "loading data because we have listener match for " + requestedListener);
        rc = true;
        if (!populatedAppData) {
            // Set the store's thread context around the load and always unset it.
            try {
                getSessions().getIStore().setThreadContext();
                getMultiRowAppData();
            } finally {
                getSessions().getIStore().unsetThreadContext();
            }
        }
    }
    return rc;
}
// NOTE(review): anonymized placeholder identifiers; structure only.
// Configures `this` from a class name: when className is null it is read
// (with several companion attributes) from VAR_1; empty/STRING_1/STRING_2
// class names fall back to VAR_2 when available.  The simple name (after the
// last CHAR_1) is applied via METHOD_4(...).METHOD_5(...), then METHOD_6 is
// attempted with STRING_3 and, on failure, a cascade of fallbacks is applied
// based on which companion attributes are present.  TYPE_3 failures are
// reported via METHOD_8 and yield null; otherwise `this` is returned for
// chaining.
public TYPE_1 METHOD_1 ( TYPE_2 VAR_1 , String className ) { String VAR_2 = null ; String VAR_3 = null ; String VAR_4 = null ; String VAR_5 = null ; if ( className == null ) { VAR_2 = VAR_1 . METHOD_2 ( VAR_6 . VAR_7 ) ; className = VAR_1 . METHOD_2 ( VAR_6 . VAR_8 ) ; VAR_3 = VAR_1 . METHOD_2 ( VAR_6 . VAR_9 ) ; VAR_4 = VAR_1 . METHOD_2 ( VAR_6 . VAR_10 ) ; VAR_5 = VAR_1 . METHOD_2 ( VAR_6 . VAR_11 ) ; } if ( className == null ) className = VAR_12 . VAR_13 ; if ( ( className . length ( ) == 0 ) || ( className . equals ( STRING_1 ) ) || ( className . equals ( STRING_2 ) ) ) { if ( VAR_2 != null ) if ( VAR_2 . length ( ) > 0 ) className = VAR_2 ; } try { this . METHOD_3 ( VAR_12 . VAR_14 ) ; if ( className . VAR_15 ( CHAR_1 ) != - 1 ) className = className . substring ( className . VAR_15 ( CHAR_1 ) + 1 ) ; this . METHOD_4 ( VAR_16 . VAR_17 ) . METHOD_5 ( className ) ; boolean VAR_18 = false ; this . METHOD_3 ( VAR_16 . VAR_19 ) ; if ( className . length ( ) > 0 ) VAR_18 = this . METHOD_6 ( STRING_3 ) ; if ( ! VAR_18 ) { if ( ( VAR_4 != null ) && ( VAR_4 . length ( ) > 0 ) ) this . METHOD_4 ( VAR_16 . VAR_17 ) . METHOD_5 ( STRING_4 ) ; else if ( ( VAR_2 != null ) && ( VAR_2 . length ( ) > 0 ) ) { if ( ( VAR_3 != null ) && ( VAR_3 . length ( ) > 0 ) && ( VAR_3 . METHOD_7 ( VAR_20 . VAR_21 ) ) ) this . METHOD_4 ( VAR_16 . VAR_17 ) . METHOD_5 ( STRING_1 ) ; else this . METHOD_4 ( VAR_16 . VAR_17 ) . METHOD_5 ( STRING_2 ) ; } else this . METHOD_4 ( VAR_16 . VAR_17 ) . METHOD_5 ( VAR_5 ) ; if ( ! VAR_18 ) VAR_18 = this . METHOD_6 ( STRING_3 ) ; } } catch ( TYPE_3 VAR_22 ) { VAR_22 . METHOD_8 ( ) ; return null ; } return this ; }
/**
 * Flushes buffered output; when the response is complete and the AJP13
 * END_RESPONSE packet has not yet been sent, builds and writes that packet
 * (including the keep-alive/persistent flag) exactly once.
 */
public void flush() throws IOException {
    super.flush();
    if (_complete && !_completed) {
        _completed = true; // guard: END_RESPONSE must be written only once
        _packet.resetData();
        _packet.addByte(AJP13ResponsePacket.__END_RESPONSE);
        _packet.addBoolean(_persistent);
        _packet.setDataSize();
        write(_packet);
        _packet.resetData();
    }
}
// NOTE(review): anonymized placeholder identifiers; structure only.
// Serializes VAR_2 into a VAR_7-delimited record in the shared buffer VAR_3:
// a leading marker chosen by VAR_1.METHOD_3(), two formatted header fields,
// a boolean rendered as STRING_1/"0", then three pairs of formatted values
// taken from VAR_2.METHOD_9(0|1|INT_1); METHOD_12 appends trailing data, the
// record is closed with VAR_4.VAR_8 and emitted via VAR_9.METHOD_13.
private void METHOD_1 ( TYPE_1 VAR_1 , TYPE_2 VAR_2 ) throws TYPE_3 { VAR_3 . METHOD_2 ( 0 ) ; if ( ! VAR_1 . METHOD_3 ( ) ) { VAR_3 . append ( VAR_4 . VAR_5 ) ; } else { VAR_3 . append ( VAR_4 . VAR_6 ) ; } VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( format ( METHOD_4 ( VAR_2 . METHOD_5 ( ) ) ) ) ; VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( format ( METHOD_4 ( VAR_2 . METHOD_6 ( ) ) ) ) ; VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( VAR_2 . METHOD_7 ( ) ? STRING_1 : "0" ) ; VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( format ( METHOD_8 ( VAR_2 . METHOD_9 ( 0 ) . METHOD_10 ( ) ) ) ) ; VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( format ( METHOD_8 ( VAR_2 . METHOD_9 ( 0 ) . METHOD_11 ( ) ) ) ) ; VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( format ( METHOD_8 ( VAR_2 . METHOD_9 ( 1 ) . METHOD_10 ( ) ) ) ) ; VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( format ( METHOD_8 ( VAR_2 . METHOD_9 ( 1 ) . METHOD_11 ( ) ) ) ) ; VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( format ( METHOD_8 ( VAR_2 . METHOD_9 ( INT_1 ) . METHOD_10 ( ) ) ) ) ; VAR_3 . append ( VAR_7 ) ; VAR_3 . append ( format ( METHOD_8 ( VAR_2 . METHOD_9 ( INT_1 ) . METHOD_11 ( ) ) ) ) ; METHOD_12 ( VAR_3 ) ; VAR_3 . append ( VAR_4 . VAR_8 ) ; VAR_9 . METHOD_13 ( VAR_3 . toString ( ) ) ; }
/**
 * Always rejects cache removal: this administration view is immutable, so
 * the call unconditionally throws (the name and flags are ignored).
 */
@Override
public void removeCache(String name, EnumSet<CacheContainerAdmin.AdminFlag> flags) {
    throw log.immutableConfiguration();
}
/**
 * Adds a wearable action with an attached remote input.  All arguments are
 * validated (in declaration order) before any state is modified.
 *
 * @return this builder, for chaining
 * @throws IllegalArgumentException if any argument is missing/invalid
 */
public Wear remoteInput(@DrawableRes int icon, String title, PendingIntent pendingIntent, RemoteInput remoteInput) {
    if (icon <= 0) {
        throw new IllegalArgumentException("Resource ID Icon Should Not Be Less Than Or Equal To Zero!");
    }
    if (title == null) {
        throw new IllegalArgumentException("Title Must Not Be Null!");
    }
    if (pendingIntent == null) {
        throw new IllegalArgumentException("PendingIntent Must Not Be Null!");
    }
    if (remoteInput == null) {
        throw new IllegalArgumentException("RemoteInput Must Not Be Null!");
    }
    this.remoteInput = remoteInput;
    NotificationCompat.Action wearAction = new NotificationCompat.Action.Builder(icon, title, pendingIntent)
            .addRemoteInput(remoteInput)
            .build();
    wearableExtender.addAction(wearAction);
    return this;
}
// NOTE(review): anonymized placeholder identifiers; structure only.
// Static factory: wraps the name and map into a new TYPE_1 using the constant
// VAR_3.VAR_4 plus fixed flags (true, null).
public static TYPE_1 METHOD_1 ( String VAR_1 , TYPE_2 < String , TYPE_3 > VAR_2 ) { return new TYPE_1 ( VAR_3 . VAR_4 , VAR_1 , VAR_2 , true , null ) ; }
// NOTE(review): anonymized placeholder identifiers; structure only.
// Path resolution helper: ensures VAR_2 ends with suffix STRING_1, maps it
// via VAR_3.METHOD_3, and when TYPE_1.METHOD_4 accepts VAR_2 rebases the
// result on VAR_1's prefix up to (and including) marker TYPE_2 via
// TYPE_3.METHOD_6; the final value is normalized by TYPE_1.METHOD_7 with
// VAR_6.
public String METHOD_1 ( String VAR_1 , String VAR_2 ) { if ( ! VAR_2 . METHOD_2 ( STRING_1 ) ) { VAR_2 += STRING_1 ; } String path = VAR_3 . METHOD_3 ( VAR_2 ) ; if ( TYPE_1 . METHOD_4 ( VAR_2 ) ) { String VAR_4 = VAR_1 ; int VAR_5 = VAR_4 . METHOD_5 ( TYPE_2 ) ; path = TYPE_3 . METHOD_6 ( VAR_4 . substring ( 0 , VAR_5 + TYPE_2 . length ( ) ) + path ) ; } return TYPE_1 . METHOD_7 ( path , VAR_6 ) ; }
/**
 * Loads the StructureDefinition profile for the given resource type from the
 * classpath, or returns null (recording a FATAL ValidationMessage when a
 * message collector is supplied).
 *
 * @param theMessages optional collector for validation failures; may be null
 * @param theCtx FHIR context used to locate the schema/profile directory
 * @param theResourceName resource type name; blank means "undeterminable"
 * @return the parsed profile, or null when it cannot be located or read
 */
static StructureDefinition loadProfileOrReturnNull(List<ValidationMessage> theMessages, FhirContext theCtx, String theResourceName) {
    if (isBlank(theResourceName)) {
        if (theMessages != null) {
            theMessages.add(new ValidationMessage().setLevel(IssueSeverity.FATAL).setMessage("Could not determine resource type from request. Content appears invalid."));
        }
        return null;
    }
    // Profiles live next to the schema definitions on the classpath.
    String profileClasspath = theCtx.getVersion().getPathToSchemaDefinitions().replace("/schema", "/profile");
    // FIX: the path segments were previously joined with a space (' ');
    // classpath resource names are '/'-separated, so the lookup could never
    // succeed.
    String profileCpName = profileClasspath + '/' + theResourceName.toLowerCase() + ".profile.xml";
    String profileText;
    try (InputStream inputStream = FhirInstanceValidator.class.getResourceAsStream(profileCpName)) {
        if (inputStream == null) {
            if (theMessages != null) {
                theMessages.add(new ValidationMessage().setLevel(IssueSeverity.FATAL).setMessage("No profile found for resource type " + theResourceName));
            }
            return null;
        }
        profileText = IOUtils.toString(inputStream, "UTF-8");
    } catch (IOException e1) {
        if (theMessages != null) {
            theMessages.add(new ValidationMessage().setLevel(IssueSeverity.FATAL).setMessage("No profile found for resource type " + theResourceName));
        }
        return null;
    }
    return getHl7OrgDstu2Ctx(theCtx).newXmlParser().parseResource(StructureDefinition.class, profileText);
}
// NOTE(review): anonymized placeholder identifiers; structure only.
// Wraps VAR_1 in a TYPE_4 adapter when necessary, derives VAR_2 from it via
// VAR_3.METHOD_2, builds the result through the sibling overload, then
// attempts VAR_4.METHOD_3(VAR_1) — a TYPE_6 failure is only logged (the
// partially initialized result is still returned; presumably intentional
// best-effort behaviour — TODO confirm).
public TYPE_1 METHOD_1 ( TYPE_2 VAR_1 ) throws TYPE_3 { if ( ! ( VAR_1 instanceof TYPE_4 ) ) { VAR_1 = new TYPE_4 ( VAR_1 ) ; } TYPE_5 VAR_2 = VAR_3 . METHOD_2 ( ( TYPE_4 ) VAR_1 ) ; TYPE_1 VAR_4 = METHOD_1 ( VAR_2 ) ; try { VAR_4 . METHOD_3 ( VAR_1 ) ; } catch ( TYPE_6 VAR_5 ) { VAR_6 . error ( STRING_1 , VAR_5 . METHOD_4 ( ) ) ; VAR_6 . METHOD_5 ( VAR_5 ) ; } return VAR_4 ; }
// Creates a new TYPE_1 from the given attributes, assigns it a generated
// identifier fragment, registers it in the VAR_7 map, and returns it.
// Identifiers are obfuscated; comments below describe only the visible structure.
public TYPE_1 METHOD_1 ( final String VAR_1 , final String size , final String VAR_2 , final int VAR_3 , final String VAR_4 ) {
    TYPE_1 VAR_5 = new TYPE_1 ( ) ;
    VAR_5 . METHOD_2 ( VAR_1 ) ;
    // STRING_1 prefix plus the first VAR_6 characters of TYPE_2.METHOD_4() —
    // presumably a freshly generated unique id (e.g. a UUID); TODO confirm.
    VAR_5 . METHOD_3 ( STRING_1 + TYPE_2 . METHOD_4 ( ) . toString ( ) . substring ( 0 , VAR_6 ) ) ;
    VAR_5 . METHOD_5 ( size ) ;
    VAR_5 . METHOD_6 ( VAR_2 ) ;
    VAR_5 . METHOD_7 ( VAR_3 ) ;
    VAR_5 . METHOD_8 ( VAR_4 ) ;
    // Index the new object by its METHOD_9() key for later lookup.
    VAR_7 . put ( VAR_5 . METHOD_9 ( ) , VAR_5 ) ;
    return VAR_5 ;
}
/**
 * Processes one {@code @Controller}-annotated element into a ControllerModel.
 *
 * Resolves the component and component-interface types from the annotation,
 * validates that all controllers in the compilation share one componentType
 * (recorded in the shared {@code metaModel}), checks for IsComponentCreator
 * support, and requires a context generic on the controller.
 *
 * @return the populated ControllerModel for code generation
 * @throws ProcessorException if the component type cannot be resolved, if this
 *         controller's componentType disagrees with the one already recorded,
 *         or if the controller declares no context generic
 */
private ControllerModel handleController ( ) throws ProcessorException {
    // get Annotation ...
    Controller annotation = controllerElement . getAnnotation ( Controller . class ) ;
    // handle ...
    TypeElement componentTypeElement = this . getComponentTypeElement ( annotation ) ;
    if ( componentTypeElement == null ) {
        throw new ProcessorException ( "Nalu-Processor: componentTypeElement is null" ) ;
    }
    TypeElement componentInterfaceTypeElement = this . getComponentInterfaceTypeElement ( annotation ) ;
    TypeMirror componentTypeTypeMirror = this . getComponentType ( controllerElement . asType ( ) ) ;
    // check and save the component type: the first controller seen fixes the
    // componentType for the whole compilation; every later controller must match.
    // NOTE: this mutates the shared metaModel — processing order matters.
    if ( metaModel . getComponentType ( ) == null ) {
        metaModel . setComponentType ( new ClassNameModel ( componentTypeTypeMirror . toString ( ) ) ) ;
    } else {
        ClassNameModel compareValue = new ClassNameModel ( componentTypeTypeMirror . toString ( ) ) ;
        if ( ! metaModel . getComponentType ( ) . equals ( compareValue ) ) {
            throw new ProcessorException ( "Nalu-Processor: componentType >>" + compareValue + "<< is different. All controllers must implement the componentType!" ) ;
        }
    }
    // check, if the controller implements IsComponentController
    boolean componentController = this . checkIsComponentCreator ( controllerElement , componentInterfaceTypeElement ) ;
    // get context! A controller without a context generic cannot be generated.
    String context = this . getContextType ( controllerElement ) ;
    if ( Objects . isNull ( context ) ) {
        throw new ProcessorException ( "Nalu-Processor: controller >>" + controllerElement . toString ( ) + "<< does not have a context generic!" ) ;
    }
    // save model ... (route appears three times: raw, normalized via getRoute,
    // and as the source of the extracted parameter list)
    return new ControllerModel ( annotation . route ( ) , getRoute ( annotation . route ( ) ) , annotation . selector ( ) , getParametersFromRoute ( annotation . route ( ) ) , new ClassNameModel ( context ) , new ClassNameModel ( controllerElement . toString ( ) ) , new ClassNameModel ( componentInterfaceTypeElement . toString ( ) ) , new ClassNameModel ( componentTypeElement . toString ( ) ) , new ClassNameModel ( componentTypeTypeMirror . toString ( ) ) , new ClassNameModel ( controllerElement . toString ( ) ) , componentController ) ;
}
/**
 * Writes this object's serialized form (via {@code to(OutputStream)}) to the
 * given file.
 *
 * @param path   destination file; created if it does not exist
 * @param append when {@code true}, appends to an existing file; otherwise the
 *               file is truncated and overwritten
 * @throws IOException if the file cannot be opened or written
 */
public void toFile ( Path path , boolean append ) throws IOException {
    // try-with-resources replaces the original try/finally so the stream is
    // closed on every path, including when to(...) throws.
    try ( OutputStream out = append
            ? Files . newOutputStream ( path , StandardOpenOption . APPEND , StandardOpenOption . CREATE )
            : Files . newOutputStream ( path , StandardOpenOption . WRITE , StandardOpenOption . CREATE , StandardOpenOption . TRUNCATE_EXISTING ) ) {
        to ( out ) ;
    }
}
// AWS-SDK-style internal execution method for the GetMediaForFragmentList
// operation: brackets the call with request metrics, marshalls the request,
// attaches handler context, invokes the service, and wraps the streaming
// payload so this client instance stays strongly referenced while the caller
// reads the stream. The start/end metric events are strictly paired — do not
// reorder statements here.
@ SdkInternalApi final GetMediaForFragmentListResult executeGetMediaForFragmentList ( GetMediaForFragmentListRequest getMediaForFragmentListRequest ) {
    ExecutionContext executionContext = createExecutionContext ( getMediaForFragmentListRequest ) ;
    AWSRequestMetrics awsRequestMetrics = executionContext . getAwsRequestMetrics ( ) ;
    awsRequestMetrics . startEvent ( Field . ClientExecuteTime ) ;
    Request < GetMediaForFragmentListRequest > request = null ;
    Response < GetMediaForFragmentListResult > response = null ;
    try {
        awsRequestMetrics . startEvent ( Field . RequestMarshallTime ) ;
        try {
            request = new GetMediaForFragmentListRequestProtocolMarshaller ( protocolFactory ) . marshall ( super . beforeMarshalling ( getMediaForFragmentListRequest ) ) ;
            // Binds the request metrics to the current request.
            request . setAWSRequestMetrics ( awsRequestMetrics ) ;
            request . addHandlerContext ( HandlerContextKey . SIGNING_REGION , getSigningRegion ( ) ) ;
            request . addHandlerContext ( HandlerContextKey . SERVICE_ID , "Kinesis Video Archived Media" ) ;
            request . addHandlerContext ( HandlerContextKey . OPERATION_NAME , "GetMediaForFragmentList" ) ;
            request . addHandlerContext ( HandlerContextKey . ADVANCED_CONFIG , advancedConfig ) ;
        } finally {
            awsRequestMetrics . endEvent ( Field . RequestMarshallTime ) ;
        }
        // Streaming success response: payload JSON is disabled, the body is the media stream.
        HttpResponseHandler < AmazonWebServiceResponse < GetMediaForFragmentListResult > > responseHandler = protocolFactory . createResponseHandler ( new JsonOperationMetadata ( ) . withPayloadJson ( false ) . withHasStreamingSuccessResponse ( true ) , new GetMediaForFragmentListResultJsonUnmarshaller ( ) ) ;
        response = invoke ( request , responseHandler , executionContext ) ;
        // Wrap the payload stream with a holder that pins this client so it is
        // not closed/GC'd while the caller is still reading.
        response . getAwsResponse ( ) . setPayload ( new com . amazonaws . util . ServiceClientHolderInputStream ( response . getAwsResponse ( ) . getPayload ( ) , this ) ) ;
        return response . getAwsResponse ( ) ;
    } finally {
        endClientExecution ( awsRequestMetrics , request , response ) ;
    }
}
// Resolves the value addressed by VAR_1 and key via METHOD_2, forwards it to
// METHOD_3, and always reports success. (Identifiers are obfuscated; this
// describes visible structure only.)
public boolean METHOD_1 ( String VAR_1 , TYPE_1 < TYPE_2 > key ) {
    final String resolved = METHOD_2 ( VAR_1 , key ) ;
    METHOD_3 ( resolved ) ;
    return true ;
}
/**
 * Returns one {@code Flow} per recorded flow. Each returned Flow is lazy: the
 * defensive copy ({@code new ArrayList<>(l)}) and the unmodifiable wrapping
 * happen when the Flow is invoked, not when this method runs — so each
 * invocation yields a snapshot of the backing list at that moment.
 */
public List < Flow > flows ( ) {
    return this . flows . stream ( )
            . < Flow > map ( l -> ( ) -> unmodifiableList ( new ArrayList <> ( l ) ) )
            . collect ( toList ( ) ) ;
}
/**
 * Orders SearchInfo instances by ascending {@code count}.
 * Uses {@link Integer#compare(int, int)} instead of boxing via
 * {@code Integer.valueOf(...).compareTo(...)} — same ordering, no allocation,
 * and immune to the subtraction-overflow pitfall.
 */
@ Override public int compareTo ( SearchInfo other ) {
    return Integer . compare ( count , other . getCount ( ) ) ;
}
// Emits one formatted row of values read from VAR_1 via repeated METHOD_4 calls.
// Identifiers are obfuscated; comments below describe only the visible structure.
// Negative METHOD_8/METHOD_10 values and unavailable METHOD_13/METHOD_14 stats
// are rendered as the STRING_1 placeholder instead of a formatted number.
private void METHOD_1 ( TYPE_1 VAR_1 , boolean VAR_2 ) {
    final TYPE_2 VAR_3 = METHOD_2 ( ) ;
    VAR_3 . METHOD_3 ( VAR_4 . VAR_5 ) ;
    // First three columns are always present, formatted with VAR_6.
    METHOD_4 ( VAR_6 . format ( VAR_1 . METHOD_5 ( ) ) ) ;
    METHOD_4 ( VAR_6 . format ( VAR_1 . METHOD_6 ( ) ) ) ;
    METHOD_4 ( VAR_6 . format ( VAR_1 . METHOD_7 ( ) ) ) ;
    // Negative METHOD_8 apparently means "no value" — emit the placeholder.
    if ( VAR_1 . METHOD_8 ( ) >= 0 ) {
        METHOD_4 ( VAR_6 . format ( VAR_1 . METHOD_8 ( ) ) ) ;
    } else {
        METHOD_4 ( STRING_1 ) ;
    }
    // Optional column, only emitted when METHOD_9() is enabled.
    if ( METHOD_9 ( ) ) {
        if ( VAR_1 . METHOD_10 ( ) >= 0 ) {
            METHOD_4 ( VAR_6 . format ( VAR_1 . METHOD_10 ( ) ) ) ;
        } else {
            METHOD_4 ( STRING_1 ) ;
        }
    }
    // METHOD_11 uses the alternate formatter VAR_7.
    METHOD_4 ( VAR_7 . format ( VAR_1 . METHOD_11 ( ) ) ) ;
    // Extended columns (requested via VAR_2): both gated on METHOD_12().
    if ( VAR_2 ) {
        final boolean VAR_8 = VAR_1 . METHOD_12 ( ) ;
        if ( VAR_8 ) {
            METHOD_4 ( VAR_6 . format ( VAR_1 . METHOD_13 ( ) ) ) ;
        } else {
            METHOD_4 ( STRING_1 ) ;
        }
        if ( VAR_8 ) {
            METHOD_4 ( VAR_6 . format ( VAR_1 . METHOD_14 ( ) ) ) ;
        } else {
            METHOD_4 ( STRING_1 ) ;
        }
    }
}
/**
 * Collects the transitive closure of the given elements: each root is passed
 * to the shared {@code traverser}, whose visitor (defined elsewhere in this
 * class) adds reachable elements into {@code completed}.
 *
 * NOTE(review): {@code completed} is a shared field cleared on every call, so
 * this method is not thread-safe and each call invalidates the previously
 * returned set — confirm callers expect that.
 *
 * @param elements root elements to start traversal from
 * @return the set of all elements reached (includes the roots)
 */
public Set < BioPAXElement > complete ( Collection < BioPAXElement > elements ) {
    completed . clear ( ) ;
    for ( BioPAXElement element : elements ) {
        // add() returning false means the element was already visited via an
        // earlier root's traversal — skip re-traversing it.
        if ( completed . add ( element ) ) {
            traverser . traverse ( element , null ) ; //model is not required here because of 'visit' impl. below
        }
    }
    return completed ;
}
README.md exists but content is empty. Use the Edit dataset card button to edit it.
Downloads last month
0
Edit dataset card