code_text
stringlengths
604
999k
repo_name
stringlengths
4
100
file_path
stringlengths
4
873
language
stringclasses
23 values
license
stringclasses
15 values
size
int32
1.02k
999k
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.axis2.transport.local; import org.apache.axis2.AxisFault; import org.apache.axis2.addressing.EndpointReference; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.context.MessageContext; import org.apache.axis2.description.TransportOutDescription; import org.apache.axis2.handlers.AbstractHandler; import org.apache.axis2.transport.TransportSender; import org.apache.axis2.transport.TransportUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; public class LocalTransportSender extends AbstractHandler implements TransportSender { protected static final Log log = LogFactory.getLog(LocalTransportSender.class); public void init(ConfigurationContext confContext, TransportOutDescription transportOut) throws AxisFault { } public void stop() { } public void cleanup(MessageContext msgContext) throws AxisFault { } /** * Method invoke * * @param msgContext the current MessageContext * @throws AxisFault */ public InvocationResponse invoke(MessageContext msgContext) 
throws AxisFault { // Check for the REST behaviour, if you desire rest beahaviour // put a <parameter name="doREST" value="true"/> at the axis2.xml msgContext.setDoingMTOM(TransportUtils.doWriteMTOM(msgContext)); msgContext.setDoingSwA(TransportUtils.doWriteSwA(msgContext)); OutputStream out; EndpointReference epr = msgContext.getTo(); if (log.isDebugEnabled()) { log.debug("Sending - " + msgContext.getEnvelope().toString()); } if (epr != null) { if (!epr.hasNoneAddress()) { out = new ByteArrayOutputStream(); TransportUtils.writeMessage(msgContext, out); finalizeSendWithToAddress(msgContext, (ByteArrayOutputStream)out); } } else { out = (OutputStream) msgContext.getProperty(MessageContext.TRANSPORT_OUT); if (out != null) { TransportUtils.writeMessage(msgContext, out); } else { throw new AxisFault( "Both the TO and Property MessageContext.TRANSPORT_OUT is Null, No where to send"); } } TransportUtils.setResponseWritten(msgContext, true); return InvocationResponse.CONTINUE; } public void finalizeSendWithToAddress(MessageContext msgContext, ByteArrayOutputStream out) throws AxisFault { try { InputStream in = new ByteArrayInputStream(out.toByteArray()); ByteArrayOutputStream response = new ByteArrayOutputStream(); LocalTransportReceiver localTransportReceiver = new LocalTransportReceiver(this, isNonBlocking()); localTransportReceiver.processMessage(msgContext, in, response); in.close(); out.close(); if (response.size() > 0) { in = new ByteArrayInputStream(response.toByteArray()); msgContext.setProperty(MessageContext.TRANSPORT_IN, in); } } catch (IOException e) { throw AxisFault.makeFault(e); } } protected boolean isNonBlocking() { if (log.isDebugEnabled()) { log.debug("Local Transport Sender Selected"); } return false; } }
imesh/wso2-axis2
modules/transport/local/src/org/apache/axis2/transport/local/LocalTransportSender.java
Java
apache-2.0
4,385
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.uamp.utils; import android.media.MediaMetadata; import android.media.session.MediaSession; import com.example.android.uamp.model.MusicProvider; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import static com.example.android.uamp.utils.MediaIDHelper.MEDIA_ID_MUSICS_BY_GENRE; import static com.example.android.uamp.utils.MediaIDHelper.MEDIA_ID_MUSICS_BY_SEARCH; /** * Utility class to help on queue related tasks. */ public class QueueHelper { private static final String TAG = LogHelper.makeLogTag(QueueHelper.class); public static List<MediaSession.QueueItem> getPlayingQueue(String mediaId, MusicProvider musicProvider) { // extract the browsing hierarchy from the media ID: String[] hierarchy = MediaIDHelper.getHierarchy(mediaId); if (hierarchy.length != 2) { LogHelper.e(TAG, "Could not build a playing queue for this mediaId: ", mediaId); return null; } String categoryType = hierarchy[0]; String categoryValue = hierarchy[1]; LogHelper.d(TAG, "Creating playing queue for ", categoryType, ", ", categoryValue); Iterable<MediaMetadata> tracks = null; // This sample only supports genre and by_search category types. 
if (categoryType.equals(MEDIA_ID_MUSICS_BY_GENRE)) { tracks = musicProvider.getMusicsByGenre(categoryValue); } else if (categoryType.equals(MEDIA_ID_MUSICS_BY_SEARCH)) { tracks = musicProvider.searchMusic(categoryValue); } if (tracks == null) { LogHelper.e(TAG, "Unrecognized category type: ", categoryType, " for mediaId ", mediaId); return null; } return convertToQueue(tracks, hierarchy[0], hierarchy[1]); } public static List<MediaSession.QueueItem> getPlayingQueueFromSearch(String query, MusicProvider musicProvider) { LogHelper.d(TAG, "Creating playing queue for musics from search ", query); return convertToQueue(musicProvider.searchMusic(query), MEDIA_ID_MUSICS_BY_SEARCH, query); } public static int getMusicIndexOnQueue(Iterable<MediaSession.QueueItem> queue, String mediaId) { int index = 0; for (MediaSession.QueueItem item : queue) { if (mediaId.equals(item.getDescription().getMediaId())) { return index; } index++; } return -1; } public static int getMusicIndexOnQueue(Iterable<MediaSession.QueueItem> queue, long queueId) { int index = 0; for (MediaSession.QueueItem item : queue) { if (queueId == item.getQueueId()) { return index; } index++; } return -1; } private static List<MediaSession.QueueItem> convertToQueue( Iterable<MediaMetadata> tracks, String... categories) { List<MediaSession.QueueItem> queue = new ArrayList<>(); int count = 0; for (MediaMetadata track : tracks) { // We create a hierarchy-aware mediaID, so we know what the queue is about by looking // at the QueueItem media IDs. String hierarchyAwareMediaID = MediaIDHelper.createMediaID( track.getDescription().getMediaId(), categories); MediaMetadata trackCopy = new MediaMetadata.Builder(track) .putString(MediaMetadata.METADATA_KEY_MEDIA_ID, hierarchyAwareMediaID) .build(); // We don't expect queues to change after created, so we use the item index as the // queueId. Any other number unique in the queue would work. 
MediaSession.QueueItem item = new MediaSession.QueueItem( trackCopy.getDescription(), count++); queue.add(item); } return queue; } /** * Create a random queue. For simplicity sake, instead of a random queue, we create a * queue using the first genre. * * @param musicProvider the provider used for fetching music. * @return list containing {@link MediaSession.QueueItem}'s */ public static List<MediaSession.QueueItem> getRandomQueue(MusicProvider musicProvider) { Iterator<String> genres = musicProvider.getGenres().iterator(); if (!genres.hasNext()) { return Collections.emptyList(); } String genre = genres.next(); Iterable<MediaMetadata> tracks = musicProvider.getMusicsByGenre(genre); return convertToQueue(tracks, MEDIA_ID_MUSICS_BY_GENRE, genre); } public static boolean isIndexPlayable(int index, List<MediaSession.QueueItem> queue) { return (queue != null && index >= 0 && index < queue.size()); } }
Bob1993/android-UniversalMusicPlayer
mobile/src/main/java/com/example/android/uamp/utils/QueueHelper.java
Java
apache-2.0
5,499
/* * Copyright (C) 2013 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.dagger.activitygraphs.ui; import android.app.Activity; import com.example.dagger.activitygraphs.PerActivity; import javax.inject.Inject; /** * A simple abstraction which provides the ability to set the title on an activity. * <p> * Fragments should not directly modify any part of an activity outside of the view or dialog that * it creates. This class provides a way for fragments to inject a controller that will allow for * control of the activity title. While not exceedingly useful in practice, this concept could be * expanded to things like facilitating control over the action bar, dialogs, notifications, etc. */ @PerActivity public class ActivityTitleController { private final Activity activity; @Inject public ActivityTitleController(Activity activity) { this.activity = activity; } public void setTitle(CharSequence title) { activity.setTitle(title); } }
hanks-zyh/dagger
examples/android-activity-graphs/src/main/java/com/example/dagger/activitygraphs/ui/ActivityTitleController.java
Java
apache-2.0
1,513
/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package record import ( "fmt" "math/rand" "time" "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" utilruntime "k8s.io/apimachinery/pkg/util/runtime" "k8s.io/apimachinery/pkg/watch" restclient "k8s.io/client-go/rest" "k8s.io/client-go/util/clock" "k8s.io/kubernetes/pkg/api/v1" "net/http" "github.com/golang/glog" ) const maxTriesPerEvent = 12 var defaultSleepDuration = 10 * time.Second const maxQueuedEvents = 1000 // EventSink knows how to store events (client.Client implements it.) // EventSink must respect the namespace that will be embedded in 'event'. // It is assumed that EventSink will return the same sorts of errors as // pkg/client's REST client. type EventSink interface { Create(event *v1.Event) (*v1.Event, error) Update(event *v1.Event) (*v1.Event, error) Patch(oldEvent *v1.Event, data []byte) (*v1.Event, error) } // EventRecorder knows how to record events on behalf of an EventSource. type EventRecorder interface { // Event constructs an event from the given information and puts it in the queue for sending. // 'object' is the object this event is about. Event will make a reference-- or you may also // pass a reference to the object directly. // 'type' of this event, and can be one of Normal, Warning. New types could be added in future // 'reason' is the reason this event is generated. 
'reason' should be short and unique; it // should be in UpperCamelCase format (starting with a capital letter). "reason" will be used // to automate handling of events, so imagine people writing switch statements to handle them. // You want to make that easy. // 'message' is intended to be human readable. // // The resulting event will be created in the same namespace as the reference object. Event(object runtime.Object, eventtype, reason, message string) // Eventf is just like Event, but with Sprintf for the message field. Eventf(object runtime.Object, eventtype, reason, messageFmt string, args ...interface{}) // PastEventf is just like Eventf, but with an option to specify the event's 'timestamp' field. PastEventf(object runtime.Object, timestamp metav1.Time, eventtype, reason, messageFmt string, args ...interface{}) } // EventBroadcaster knows how to receive events and send them to any EventSink, watcher, or log. type EventBroadcaster interface { // StartEventWatcher starts sending events received from this EventBroadcaster to the given // event handler function. The return value can be ignored or used to stop recording, if // desired. StartEventWatcher(eventHandler func(*v1.Event)) watch.Interface // StartRecordingToSink starts sending events received from this EventBroadcaster to the given // sink. The return value can be ignored or used to stop recording, if desired. StartRecordingToSink(sink EventSink) watch.Interface // StartLogging starts sending events received from this EventBroadcaster to the given logging // function. The return value can be ignored or used to stop recording, if desired. StartLogging(logf func(format string, args ...interface{})) watch.Interface // NewRecorder returns an EventRecorder that can be used to send events to this EventBroadcaster // with the event source set to the given event source. NewRecorder(source v1.EventSource) EventRecorder } // Creates a new event broadcaster. 
func NewBroadcaster() EventBroadcaster { return &eventBroadcasterImpl{watch.NewBroadcaster(maxQueuedEvents, watch.DropIfChannelFull), defaultSleepDuration} } func NewBroadcasterForTests(sleepDuration time.Duration) EventBroadcaster { return &eventBroadcasterImpl{watch.NewBroadcaster(maxQueuedEvents, watch.DropIfChannelFull), sleepDuration} } type eventBroadcasterImpl struct { *watch.Broadcaster sleepDuration time.Duration } // StartRecordingToSink starts sending events received from the specified eventBroadcaster to the given sink. // The return value can be ignored or used to stop recording, if desired. // TODO: make me an object with parameterizable queue length and retry interval func (eventBroadcaster *eventBroadcasterImpl) StartRecordingToSink(sink EventSink) watch.Interface { // The default math/rand package functions aren't thread safe, so create a // new Rand object for each StartRecording call. randGen := rand.New(rand.NewSource(time.Now().UnixNano())) eventCorrelator := NewEventCorrelator(clock.RealClock{}) return eventBroadcaster.StartEventWatcher( func(event *v1.Event) { recordToSink(sink, event, eventCorrelator, randGen, eventBroadcaster.sleepDuration) }) } func recordToSink(sink EventSink, event *v1.Event, eventCorrelator *EventCorrelator, randGen *rand.Rand, sleepDuration time.Duration) { // Make a copy before modification, because there could be multiple listeners. // Events are safe to copy like this. eventCopy := *event event = &eventCopy result, err := eventCorrelator.EventCorrelate(event) if err != nil { utilruntime.HandleError(err) } if result.Skip { return } tries := 0 for { if recordEvent(sink, result.Event, result.Patch, result.Event.Count > 1, eventCorrelator) { break } tries++ if tries >= maxTriesPerEvent { glog.Errorf("Unable to write event '%#v' (retry limit exceeded!)", event) break } // Randomize the first sleep so that various clients won't all be // synced up if the master goes down. 
if tries == 1 { time.Sleep(time.Duration(float64(sleepDuration) * randGen.Float64())) } else { time.Sleep(sleepDuration) } } } func isKeyNotFoundError(err error) bool { statusErr, _ := err.(*errors.StatusError) if statusErr != nil && statusErr.Status().Code == http.StatusNotFound { return true } return false } // recordEvent attempts to write event to a sink. It returns true if the event // was successfully recorded or discarded, false if it should be retried. // If updateExistingEvent is false, it creates a new event, otherwise it updates // existing event. func recordEvent(sink EventSink, event *v1.Event, patch []byte, updateExistingEvent bool, eventCorrelator *EventCorrelator) bool { var newEvent *v1.Event var err error if updateExistingEvent { newEvent, err = sink.Patch(event, patch) } // Update can fail because the event may have been removed and it no longer exists. if !updateExistingEvent || (updateExistingEvent && isKeyNotFoundError(err)) { // Making sure that ResourceVersion is empty on creation event.ResourceVersion = "" newEvent, err = sink.Create(event) } if err == nil { // we need to update our event correlator with the server returned state to handle name/resourceversion eventCorrelator.UpdateState(newEvent) return true } // If we can't contact the server, then hold everything while we keep trying. // Otherwise, something about the event is malformed and we should abandon it. switch err.(type) { case *restclient.RequestConstructionError: // We will construct the request the same next time, so don't keep trying. 
glog.Errorf("Unable to construct event '%#v': '%v' (will not retry!)", event, err) return true case *errors.StatusError: if errors.IsAlreadyExists(err) { glog.V(5).Infof("Server rejected event '%#v': '%v' (will not retry!)", event, err) } else { glog.Errorf("Server rejected event '%#v': '%v' (will not retry!)", event, err) } return true case *errors.UnexpectedObjectError: // We don't expect this; it implies the server's response didn't match a // known pattern. Go ahead and retry. default: // This case includes actual http transport errors. Go ahead and retry. } glog.Errorf("Unable to write event: '%v' (may retry after sleeping)", err) return false } // StartLogging starts sending events received from this EventBroadcaster to the given logging function. // The return value can be ignored or used to stop recording, if desired. func (eventBroadcaster *eventBroadcasterImpl) StartLogging(logf func(format string, args ...interface{})) watch.Interface { return eventBroadcaster.StartEventWatcher( func(e *v1.Event) { logf("Event(%#v): type: '%v' reason: '%v' %v", e.InvolvedObject, e.Type, e.Reason, e.Message) }) } // StartEventWatcher starts sending events received from this EventBroadcaster to the given event handler function. // The return value can be ignored or used to stop recording, if desired. func (eventBroadcaster *eventBroadcasterImpl) StartEventWatcher(eventHandler func(*v1.Event)) watch.Interface { watcher := eventBroadcaster.Watch() go func() { defer utilruntime.HandleCrash() for { watchEvent, open := <-watcher.ResultChan() if !open { return } event, ok := watchEvent.Object.(*v1.Event) if !ok { // This is all local, so there's no reason this should // ever happen. continue } eventHandler(event) } }() return watcher } // NewRecorder returns an EventRecorder that records events with the given event source. 
func (eventBroadcaster *eventBroadcasterImpl) NewRecorder(source v1.EventSource) EventRecorder { return &recorderImpl{source, eventBroadcaster.Broadcaster, clock.RealClock{}} } type recorderImpl struct { source v1.EventSource *watch.Broadcaster clock clock.Clock } func (recorder *recorderImpl) generateEvent(object runtime.Object, timestamp metav1.Time, eventtype, reason, message string) { ref, err := v1.GetReference(object) if err != nil { glog.Errorf("Could not construct reference to: '%#v' due to: '%v'. Will not report event: '%v' '%v' '%v'", object, err, eventtype, reason, message) return } if !validateEventType(eventtype) { glog.Errorf("Unsupported event type: '%v'", eventtype) return } event := recorder.makeEvent(ref, eventtype, reason, message) event.Source = recorder.source go func() { // NOTE: events should be a non-blocking operation defer utilruntime.HandleCrash() recorder.Action(watch.Added, event) }() } func validateEventType(eventtype string) bool { switch eventtype { case v1.EventTypeNormal, v1.EventTypeWarning: return true } return false } func (recorder *recorderImpl) Event(object runtime.Object, eventtype, reason, message string) { recorder.generateEvent(object, metav1.Now(), eventtype, reason, message) } func (recorder *recorderImpl) Eventf(object runtime.Object, eventtype, reason, messageFmt string, args ...interface{}) { recorder.Event(object, eventtype, reason, fmt.Sprintf(messageFmt, args...)) } func (recorder *recorderImpl) PastEventf(object runtime.Object, timestamp metav1.Time, eventtype, reason, messageFmt string, args ...interface{}) { recorder.generateEvent(object, timestamp, eventtype, reason, fmt.Sprintf(messageFmt, args...)) } func (recorder *recorderImpl) makeEvent(ref *v1.ObjectReference, eventtype, reason, message string) *v1.Event { t := metav1.Time{Time: recorder.clock.Now()} namespace := ref.Namespace if namespace == "" { namespace = metav1.NamespaceDefault } return &v1.Event{ ObjectMeta: metav1.ObjectMeta{ Name: 
fmt.Sprintf("%v.%x", ref.Name, t.UnixNano()), Namespace: namespace, }, InvolvedObject: *ref, Reason: reason, Message: message, FirstTimestamp: t, LastTimestamp: t, Count: 1, Type: eventtype, } }
sjug/perf-tests
compare/vendor/k8s.io/kubernetes/pkg/client/record/event.go
GO
apache-2.0
11,796
/** * Copyright 2009 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // appjetContext.cache_requestCache()._t_start = (new Date()).valueOf(); var _appjethidden_ = {}; var serverhandlers = { tasks: {} }; /* * @overview * * AppJet standard library preamble. * * This is run at the beginning of every request, right after all * native calls are loaded into appjetContext. This file is run * in the same scope as the app, the global scope, which is also * accessible from all modules. */ //---------------------------------------------------------------- // delete pesky rhino built-in string stuff //---------------------------------------------------------------- (function() { // rhino strings come with a bunch of random "html helpers" // that we don't want var htmlStuff = ["bold", "italics", "fixed", "strike", "small", "big", "sub", "fontsize", "fontcolor", "link", "anchor", "sup", "blink"]; for(var i in htmlStuff) { delete String.prototype[htmlStuff[i]]; } })(); //---------------------------------------------------------------- // module implementation //---------------------------------------------------------------- (function(globalScope) { //---------------------------------------------------------------- // Utility Functions //---------------------------------------------------------------- function appjetContext() { return net.appjet.oui.ExecutionContextUtils.currentContext(); } function internalError(m) { throw new Error("AppJet Internal Error: "+m); } function 
apiError(m) { throw new Error("AppJet API Error: "+m); } function newScope() { var o = new Object(); o.__parent__ = null; o.__proto__ = globalScope; return o; } _appjethidden_._debugMessage = function(m) { //java.lang.System.out.println(m); }; var debug = _appjethidden_._debugMessage; function copySymbol(srcName, symName, src, dst, dstSymName) { if (!src.hasOwnProperty(symName)) { apiError("Import error: module \""+srcName+"\" does not contain the symbol \""+symName+"\"."); } if (symName.charAt(0) == '_') { apiError("Import error: cannot import symbol \""+symName+"\" because it is private (begins with _)"); } debug(" | copying symbol ["+symName+"]"); dst[dstSymName || symName] = src[symName]; } function copyPublicSymbols(src, dst) { for (k in src) { if (src.hasOwnProperty(k) && (k.length > 0) && (k.charAt(0) != '_')) { copySymbol('', k, src, dst); } } } // Module import cache... hidden from other scopes. var moduleObjects = {}; var modulesBeingLoaded = {}; /*-------------------------------------------------------------------------------- * loadModule(): * Evaluates moduleName in its own private scope, then copies its public identifiers * into a new scope. This new scope is stored in moduleObjects[moduleName] for future use * by import()s. * * If moduleName is currently being loaded (because we are in the middle of another loadModule() * higher in the call stack), then this function does noething, on the assumption * that moduleName will eventually be loaded anyway. Therefore, it cannot be assumed that * moduleName is done being loaded when loadModule() returns, only that it eventually will be * loaded when all loadModule calls return up the call stack. *--------------------------------------------------------------------------------*/ function loadModule(moduleName) { if (modulesBeingLoaded[moduleName]) { // This is OK. The module will be loaded eventually. 
return; } if (moduleObjects[moduleName]) { return; } modulesBeingLoaded[moduleName] = true; try { debug("loadModule: "+moduleName); var modulePrivateScope = Packages.net.appjet.ajstdlib.ajstdlib.runModuleInNewScope( appjetContext(), moduleName.split('.').join('/')); if (!modulePrivateScope) { // moduleName is not a module. This is normal, because when someone calls // import("foo.bar"), we dont know if bar is a module or an identifier in the foo module. delete modulesBeingLoaded[moduleName]; return; } // Thinking this could be useful: // modulePrivateScope['__MODULE_NAME__'] = moduleName; var moduleObj = newScope(); copyPublicSymbols(modulePrivateScope, moduleObj); moduleObjects[moduleName] = moduleObj; } finally { delete modulesBeingLoaded[moduleName]; } } /*-------------------------------------------------------------------------------- * importSingleModule(): * * Takes a single moduleName (like "etherpad.foo.bar.baz") and creates the identifier "baz" * in dstScope, referencing the module etherpad.foo.bar.baz. * * This function is called one or more times by importPath(). Note that importPath() is more like * the import() function that modules ses. 
*--------------------------------------------------------------------------------*/ function importSingleModule(moduleName, dstScope) { debug("importSingleModule: "+moduleName); if (typeof(moduleName) != 'string') { apiError("modules should be referred to with string, not "+typeof(moduleName)); } var moduleObj = moduleObjects[moduleName]; // public module scope if (!moduleObj) { return false; } var importedName = moduleName; if (importedName.indexOf(".") != -1) { importedName = importedName.split(".").slice(-1)[0]; } dstScope[importedName] = moduleObj; return true; } /*-------------------------------------------------------------------------------- * importPath(): * takes a modulePath (like "a.b.c.{d,e,f}" or "a.b.*" or just "a.b" or "a") and * repeatedly calls importSingleModule() as necessary, copying public symbols into dst. *--------------------------------------------------------------------------------*/ function importPath(modulePath, dst) { debug("importPath: "+modulePath); // Two possibilties: // 1. import the exact module and that's it. // // 2. module contains a "." and we need to import up to the // last ., and then import a name (or set of names) from it. // first try case 1: var ok = importSingleModule(modulePath, dst); if (ok) { return; } if (modulePath.indexOf(".") == -1) { throw new Error("Module does not exist: "+modulePath); } // now try case 2: var tempDst = newScope(); var moduleName = modulePath.split('.').slice(0, -1).join('.'); var importedName = modulePath.split('.').slice(-1)[0]; var lastName = modulePath.split('.').slice(-2, -1)[0]; ok = importSingleModule(moduleName, tempDst); if (!ok) { throw new Error("Neither module exists: "+moduleName+", "+modulePath); } if (!tempDst[lastName]) { internalError("import failed for "+moduleName+"|"+importedName+". 
This could be an appjet bug."); } if (importedName == "*") { copyPublicSymbols(tempDst[lastName], dst); } else if (importedName.match(/^\{.*\}$/)) { importedName.slice(1,-1).split(',').forEach(function(sym) { if (sym.match(/^.*=>.*$/)) { copySymbol(moduleName, sym.split("=>")[0], tempDst[lastName], dst, sym.split("=>")[1]); } else { copySymbol(moduleName, sym, tempDst[lastName], dst); } }); } else { copySymbol(moduleName, importedName, tempDst[lastName], dst); } } //---------------------------------------------------------------- // scheduling //---------------------------------------------------------------- var scheduledImports = []; function scheduleImportPath(p, dst) { scheduledImports.push([p, dst]); } function runScheduledImports() { scheduledImports.forEach(function(x) { importPath(x[0], x[1]); }); } //---------------------------------------------------------------- // The global import function //---------------------------------------------------------------- _appjethidden_.importsAllowed = true; globalScope['import'] = function(path1, path2, etc) { if (!_appjethidden_.importsAllowed) { throw Error("Imports are finished. No more imports are allowed."); } var dstScope = this; if (arguments.length < 1) { apiError("importModule() takes the name of at least one module as an argument."); } for (var i = 0; i < arguments.length; i++) { var path = arguments[i]; debug("scheduling import: "+path); scheduleImportPath(path, dstScope); // evaluate all modules in this path. 
var parts = path.split('.'); for (var j = 0; j < parts.length; j++) { var moduleName = parts.slice(0,j+1).join('.'); loadModule(moduleName); } } }; _appjethidden_.finishImports = function() { debug("Running scheduled imports..."); runScheduledImports(); _appjethidden_.importsAllowed = false; }; //---------------------------------------------------------------- // jimport //---------------------------------------------------------------- function _jimportSinglePackage(pname, dstScope) { //_appjethidden_._debugMessage("_jimportSinglePackage: "+pname); // TODO: support "*" and "{}" syntax like scala. var src = Packages; var srcParent = null; var localName = pname.split(".").pop(); var soFar = ''; pname.split(".").forEach(function(x) { soFar += x+'.'; if (!src[x]) { throw ('Could not find java package/class: '+soFar); } else { //_appjethidden_._debugMessage("descenting into "+src+"["+x+"]"); srcParent = src; src = src[x]; } }); if (String(src).indexOf('function') == 0) { // TODO: checking String(src).indexOf('function') is rather brittle. // is there a cleaner way? // TODO: this only works on static functions... so make sure // src[x] is a static function! dstScope[localName] = function() { return src.apply(srcParent, Array.prototype.slice.call(arguments)); }; } else { // importing a regular java class dstScope[localName] = src; } } /** * Import a java package over LiveConnect. */ globalScope['jimport'] = function() { var dstScope = this; for (var i = 0; i < arguments.length; i++) { var pname = arguments[i].split(".").pop(); _jimportSinglePackage(arguments[i], dstScope); } }; //---------------------------------------------------------------- // {appjet, request, response} imported by default //---------------------------------------------------------------- globalScope['import'].call(globalScope, "global.appjet.appjet", "global.request.request", "global.response.response"); })(this);
mozilla/pad
infrastructure/framework-src/preamble.js
JavaScript
apache-2.0
11,705
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.api.functions.co; import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.functions.AbstractRichFunction; import org.apache.flink.streaming.api.TimeDomain; import org.apache.flink.streaming.api.TimerService; import org.apache.flink.util.Collector; import org.apache.flink.util.OutputTag; /** * A function that processes elements of two streams and produces a single output one. * * <p>The function will be called for every element in the input streams and can produce * zero or more output elements. Contrary to the {@link CoFlatMapFunction}, this function can also * query the time (both event and processing) and set timers, through the provided {@link Context}. * When reacting to the firing of set timers the function can emit yet more elements. * * <p>An example use-case for connected streams would be the application of a set of rules that change * over time ({@code stream A}) to the elements contained in another stream (stream {@code B}). The rules * contained in {@code stream A} can be stored in the state and wait for new elements to arrive on * {@code stream B}. 
Upon reception of a new element on {@code stream B}, the function can now apply the * previously stored rules to the element and directly emit a result, and/or register a timer that * will trigger an action in the future. * * @param <IN1> Type of the first input. * @param <IN2> Type of the second input. * @param <OUT> Output type. */ @PublicEvolving public abstract class CoProcessFunction<IN1, IN2, OUT> extends AbstractRichFunction { private static final long serialVersionUID = 1L; /** * This method is called for each element in the first of the connected streams. * * <p>This function can output zero or more elements using the {@link Collector} parameter * and also update internal state or set timers using the {@link Context} parameter. * * @param value The stream element * @param ctx A {@link Context} that allows querying the timestamp of the element, * querying the {@link TimeDomain} of the firing timer and getting a * {@link TimerService} for registering timers and querying the time. * The context is only valid during the invocation of this method, do not store it. * @param out The collector to emit resulting elements to * @throws Exception The function may throw exceptions which cause the streaming program * to fail and go into recovery. */ public abstract void processElement1(IN1 value, Context ctx, Collector<OUT> out) throws Exception; /** * This method is called for each element in the second of the connected streams. * * <p>This function can output zero or more elements using the {@link Collector} parameter * and also update internal state or set timers using the {@link Context} parameter. * * @param value The stream element * @param ctx A {@link Context} that allows querying the timestamp of the element, * querying the {@link TimeDomain} of the firing timer and getting a * {@link TimerService} for registering timers and querying the time. * The context is only valid during the invocation of this method, do not store it. 
* @param out The collector to emit resulting elements to * @throws Exception The function may throw exceptions which cause the streaming program * to fail and go into recovery. */ public abstract void processElement2(IN2 value, Context ctx, Collector<OUT> out) throws Exception; /** * Called when a timer set using {@link TimerService} fires. * * @param timestamp The timestamp of the firing timer. * @param ctx An {@link OnTimerContext} that allows querying the timestamp of the firing timer, * querying the {@link TimeDomain} of the firing timer and getting a * {@link TimerService} for registering timers and querying the time. * The context is only valid during the invocation of this method, do not store it. * @param out The collector for returning result values. * * @throws Exception This method may throw exceptions. Throwing an exception will cause the operation * to fail and may trigger recovery. */ public void onTimer(long timestamp, OnTimerContext ctx, Collector<OUT> out) throws Exception {} /** * Information available in an invocation of {@link #processElement1(Object, Context, Collector)}/ * {@link #processElement2(Object, Context, Collector)} * or {@link #onTimer(long, OnTimerContext, Collector)}. */ public abstract class Context { /** * Timestamp of the element currently being processed or timestamp of a firing timer. * * <p>This might be {@code null}, for example if the time characteristic of your program * is set to {@link org.apache.flink.streaming.api.TimeCharacteristic#ProcessingTime}. */ public abstract Long timestamp(); /** * A {@link TimerService} for querying time and registering timers. */ public abstract TimerService timerService(); /** * Emits a record to the side output identified by the {@link OutputTag}. * * @param outputTag the {@code OutputTag} that identifies the side output to emit to. * @param value The record to emit. 
*/ public abstract <X> void output(OutputTag<X> outputTag, X value); } /** * Information available in an invocation of {@link #onTimer(long, OnTimerContext, Collector)}. */ public abstract class OnTimerContext extends Context { /** * The {@link TimeDomain} of the firing timer. */ public abstract TimeDomain timeDomain(); } }
xiaokuangkuang/kuangjingxiangmu
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/co/CoProcessFunction.java
Java
apache-2.0
6,459
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gson.metrics; import com.google.gson.Gson; import com.google.gson.JsonParseException; import com.google.gson.annotations.Expose; import com.google.gson.reflect.TypeToken; import junit.framework.TestCase; import java.io.StringWriter; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Tests to measure performance for Gson. All tests in this file will be disabled in code. To run * them remove disabled_ prefix from the tests and run them. * * @author Inderjeet Singh * @author Joel Leitch */ public class PerformanceTest extends TestCase { private static final int COLLECTION_SIZE = 5000; private static final int NUM_ITERATIONS = 100; private Gson gson; @Override protected void setUp() throws Exception { super.setUp(); gson = new Gson(); } public void testDummy() { // This is here to prevent Junit for complaining when we disable all tests. 
} public void disabled_testStringDeserialization() { StringBuilder sb = new StringBuilder(8096); sb.append("Error Yippie"); while (true) { try { String stackTrace = sb.toString(); sb.append(stackTrace); String json = "{\"message\":\"Error message.\"," + "\"stackTrace\":\"" + stackTrace + "\"}"; parseLongJson(json); System.out.println("Gson could handle a string of size: " + stackTrace.length()); } catch (JsonParseException expected) { break; } } } private void parseLongJson(String json) throws JsonParseException { ExceptionHolder target = gson.fromJson(json, ExceptionHolder.class); assertTrue(target.message.contains("Error")); assertTrue(target.stackTrace.contains("Yippie")); } private static class ExceptionHolder { public final String message; public final String stackTrace; // For use by Gson @SuppressWarnings("unused") private ExceptionHolder() { this("", ""); } public ExceptionHolder(String message, String stackTrace) { this.message = message; this.stackTrace = stackTrace; } } @SuppressWarnings("unused") private static class CollectionEntry { final String name; final String value; // For use by Gson private CollectionEntry() { this(null, null); } CollectionEntry(String name, String value) { this.name = name; this.value = value; } } /** * Created in response to http://code.google.com/p/google-gson/issues/detail?id=96 */ public void disabled_testLargeCollectionSerialization() { int count = 1400000; List<CollectionEntry> list = new ArrayList<CollectionEntry>(count); for (int i = 0; i < count; ++i) { list.add(new CollectionEntry("name"+i,"value"+i)); } gson.toJson(list); } /** * Created in response to http://code.google.com/p/google-gson/issues/detail?id=96 */ public void disabled_testLargeCollectionDeserialization() { StringBuilder sb = new StringBuilder(); int count = 87000; boolean first = true; sb.append('['); for (int i = 0; i < count; ++i) { if (first) { first = false; } else { sb.append(','); } 
sb.append("{name:'name").append(i).append("',value:'value").append(i).append("'}"); } sb.append(']'); String json = sb.toString(); Type collectionType = new TypeToken<ArrayList<CollectionEntry>>(){}.getType(); List<CollectionEntry> list = gson.fromJson(json, collectionType); assertEquals(count, list.size()); } /** * Created in response to http://code.google.com/p/google-gson/issues/detail?id=96 */ // Last I tested, Gson was able to serialize upto 14MB byte array public void disabled_testByteArraySerialization() { for (int size = 4145152; true; size += 1036288) { byte[] ba = new byte[size]; for (int i = 0; i < size; ++i) { ba[i] = 0x05; } gson.toJson(ba); System.out.printf("Gson could serialize a byte array of size: %d\n", size); } } /** * Created in response to http://code.google.com/p/google-gson/issues/detail?id=96 */ // Last I tested, Gson was able to deserialize a byte array of 11MB public void disable_testByteArrayDeserialization() { for (int numElements = 10639296; true; numElements += 16384) { StringBuilder sb = new StringBuilder(numElements*2); sb.append("["); boolean first = true; for (int i = 0; i < numElements; ++i) { if (first) { first = false; } else { sb.append(","); } sb.append("5"); } sb.append("]"); String json = sb.toString(); byte[] ba = gson.fromJson(json, byte[].class); System.out.printf("Gson could deserialize a byte array of size: %d\n", ba.length); } } // The tests to measure serialization and deserialization performance of Gson // Based on the discussion at // http://groups.google.com/group/google-gson/browse_thread/thread/7a50b17a390dfaeb // Test results: 10/19/2009 // Serialize classes avg time: 60 ms // Deserialized classes avg time: 70 ms // Serialize exposed classes avg time: 159 ms // Deserialized exposed classes avg time: 173 ms public void disabled_testSerializeClasses() { ClassWithList c = new ClassWithList("str"); for (int i = 0; i < COLLECTION_SIZE; ++i) { c.list.add(new ClassWithField("element-" + i)); } StringWriter w = new 
StringWriter(); long t1 = System.currentTimeMillis(); for (int i = 0; i < NUM_ITERATIONS; ++i) { gson.toJson(c, w); } long t2 = System.currentTimeMillis(); long avg = (t2 - t1) / NUM_ITERATIONS; System.out.printf("Serialize classes avg time: %d ms\n", avg); } public void disabled_testDeserializeClasses() { String json = buildJsonForClassWithList(); ClassWithList[] target = new ClassWithList[NUM_ITERATIONS]; long t1 = System.currentTimeMillis(); for (int i = 0; i < NUM_ITERATIONS; ++i) { target[i] = gson.fromJson(json, ClassWithList.class); } long t2 = System.currentTimeMillis(); long avg = (t2 - t1) / NUM_ITERATIONS; System.out.printf("Deserialize classes avg time: %d ms\n", avg); } public void disable_testLargeObjectSerializationAndDeserialization() { Map<String, Long> largeObject = new HashMap<String, Long>(); for (long l = 0; l < 100000; l++) { largeObject.put("field" + l, l); } long t1 = System.currentTimeMillis(); String json = gson.toJson(largeObject); long t2 = System.currentTimeMillis(); System.out.printf("Large object serialized in: %d ms\n", (t2 - t1)); t1 = System.currentTimeMillis(); gson.fromJson(json, new TypeToken<Map<String, Long>>() {}.getType()); t2 = System.currentTimeMillis(); System.out.printf("Large object deserialized in: %d ms\n", (t2 - t1)); } public void disabled_testSerializeExposedClasses() { ClassWithListOfObjects c1 = new ClassWithListOfObjects("str"); for (int i1 = 0; i1 < COLLECTION_SIZE; ++i1) { c1.list.add(new ClassWithExposedField("element-" + i1)); } ClassWithListOfObjects c = c1; StringWriter w = new StringWriter(); long t1 = System.currentTimeMillis(); for (int i = 0; i < NUM_ITERATIONS; ++i) { gson.toJson(c, w); } long t2 = System.currentTimeMillis(); long avg = (t2 - t1) / NUM_ITERATIONS; System.out.printf("Serialize exposed classes avg time: %d ms\n", avg); } public void disabled_testDeserializeExposedClasses() { String json = buildJsonForClassWithList(); ClassWithListOfObjects[] target = new 
ClassWithListOfObjects[NUM_ITERATIONS]; long t1 = System.currentTimeMillis(); for (int i = 0; i < NUM_ITERATIONS; ++i) { target[i] = gson.fromJson(json, ClassWithListOfObjects.class); } long t2 = System.currentTimeMillis(); long avg = (t2 - t1) / NUM_ITERATIONS; System.out.printf("Deserialize exposed classes avg time: %d ms\n", avg); } public void disabled_testLargeGsonMapRoundTrip() throws Exception { Map<Long, Long> original = new HashMap<Long, Long>(); for (long i = 0; i < 1000000; i++) { original.put(i, i + 1); } Gson gson = new Gson(); String json = gson.toJson(original); Type longToLong = new TypeToken<Map<Long, Long>>(){}.getType(); gson.fromJson(json, longToLong); } private String buildJsonForClassWithList() { StringBuilder sb = new StringBuilder("{"); sb.append("field:").append("'str',"); sb.append("list:["); boolean first = true; for (int i = 0; i < COLLECTION_SIZE; ++i) { if (first) { first = false; } else { sb.append(","); } sb.append("{field:'element-" + i + "'}"); } sb.append("]"); sb.append("}"); String json = sb.toString(); return json; } @SuppressWarnings("unused") private static final class ClassWithList { final String field; final List<ClassWithField> list = new ArrayList<ClassWithField>(COLLECTION_SIZE); ClassWithList() { this(null); } ClassWithList(String field) { this.field = field; } } @SuppressWarnings("unused") private static final class ClassWithField { final String field; ClassWithField() { this(""); } public ClassWithField(String field) { this.field = field; } } @SuppressWarnings("unused") private static final class ClassWithListOfObjects { @Expose final String field; @Expose final List<ClassWithExposedField> list = new ArrayList<ClassWithExposedField>(COLLECTION_SIZE); ClassWithListOfObjects() { this(null); } ClassWithListOfObjects(String field) { this.field = field; } } @SuppressWarnings("unused") private static final class ClassWithExposedField { @Expose final String field; ClassWithExposedField() { this(""); } 
ClassWithExposedField(String field) { this.field = field; } } }
testcenter/gson
gson/src/test/java/com/google/gson/metrics/PerformanceTest.java
Java
apache-2.0
10,750
// (C) Copyright 2015 Martin Dougiamas // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. angular.module('mm.addons.mod_assign') /** * Mod assign handlers. * * @module mm.addons.mod_assign * @ngdoc service * @name $mmaModAssignHandlers */ .factory('$mmaModAssignHandlers', function($mmCourse, $mmaModAssign, $state, $q, $mmContentLinksHelper, $mmUtil, mmCoreDownloading, mmCoreNotDownloaded, mmCoreOutdated, $mmEvents, mmCoreEventPackageStatusChanged, $mmSite, mmaModAssignComponent, $mmCoursePrefetchDelegate, $mmaModAssignPrefetchHandler, $mmaModAssignSync) { var self = {}; /** * Course content handler. * * @module mm.addons.mod_assign * @ngdoc method * @name $mmaModAssignHandlers#courseContent */ self.courseContent = function() { var self = {}; /** * Whether or not the handler is enabled for the site. * * @return {Promise} */ self.isEnabled = function() { return $mmaModAssign.isPluginEnabled(); }; /** * Get the controller. * * @param {Object} module The module info. * @param {Number} courseId The course ID. 
* @return {Function} */ self.getController = function(module, courseId) { return function($scope) { var downloadBtn = { hidden: true, icon: 'ion-ios-cloud-download-outline', label: 'mm.core.download', action: function(e) { if (e) { e.preventDefault(); e.stopPropagation(); } download(); } }, refreshBtn = { hidden: true, icon: 'ion-android-refresh', label: 'mm.core.refresh', action: function(e) { if (e) { e.preventDefault(); e.stopPropagation(); } $mmaModAssign.invalidateContent(module.id, courseId).finally(function() { download(); }); } }; $scope.title = module.name; $scope.icon = $mmCourse.getModuleIconSrc('assign'); $scope.class = 'mma-mod_assign-handler'; $scope.buttons = $mmaModAssign.isPrefetchEnabled() ? [downloadBtn, refreshBtn] : []; $scope.spinner = true; // Show spinner while calculating status. $scope.action = function(e) { if (e) { e.preventDefault(); e.stopPropagation(); } $state.go('site.mod_assign', {module: module, courseid: courseId}); }; function download() { $scope.spinner = true; // Show spinner since this operation might take a while. // We need to call getDownloadSize, the package might have been updated. $mmaModAssignPrefetchHandler.getDownloadSize(module, courseId).then(function(size) { $mmUtil.confirmDownloadSize(size).then(function() { $mmaModAssignPrefetchHandler.prefetch(module, courseId).catch(function() { if (!$scope.$$destroyed) { $mmUtil.showErrorModal('mm.core.errordownloading', true); } }); }).catch(function() { // User hasn't confirmed, stop spinner. $scope.spinner = false; }); }).catch(function(error) { $scope.spinner = false; if (error) { $mmUtil.showErrorModal(error); } else { $mmUtil.showErrorModal('mm.core.errordownloading', true); } }); } // Show buttons according to module status. function showStatus(status) { if (status) { $scope.spinner = status === mmCoreDownloading; downloadBtn.hidden = status !== mmCoreNotDownloaded; refreshBtn.hidden = status !== mmCoreOutdated; } } // Listen for changes on this module status. 
var statusObserver = $mmEvents.on(mmCoreEventPackageStatusChanged, function(data) { if (data.siteid === $mmSite.getId() && data.componentId === module.id && data.component === mmaModAssignComponent) { showStatus(data.status); } }); // Get current status to decide which icon should be shown. $mmCoursePrefetchDelegate.getModuleStatus(module, courseId).then(showStatus); $scope.$on('$destroy', function() { statusObserver && statusObserver.off && statusObserver.off(); }); }; }; return self; }; /** * Content links handler. * * @module mm.addons.mod_assign * @ngdoc method * @name $mmaModAssignHandlers#linksHandler */ self.linksHandler = function() { var self = {}; /** * Whether or not the handler is enabled for a certain site. * * @param {String} siteId Site ID. * @param {Number} [courseId] Course ID related to the URL. * @return {Promise} Promise resolved with true if enabled. */ function isEnabled(siteId, courseId) { return $mmaModAssign.isPluginEnabled(siteId).then(function(enabled) { if (!enabled) { return false; } return courseId || $mmCourse.canGetModuleWithoutCourseId(siteId); }); } /** * Get actions to perform with the link. * * @param {String[]} siteIds Site IDs the URL belongs to. * @param {String} url URL to treat. * @param {Number} [courseId] Course ID related to the URL. * @return {Promise} Promise resolved with the list of actions. * See {@link $mmContentLinksDelegate#registerLinkHandler}. */ self.getActions = function(siteIds, url, courseId) { // Check it's an assign URL. if (typeof self.handles(url) != 'undefined') { return $mmContentLinksHelper.treatModuleIndexUrl(siteIds, url, isEnabled, courseId); } return $q.when([]); }; /** * Check if the URL is handled by this handler. If so, returns the URL of the site. * * @param {String} url URL to check. * @return {String} Site URL. Undefined if the URL doesn't belong to this handler. 
*/ self.handles = function(url) { var position = url.indexOf('/mod/assign/view.php'); if (position > -1) { return url.substr(0, position); } }; return self; }; /** * Synchronization handler. * * @module mm.addons.mod_assign * @ngdoc method * @name $mmaModAssignHandlers#syncHandler */ self.syncHandler = function() { var self = {}; /** * Execute the process. * Receives the ID of the site affected, undefined for all sites. * * @param {String} [siteId] ID of the site affected, undefined for all sites. * @return {Promise} Promise resolved when done, rejected if failure. */ self.execute = function(siteId) { return $mmaModAssignSync.syncAllAssignments(siteId); }; /** * Get the time between consecutive executions. * * @return {Number} Time between consecutive executions (in ms). */ self.getInterval = function() { return 600000; // 10 minutes. }; /** * Whether it's a synchronization process or not. * * @return {Boolean} True if is a sync process, false otherwise. */ self.isSync = function() { return true; }; /** * Whether the process uses network or not. * * @return {Boolean} True if uses network, false otherwise. */ self.usesNetwork = function() { return true; }; return self; }; return self; });
luky9601/SPISPGapBuild
www/addons/mod/assign/services/handlers.js
JavaScript
apache-2.0
9,678
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.ComponentModel.Composition; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Internal.Log; using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem; using Microsoft.VisualStudio.LanguageServices.Implementation.TaskList; using Roslyn.Utilities; namespace Microsoft.VisualStudio.LanguageServices.Implementation { [Export(typeof(AnalyzerDependencyCheckingService))] internal sealed class AnalyzerDependencyCheckingService { private static readonly object s_dependencyConflictErrorId = new object(); private static readonly IIgnorableAssemblyList s_systemPrefixList = new IgnorableAssemblyNamePrefixList("System"); private readonly VisualStudioWorkspaceImpl _workspace; private readonly HostDiagnosticUpdateSource _updateSource; private readonly BindingRedirectionService _bindingRedirectionService; private CancellationTokenSource _cancellationTokenSource = new CancellationTokenSource(); private Task<AnalyzerDependencyResults> _task = Task.FromResult(AnalyzerDependencyResults.Empty); private ImmutableHashSet<string> _analyzerPaths = ImmutableHashSet.Create<string>(StringComparer.OrdinalIgnoreCase); [ImportingConstructor] public AnalyzerDependencyCheckingService( VisualStudioWorkspaceImpl workspace, HostDiagnosticUpdateSource updateSource) { _workspace = workspace; _updateSource = updateSource; _bindingRedirectionService = new BindingRedirectionService(); } public async void CheckForConflictsAsync() { AnalyzerDependencyResults results = null; try { results = await GetConflictsAsync().ConfigureAwait(continueOnCapturedContext: true); } catch { return; } if (results == null) { return; } var 
builder = ImmutableArray.CreateBuilder<DiagnosticData>(); var conflicts = results.Conflicts; var missingDependencies = results.MissingDependencies; foreach (var project in _workspace.ProjectTracker.Projects) { builder.Clear(); foreach (var conflict in conflicts) { if (project.CurrentProjectAnalyzersContains(conflict.AnalyzerFilePath1) || project.CurrentProjectAnalyzersContains(conflict.AnalyzerFilePath2)) { builder.Add(CreateDiagnostic(project.Id, conflict)); } } foreach (var missingDependency in missingDependencies) { if (project.CurrentProjectAnalyzersContains(missingDependency.AnalyzerPath)) { builder.Add(CreateDiagnostic(project.Id, missingDependency)); } } _updateSource.UpdateDiagnosticsForProject(project.Id, s_dependencyConflictErrorId, builder.ToImmutable()); } foreach (var conflict in conflicts) { LogConflict(conflict); } foreach (var missingDependency in missingDependencies) { LogMissingDependency(missingDependency); } } private void LogConflict(AnalyzerDependencyConflict conflict) { Logger.Log( FunctionId.AnalyzerDependencyCheckingService_LogConflict, KeyValueLogMessage.Create(m => { m["Identity"] = conflict.Identity.ToString(); m["Analyzer1"] = conflict.AnalyzerFilePath1; m["Analyzer2"] = conflict.AnalyzerFilePath2; })); } private void LogMissingDependency(MissingAnalyzerDependency missingDependency) { Logger.Log( FunctionId.AnalyzerDependencyCheckingService_LogMissingDependency, KeyValueLogMessage.Create(m => { m["Analyzer"] = missingDependency.AnalyzerPath; m["Identity"] = missingDependency.DependencyIdentity; })); } private DiagnosticData CreateDiagnostic(ProjectId projectId, AnalyzerDependencyConflict conflict) { string id = ServicesVSResources.WRN_AnalyzerDependencyConflictId; string category = ServicesVSResources.ErrorCategory; string message = string.Format( ServicesVSResources.WRN_AnalyzerDependencyConflictMessage, conflict.AnalyzerFilePath1, conflict.AnalyzerFilePath2, conflict.Identity.ToString()); DiagnosticData data = new DiagnosticData( id, 
category, message, ServicesVSResources.WRN_AnalyzerDependencyConflictMessage, severity: DiagnosticSeverity.Warning, defaultSeverity: DiagnosticSeverity.Warning, isEnabledByDefault: true, warningLevel: 0, customTags: ImmutableArray<string>.Empty, properties: ImmutableDictionary<string, string>.Empty, workspace: _workspace, projectId: projectId); return data; } private DiagnosticData CreateDiagnostic(ProjectId projectId, MissingAnalyzerDependency missingDependency) { string id = ServicesVSResources.WRN_MissingAnalyzerReferenceId; string category = ServicesVSResources.ErrorCategory; string message = string.Format( ServicesVSResources.WRN_MissingAnalyzerReferenceMessage, missingDependency.AnalyzerPath, missingDependency.DependencyIdentity.ToString()); DiagnosticData data = new DiagnosticData( id, category, message, ServicesVSResources.WRN_MissingAnalyzerReferenceMessage, severity: DiagnosticSeverity.Warning, defaultSeverity: DiagnosticSeverity.Warning, isEnabledByDefault: true, warningLevel: 0, customTags: ImmutableArray<string>.Empty, properties: ImmutableDictionary<string, string>.Empty, workspace: _workspace, projectId: projectId); return data; } private Task<AnalyzerDependencyResults> GetConflictsAsync() { ImmutableHashSet<string> currentAnalyzerPaths = _workspace.CurrentSolution .Projects .SelectMany(p => p.AnalyzerReferences) .OfType<AnalyzerFileReference>() .Select(a => a.FullPath) .ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); if (currentAnalyzerPaths.SetEquals(_analyzerPaths)) { return _task; } _cancellationTokenSource.Cancel(); _cancellationTokenSource = new CancellationTokenSource(); _analyzerPaths = currentAnalyzerPaths; _task = _task.SafeContinueWith(_ => { IEnumerable<AssemblyIdentity> loadedAssemblies = AppDomain.CurrentDomain.GetAssemblies().Select(assembly => AssemblyIdentity.FromAssemblyDefinition(assembly)); IgnorableAssemblyIdentityList loadedAssembliesList = new IgnorableAssemblyIdentityList(loadedAssemblies); IIgnorableAssemblyList[] 
ignorableAssemblyLists = new[] { s_systemPrefixList, loadedAssembliesList }; return new AnalyzerDependencyChecker(currentAnalyzerPaths, ignorableAssemblyLists, _bindingRedirectionService).Run(_cancellationTokenSource.Token); }, TaskScheduler.Default); return _task; } private class BindingRedirectionService : IBindingRedirectionService { public AssemblyIdentity ApplyBindingRedirects(AssemblyIdentity originalIdentity) { string redirectedAssemblyName = AppDomain.CurrentDomain.ApplyPolicy(originalIdentity.ToString()); AssemblyIdentity redirectedAssemblyIdentity; if (AssemblyIdentity.TryParseDisplayName(redirectedAssemblyName, out redirectedAssemblyIdentity)) { return redirectedAssemblyIdentity; } return originalIdentity; } } } }
kuhlenh/roslyn
src/VisualStudio/Core/Def/Implementation/AnalyzerDependencyCheckingService.cs
C#
apache-2.0
9,091
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.thrift;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.thrift.generated.Hbase;

/**
 * Wraps an {@link Hbase.Iface} in a dynamic proxy so that the wall-clock
 * processing time of each method call is reported to {@link ThriftMetrics}.
 */
public class HbaseHandlerMetricsProxy implements InvocationHandler {

  public static final Log LOG = LogFactory.getLog(
      HbaseHandlerMetricsProxy.class);

  /** The real handler every call is delegated to. */
  private final Hbase.Iface handler;
  /** Sink for per-method timing measurements. */
  private final ThriftMetrics metrics;

  /**
   * Creates a metrics-reporting proxy around {@code handler}.
   *
   * @param handler the underlying Thrift handler to delegate to
   * @param metrics receives the per-method processing times
   * @param conf    configuration (currently unused, kept for API compatibility)
   * @return a proxy implementing {@link Hbase.Iface}
   */
  public static Hbase.Iface newInstance(Hbase.Iface handler,
                                        ThriftMetrics metrics,
                                        Configuration conf) {
    return (Hbase.Iface) Proxy.newProxyInstance(
        handler.getClass().getClassLoader(),
        new Class[]{Hbase.Iface.class},
        new HbaseHandlerMetricsProxy(handler, metrics, conf));
  }

  private HbaseHandlerMetricsProxy(
      Hbase.Iface handler, ThriftMetrics metrics, Configuration conf) {
    this.handler = handler;
    this.metrics = metrics;
  }

  /**
   * Invokes {@code m} on the wrapped handler, timing the call and recording
   * the elapsed time under the method's name.
   *
   * @throws Throwable the target's own exception is unwrapped and rethrown
   */
  @Override
  public Object invoke(Object proxy, Method m, Object[] args)
      throws Throwable {
    Object result;
    try {
      long start = now();
      result = m.invoke(handler, args);
      // Note: elapsed time is nanoseconds truncated to int; incMethodTime's
      // expected unit is defined by ThriftMetrics.
      int processTime = (int)(now() - start);
      metrics.incMethodTime(m.getName(), processTime);
    } catch (InvocationTargetException e) {
      // Re-throw the handler's real exception rather than the reflection wrapper.
      throw e.getTargetException();
    } catch (Exception e) {
      // Preserve the original exception as the cause so the stack trace
      // is not lost (previously only getMessage() was kept).
      throw new RuntimeException(
          "unexpected invocation exception: " + e.getMessage(), e);
    }
    return result;
  }

  private static long now() {
    return System.nanoTime();
  }
}
xiaofu/apache-hbase-0.94.10-read
src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java
Java
apache-2.0
2,717
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2011 The Zed Attack Proxy Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.clientapi.ant; import org.apache.tools.ant.BuildException; public class NewSessionTask extends ZapTask { private String name; private String apikey; @Override public void execute() throws BuildException { try { this.getClientApi().core.newSession(apikey, name, "true"); } catch (Exception e) { throw new BuildException(e); } } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getApikey() { return apikey; } public void setApikey(String apikey) { this.apikey = apikey; } }
efdutra/zaproxy
src/org/zaproxy/clientapi/ant/NewSessionTask.java
Java
apache-2.0
1,392
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.zookeeper.test;

import java.io.File;
import java.util.List;
import java.util.LinkedList;

import org.apache.log4j.Logger;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.PortAssignment;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZKTestCase;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.server.quorum.Leader.Proposal;
import org.apache.zookeeper.server.ServerCnxnFactory;
import org.apache.zookeeper.server.SyncRequestProcessor;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnSnapLog;
import org.junit.Assert;
import org.junit.Test;

/** After a replica starts, it should load commits in its committedLog list.
 * This test checks if committedLog != 0 after replica restarted.
 */
public class RestoreCommittedLogTest extends ZKTestCase implements Watcher {
    private static final Logger LOG = Logger.getLogger(RestoreCommittedLogTest.class);
    // Unique port per test run so parallel tests don't collide.
    private static String HOSTPORT = "127.0.0.1:" + PortAssignment.unique();
    private static final int CONNECTION_TIMEOUT = 3000;

    /**
     * Verifies that after a server restart the committed-log list reloaded
     * from the transaction log / snapshots is non-empty.
     *
     * @throws Exception an exception might be thrown here
     */
    @Test
    public void testRestoreCommittedLog() throws Exception {
        File tmpDir = ClientBase.createTmpDir();
        ClientBase.setupTestEnv();
        ZooKeeperServer zks = new ZooKeeperServer(tmpDir, tmpDir, 3000);
        // Low snap count so the 2000 creates below roll over several snapshots.
        SyncRequestProcessor.setSnapCount(100);
        final int PORT = Integer.parseInt(HOSTPORT.split(":")[1]);
        ServerCnxnFactory f = ServerCnxnFactory.createFactory(PORT, -1);
        f.startup(zks);
        Assert.assertTrue("waiting for server being up ",
                ClientBase.waitForServerUp(HOSTPORT, CONNECTION_TIMEOUT));
        ZooKeeper zk = new ZooKeeper(HOSTPORT, CONNECTION_TIMEOUT, this);
        try {
            // Generate enough transactions to populate the committed log.
            for (int i = 0; i < 2000; i++) {
                zk.create("/invalidsnap-" + i, new byte[0], Ids.OPEN_ACL_UNSAFE,
                        CreateMode.PERSISTENT);
            }
        } finally {
            zk.close();
        }
        f.shutdown();
        zks.shutdown();
        Assert.assertTrue("waiting for server to shutdown",
                ClientBase.waitForServerDown(HOSTPORT, CONNECTION_TIMEOUT));

        // start server again
        zks = new ZooKeeperServer(tmpDir, tmpDir, 3000);
        // startdata() reloads the database from disk, which should repopulate
        // the in-memory committed log.
        zks.startdata();
        LinkedList<Proposal> committedLog = zks.getZKDatabase().getCommittedLog();
        int logsize = committedLog.size();
        LOG.info("committedLog size = " + logsize);
        Assert.assertTrue("log size != 0", (logsize != 0));
        zks.shutdown();
    }

    // Watcher callback; connection events are irrelevant to this test.
    public void process(WatchedEvent event) {
        // do nothing
    }
}
shayhatsor/zookeeper
src/java/test/org/apache/zookeeper/test/RestoreCommittedLogTest.java
Java
apache-2.0
3,654
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.file;

import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.infer.annotation.SuppressFieldNotInitialized;
import com.google.common.base.Optional;
import com.google.common.hash.HashCode;

import java.net.URI;

/**
 * Description for {@code remote_file} build rules, which fetch a file from a
 * URL via the injected {@link Downloader} and check it against an expected
 * SHA-1 hash.
 */
public class RemoteFileDescription implements Description<RemoteFileDescription.Arg> {

  public static final BuildRuleType TYPE = BuildRuleType.of("remote_file");

  /** Performs the actual download at build time. */
  private final Downloader downloader;

  public RemoteFileDescription(Downloader downloader) {
    this.downloader = downloader;
  }

  @Override
  public BuildRuleType getBuildRuleType() {
    return TYPE;
  }

  @Override
  public Arg createUnpopulatedConstructorArg() {
    return new Arg();
  }

  /**
   * Creates a {@link RemoteFile} rule from the parsed arguments.
   * The output file name defaults to the target's short name (with flavor
   * postfix) when {@code out} is not given.
   */
  @Override
  public <A extends Arg> BuildRule createBuildRule(
      TargetGraph targetGraph,
      BuildRuleParams params,
      BuildRuleResolver resolver,
      A args) {
    // Throws IllegalArgumentException if args.sha1 is not a valid hex hash.
    HashCode sha1 = HashCode.fromString(args.sha1);
    String out = args.out.or(params.getBuildTarget().getShortNameAndFlavorPostfix());
    return new RemoteFile(
        params,
        new SourcePathResolver(resolver),
        downloader,
        args.url,
        sha1,
        out);
  }

  /**
   * Constructor-arg container populated by the parser. Declared {@code static}
   * so instances carry no hidden reference to the enclosing description
   * (the previous non-static inner class retained one needlessly).
   */
  @SuppressFieldNotInitialized
  public static class Arg {
    public URI url;
    public String sha1;
    public Optional<String> out;
  }
}
Learn-Android-app/buck
src/com/facebook/buck/file/RemoteFileDescription.java
Java
apache-2.0
2,205
/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package rest import ( "testing" "k8s.io/apimachinery/pkg/api/errors" genericapirequest "k8s.io/apiserver/pkg/endpoints/request" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/genericapiserver/registry/generic" genericregistry "k8s.io/kubernetes/pkg/genericapiserver/registry/generic/registry" "k8s.io/kubernetes/pkg/registry/registrytest" ) func TestPodLogValidates(t *testing.T) { config, server := registrytest.NewEtcdStorage(t, "") defer server.Terminate(t) s, destroyFunc := generic.NewRawStorage(config) defer destroyFunc() store := &genericregistry.Store{ Storage: s, } logRest := &LogREST{Store: store, KubeletConn: nil} negativeOne := int64(-1) testCases := []*api.PodLogOptions{ {SinceSeconds: &negativeOne}, {TailLines: &negativeOne}, } for _, tc := range testCases { _, err := logRest.Get(genericapirequest.NewDefaultContext(), "test", tc) if !errors.IsInvalid(err) { t.Fatalf("unexpected error: %v", err) } } }
rkouj/kubernetes
pkg/registry/core/pod/rest/log_test.go
GO
apache-2.0
1,537
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1 import ( "k8s.io/apimachinery/pkg/labels" "k8s.io/client-go/pkg/api/v1" ) // NodeConditionPredicate is a function that indicates whether the given node's conditions meet // some set of criteria defined by the function. type NodeConditionPredicate func(node *v1.Node) bool // NodeListerExpansion allows custom methods to be added to // NodeLister. type NodeListerExpansion interface { ListWithPredicate(predicate NodeConditionPredicate) ([]*v1.Node, error) } func (l *nodeLister) ListWithPredicate(predicate NodeConditionPredicate) ([]*v1.Node, error) { nodes, err := l.List(labels.Everything()) if err != nil { return nil, err } var filtered []*v1.Node for i := range nodes { if predicate(nodes[i]) { filtered = append(filtered, nodes[i]) } } return filtered, nil }
jjo/kubeless
vendor/k8s.io/client-go/listers/core/v1/node_expansion.go
GO
apache-2.0
1,373
var Stack = require('./_Stack'),
    equalArrays = require('./_equalArrays'),
    equalByTag = require('./_equalByTag'),
    equalObjects = require('./_equalObjects'),
    getTag = require('./_getTag'),
    isArray = require('./isArray'),
    isBuffer = require('./isBuffer'),
    isTypedArray = require('./isTypedArray');

/** Used to compose bitmasks for comparison styles. */
var PARTIAL_COMPARE_FLAG = 2;

/** `Object#toString` result references. */
var argsTag = '[object Arguments]',
    arrayTag = '[object Array]',
    objectTag = '[object Object]';

/** Used for built-in method references. */
var objectProto = Object.prototype;

/** Used to check objects for own properties. */
var hasOwnProperty = objectProto.hasOwnProperty;

/**
 * A specialized version of `baseIsEqual` for arrays and objects which performs
 * deep comparisons and tracks traversed objects enabling objects with circular
 * references to be compared.
 *
 * @private
 * @param {Object} object The object to compare.
 * @param {Object} other The other object to compare.
 * @param {Function} equalFunc The function to determine equivalents of values.
 * @param {Function} [customizer] The function to customize comparisons.
 * @param {number} [bitmask] The bitmask of comparison flags. See `baseIsEqual`
 *  for more details.
 * @param {Object} [stack] Tracks traversed `object` and `other` objects.
 * @returns {boolean} Returns `true` if the objects are equivalent, else `false`.
 */
function baseIsEqualDeep(object, other, equalFunc, customizer, bitmask, stack) {
  var objectIsArray = isArray(object),
      otherIsArray = isArray(other);

  // Arrays are tagged directly; everything else is classified via `getTag`,
  // with `arguments` objects treated as plain objects.
  var objectTagValue = objectIsArray ? arrayTag : getTag(object);
  var otherTagValue = otherIsArray ? arrayTag : getTag(other);
  if (objectTagValue == argsTag) {
    objectTagValue = objectTag;
  }
  if (otherTagValue == argsTag) {
    otherTagValue = objectTag;
  }

  var objectIsPlain = objectTagValue == objectTag,
      otherIsPlain = otherTagValue == objectTag,
      sameTag = objectTagValue == otherTagValue;

  if (sameTag && isBuffer(object)) {
    if (!isBuffer(other)) {
      return false;
    }
    // Compare buffers element-wise like arrays.
    objectIsArray = true;
    objectIsPlain = false;
  }

  if (sameTag && !objectIsPlain) {
    if (!stack) {
      stack = new Stack;
    }
    if (objectIsArray || isTypedArray(object)) {
      return equalArrays(object, other, equalFunc, customizer, bitmask, stack);
    }
    return equalByTag(object, other, objectTagValue, equalFunc, customizer, bitmask, stack);
  }

  // Unwrap lodash-wrapped values unless a partial comparison was requested.
  if (!(bitmask & PARTIAL_COMPARE_FLAG)) {
    var objectIsWrapped = objectIsPlain && hasOwnProperty.call(object, '__wrapped__'),
        otherIsWrapped = otherIsPlain && hasOwnProperty.call(other, '__wrapped__');

    if (objectIsWrapped || otherIsWrapped) {
      var unwrappedObject = objectIsWrapped ? object.value() : object,
          unwrappedOther = otherIsWrapped ? other.value() : other;

      if (!stack) {
        stack = new Stack;
      }
      return equalFunc(unwrappedObject, unwrappedOther, customizer, bitmask, stack);
    }
  }

  if (!sameTag) {
    return false;
  }
  if (!stack) {
    stack = new Stack;
  }
  return equalObjects(object, other, equalFunc, customizer, bitmask, stack);
}

module.exports = baseIsEqualDeep;
Chen-Hailin/iTCM.github.io
node_modules/babel-preset-es2015/node_modules/babel-plugin-transform-regenerator/node_modules/babel-types/node_modules/lodash/_baseIsEqualDeep.js
JavaScript
apache-2.0
3,083
/*
 * LogicPD i.MX31 SOM-LV development board support
 *
 * Copyright (c) 2009 Daniel Mack <daniel@caiaq.de>
 *
 * based on code for other MX31 boards,
 *
 * Copyright 2005-2007 Freescale Semiconductor
 * Copyright (c) 2009 Alberto Panizzo <maramaopercheseimorto@gmail.com>
 * Copyright (C) 2009 Valentin Longchamp, EPFL Mobots group
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/init.h>
#include <linux/gpio.h>
#include <linux/leds.h>
#include <linux/platform_device.h>

#include <asm/mach-types.h>
#include <asm/mach/arch.h>
#include <asm/mach/map.h>

#include "board-mx31lite.h"
#include "common.h"
#include "devices-imx31.h"
#include "hardware.h"
#include "iomux-mx3.h"

/*
 * This file contains board-specific initialization routines for the
 * LogicPD i.MX31 SOM-LV development board, aka 'LiteKit'.
 * If you design an own baseboard for the module, use this file as base
 * for support code.
 */

/* IOMUX routing for all baseboard peripherals: UART1, CSPI1 and SDHC1. */
static unsigned int litekit_db_board_pins[] __initdata = {
	/* UART1 */
	MX31_PIN_CTS1__CTS1,
	MX31_PIN_RTS1__RTS1,
	MX31_PIN_TXD1__TXD1,
	MX31_PIN_RXD1__RXD1,
	/* SPI 0 */
	MX31_PIN_CSPI1_SCLK__SCLK,
	MX31_PIN_CSPI1_MOSI__MOSI,
	MX31_PIN_CSPI1_MISO__MISO,
	MX31_PIN_CSPI1_SPI_RDY__SPI_RDY,
	MX31_PIN_CSPI1_SS0__SS0,
	MX31_PIN_CSPI1_SS1__SS1,
	MX31_PIN_CSPI1_SS2__SS2,
	/* SDHC1 */
	MX31_PIN_SD1_DATA0__SD1_DATA0,
	MX31_PIN_SD1_DATA1__SD1_DATA1,
	MX31_PIN_SD1_DATA2__SD1_DATA2,
	MX31_PIN_SD1_DATA3__SD1_DATA3,
	MX31_PIN_SD1_CLK__SD1_CLK,
	MX31_PIN_SD1_CMD__SD1_CMD,
};

/* UART */
static const struct imxuart_platform_data uart_pdata __initconst = {
	.flags = IMXUART_HAVE_RTSCTS,
};

/* MMC */

/* GPIO numbers for card detect and write protect, resolved in mxc_mmc1_init(). */
static int gpio_det, gpio_wp;

#define MMC_PAD_CFG (PAD_CTL_DRV_MAX | PAD_CTL_SRE_FAST | PAD_CTL_HYS_CMOS | \
		     PAD_CTL_ODE_CMOS)

/*
 * Read-only (write-protect) state of the SD card, read directly from the
 * WP switch GPIO. NOTE(review): assumes 1 == write-protected — confirm
 * against the board schematic.
 */
static int mxc_mmc1_get_ro(struct device *dev)
{
	return gpio_get_value(IOMUX_TO_GPIO(MX31_PIN_GPIO1_6));
}

/*
 * Set up SDHC1: configure the data/cmd/clk pad settings, claim the
 * card-detect and write-protect GPIOs, and install detect_irq on both edges
 * of the card-detect line. On failure the GPIOs are released in reverse
 * order of acquisition.
 */
static int mxc_mmc1_init(struct device *dev,
			 irq_handler_t detect_irq, void *data)
{
	int ret;

	gpio_det = IOMUX_TO_GPIO(MX31_PIN_DCD_DCE1);
	gpio_wp = IOMUX_TO_GPIO(MX31_PIN_GPIO1_6);

	/* Data/cmd lines get 100k pull-ups; the clock line does not. */
	mxc_iomux_set_pad(MX31_PIN_SD1_DATA0,
			  MMC_PAD_CFG | PAD_CTL_PUE_PUD | PAD_CTL_100K_PU);
	mxc_iomux_set_pad(MX31_PIN_SD1_DATA1,
			  MMC_PAD_CFG | PAD_CTL_PUE_PUD | PAD_CTL_100K_PU);
	mxc_iomux_set_pad(MX31_PIN_SD1_DATA2,
			  MMC_PAD_CFG | PAD_CTL_PUE_PUD | PAD_CTL_100K_PU);
	mxc_iomux_set_pad(MX31_PIN_SD1_DATA3,
			  MMC_PAD_CFG | PAD_CTL_PUE_PUD | PAD_CTL_100K_PU);
	mxc_iomux_set_pad(MX31_PIN_SD1_CMD,
			  MMC_PAD_CFG | PAD_CTL_PUE_PUD | PAD_CTL_100K_PU);
	mxc_iomux_set_pad(MX31_PIN_SD1_CLK, MMC_PAD_CFG);

	ret = gpio_request(gpio_det, "MMC detect");
	if (ret)
		return ret;

	ret = gpio_request(gpio_wp, "MMC w/p");
	if (ret)
		goto exit_free_det;

	gpio_direction_input(gpio_det);
	gpio_direction_input(gpio_wp);

	ret = request_irq(gpio_to_irq(IOMUX_TO_GPIO(MX31_PIN_DCD_DCE1)),
			  detect_irq,
			  IRQF_TRIGGER_RISING | IRQF_TRIGGER_FALLING,
			  "MMC detect", data);
	if (ret)
		goto exit_free_wp;

	return 0;

exit_free_wp:
	gpio_free(gpio_wp);

exit_free_det:
	gpio_free(gpio_det);

	return ret;
}

/* Tear down SDHC1: release the GPIOs and the card-detect interrupt. */
static void mxc_mmc1_exit(struct device *dev, void *data)
{
	gpio_free(gpio_det);
	gpio_free(gpio_wp);
	free_irq(gpio_to_irq(IOMUX_TO_GPIO(MX31_PIN_DCD_DCE1)), data);
}

static const struct imxmmc_platform_data mmc_pdata __initconst = {
	.get_ro	= mxc_mmc1_get_ro,
	.init	= mxc_mmc1_init,
	.exit	= mxc_mmc1_exit,
};

/* SPI */
static int spi_internal_chipselect[] = {
	MXC_SPI_CS(0),
	MXC_SPI_CS(1),
	MXC_SPI_CS(2),
};

static const struct spi_imx_master spi0_pdata __initconst = {
	.chipselect	= spi_internal_chipselect,
	.num_chipselect	= ARRAY_SIZE(spi_internal_chipselect),
};

/* GPIO LEDs */
static const struct gpio_led litekit_leds[] __initconst = {
	{
		.name           = "GPIO0",
		.gpio           = IOMUX_TO_GPIO(MX31_PIN_COMPARE),
		.active_low     = 1,
		.default_state  = LEDS_GPIO_DEFSTATE_OFF,
	},
	{
		.name           = "GPIO1",
		.gpio           = IOMUX_TO_GPIO(MX31_PIN_CAPTURE),
		.active_low     = 1,
		.default_state  = LEDS_GPIO_DEFSTATE_OFF,
	}
};

static const struct gpio_led_platform_data
		litekit_led_platform_data __initconst = {
	.leds		= litekit_leds,
	.num_leds	= ARRAY_SIZE(litekit_leds),
};

/*
 * Board entry point: routes the pins and registers the UART, MMC, SPI, LED,
 * watchdog and RTC devices for the LiteKit baseboard.
 */
void __init mx31lite_db_init(void)
{
	mxc_iomux_setup_multiple_pins(litekit_db_board_pins,
					ARRAY_SIZE(litekit_db_board_pins),
					"development board pins");
	imx31_add_imx_uart0(&uart_pdata);
	imx31_add_mxc_mmc(0, &mmc_pdata);
	imx31_add_spi_imx0(&spi0_pdata);
	gpio_led_register_device(-1, &litekit_led_platform_data);
	imx31_add_imx2_wdt();
	imx31_add_mxc_rtc();
}
OmniEvo/omnievo_kernel_moto_shamu
arch/arm/mach-imx/mx31lite-db.c
C
apache-2.0
5,126
#!/bin/bash # Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. function preamble { encoding="$1" cat <<PREAMBLE # *************************************************************************** # * # * Generated from index-$encoding.txt ( # * https://encoding.spec.whatwg.org/index-${encoding}.txt ) # * following the algorithm for the single byte legacy encoding # * described at http://encoding.spec.whatwg.org/#single-byte-decoder # * # *************************************************************************** <code_set_name> "${encoding}-html" <char_name_mask> "AXXXX" <mb_cur_max> 1 <mb_cur_min> 1 <uconv_class> "SBCS" <subchar> \x3F <icu:charsetFamily> "ASCII" CHARMAP PREAMBLE } # The list of html5 encodings. Note that iso-8859-8-i is not listed here # because its mapping table is exactly the same as iso-8859-8. The difference # is BiDi handling (logical vs visual). encodings="ibm866 iso-8859-2 iso-8859-3 iso-8859-4 iso-8859-5 iso-8859-6\ iso-8859-7 iso-8859-8 iso-8859-10 iso-8859-13 iso-8859-14\ iso-8859-15 iso-8859-16 koi8-r koi8-u macintosh\ windows-874 windows-1250 windows-1251 windows-1252 windows-1253\ windows-1254 windows-1255 windows-1256 windows-1257 windows-1258\ x-mac-cyrillic" ENCODING_DIR="$(dirname $0)/../source/data/mappings" for e in ${encodings} do output="${ENCODING_DIR}/${e}-html.ucm" index="index-${e}.txt" indexurl="https://encoding.spec.whatwg.org/index-${e}.txt" curl -o ${index} "${indexurl}" preamble ${e} > ${output} awk 'BEGIN \ { \ for (i=0; i < 0x80; ++i) \ { \ printf("<U%04X> \\x%02X |0\n", i, i);} \ } \ !/^#/ && !/^$/ \ { printf ("<U%4s> \\x%02X |0\n", substr($2, 3), $1 + 0x80); \ }' ${index} | sort >> ${output} echo 'END CHARMAP' >> ${output} rm ${index} done
aurelijusb/arangodb
3rdParty/V8-4.3.61/third_party/icu/scripts/single_byte_gen.sh
Shell
apache-2.0
2,093
/** * jQuery EasyUI 1.4.2 * * Copyright (c) 2009-2015 www.jeasyui.com. All rights reserved. * * Licensed under the GPL license: http://www.gnu.org/licenses/gpl.txt * To use it on other terms please contact us at info@jeasyui.com * */ (function($){ function _1(c){ var w=0; $(c).children().each(function(){ w+=$(this).outerWidth(true); }); return w; }; function _2(_3){ var _4=$.data(_3,"tabs").options; if(_4.tabPosition=="left"||_4.tabPosition=="right"||!_4.showHeader){ return; } var _5=$(_3).children("div.tabs-header"); var _6=_5.children("div.tabs-tool"); var _7=_5.children("div.tabs-scroller-left"); var _8=_5.children("div.tabs-scroller-right"); var _9=_5.children("div.tabs-wrap"); var _a=_5.outerHeight(); if(_4.plain){ _a-=_a-_5.height(); } _6._outerHeight(_a); var _b=_1(_5.find("ul.tabs")); var _c=_5.width()-_6._outerWidth(); if(_b>_c){ _7.add(_8).show()._outerHeight(_a); if(_4.toolPosition=="left"){ _6.css({left:_7.outerWidth(),right:""}); _9.css({marginLeft:_7.outerWidth()+_6._outerWidth(),marginRight:_8._outerWidth(),width:_c-_7.outerWidth()-_8.outerWidth()}); }else{ _6.css({left:"",right:_8.outerWidth()}); _9.css({marginLeft:_7.outerWidth(),marginRight:_8.outerWidth()+_6._outerWidth(),width:_c-_7.outerWidth()-_8.outerWidth()}); } }else{ _7.add(_8).hide(); if(_4.toolPosition=="left"){ _6.css({left:0,right:""}); _9.css({marginLeft:_6._outerWidth(),marginRight:0,width:_c}); }else{ _6.css({left:"",right:0}); _9.css({marginLeft:0,marginRight:_6._outerWidth(),width:_c}); } } }; function _d(_e){ var _f=$.data(_e,"tabs").options; var _10=$(_e).children("div.tabs-header"); if(_f.tools){ if(typeof _f.tools=="string"){ $(_f.tools).addClass("tabs-tool").appendTo(_10); $(_f.tools).show(); }else{ _10.children("div.tabs-tool").remove(); var _11=$("<div class=\"tabs-tool\"><table cellspacing=\"0\" cellpadding=\"0\" style=\"height:100%\"><tr></tr></table></div>").appendTo(_10); var tr=_11.find("tr"); for(var i=0;i<_f.tools.length;i++){ var td=$("<td></td>").appendTo(tr); var 
_12=$("<a href=\"javascript:void(0);\"></a>").appendTo(td); _12[0].onclick=eval(_f.tools[i].handler||function(){ }); _12.linkbutton($.extend({},_f.tools[i],{plain:true})); } } }else{ _10.children("div.tabs-tool").remove(); } }; function _13(_14,_15){ var _16=$.data(_14,"tabs"); var _17=_16.options; var cc=$(_14); if(!_17.doSize){ return; } if(_15){ $.extend(_17,{width:_15.width,height:_15.height}); } cc._size(_17); var _18=cc.children("div.tabs-header"); var _19=cc.children("div.tabs-panels"); var _1a=_18.find("div.tabs-wrap"); var ul=_1a.find(".tabs"); ul.children("li").removeClass("tabs-first tabs-last"); ul.children("li:first").addClass("tabs-first"); ul.children("li:last").addClass("tabs-last"); if(_17.tabPosition=="left"||_17.tabPosition=="right"){ _18._outerWidth(_17.showHeader?_17.headerWidth:0); _19._outerWidth(cc.width()-_18.outerWidth()); _18.add(_19)._outerHeight(_17.height); _1a._outerWidth(_18.width()); ul._outerWidth(_1a.width()).css("height",""); }else{ _18.children("div.tabs-scroller-left,div.tabs-scroller-right,div.tabs-tool").css("display",_17.showHeader?"block":"none"); _18._outerWidth(cc.width()).css("height",""); if(_17.showHeader){ _18.css("background-color",""); _1a.css("height",""); }else{ _18.css("background-color","transparent"); _18._outerHeight(0); _1a._outerHeight(0); } ul._outerHeight(_17.tabHeight).css("width",""); ul._outerHeight(ul.outerHeight()-ul.height()-1+_17.tabHeight).css("width",""); _19._size("height",isNaN(_17.height)?"":(_17.height-_18.outerHeight())); _19._size("width",isNaN(_17.width)?"":_17.width); } if(_16.tabs.length){ var d1=ul.outerWidth(true)-ul.width(); var li=ul.children("li:first"); var d2=li.outerWidth(true)-li.width(); var _1b=_18.width()-_18.children(".tabs-tool")._outerWidth(); var _1c=Math.floor((_1b-d1-d2*_16.tabs.length)/_16.tabs.length); $.map(_16.tabs,function(p){ _1d(p,(_17.justified&&$.inArray(_17.tabPosition,["top","bottom"])>=0)?_1c:undefined); }); 
if(_17.justified&&$.inArray(_17.tabPosition,["top","bottom"])>=0){ var _1e=_1b-d1-_1(ul); _1d(_16.tabs[_16.tabs.length-1],_1c+_1e); } } _2(_14); function _1d(p,_1f){ var _20=p.panel("options"); var p_t=_20.tab.find("a.tabs-inner"); var _1f=_1f?_1f:(parseInt(_20.tabWidth||_17.tabWidth||undefined)); if(_1f){ p_t._outerWidth(_1f); }else{ p_t.css("width",""); } p_t._outerHeight(_17.tabHeight); p_t.css("lineHeight",p_t.height()+"px"); p_t.find(".easyui-fluid:visible").triggerHandler("_resize"); }; }; function _21(_22){ var _23=$.data(_22,"tabs").options; var tab=_24(_22); if(tab){ var _25=$(_22).children("div.tabs-panels"); var _26=_23.width=="auto"?"auto":_25.width(); var _27=_23.height=="auto"?"auto":_25.height(); tab.panel("resize",{width:_26,height:_27}); } }; function _28(_29){ var _2a=$.data(_29,"tabs").tabs; var cc=$(_29).addClass("tabs-container"); var _2b=$("<div class=\"tabs-panels\"></div>").insertBefore(cc); cc.children("div").each(function(){ _2b[0].appendChild(this); }); cc[0].appendChild(_2b[0]); $("<div class=\"tabs-header\">"+"<div class=\"tabs-scroller-left\"></div>"+"<div class=\"tabs-scroller-right\"></div>"+"<div class=\"tabs-wrap\">"+"<ul class=\"tabs\"></ul>"+"</div>"+"</div>").prependTo(_29); cc.children("div.tabs-panels").children("div").each(function(i){ var _2c=$.extend({},$.parser.parseOptions(this),{selected:($(this).attr("selected")?true:undefined)}); _3c(_29,_2c,$(this)); }); cc.children("div.tabs-header").find(".tabs-scroller-left, .tabs-scroller-right").hover(function(){ $(this).addClass("tabs-scroller-over"); },function(){ $(this).removeClass("tabs-scroller-over"); }); cc.bind("_resize",function(e,_2d){ if($(this).hasClass("easyui-fluid")||_2d){ _13(_29); _21(_29); } return false; }); }; function _2e(_2f){ var _30=$.data(_2f,"tabs"); var _31=_30.options; $(_2f).children("div.tabs-header").unbind().bind("click",function(e){ if($(e.target).hasClass("tabs-scroller-left")){ $(_2f).tabs("scrollBy",-_31.scrollIncrement); }else{ 
if($(e.target).hasClass("tabs-scroller-right")){ $(_2f).tabs("scrollBy",_31.scrollIncrement); }else{ var li=$(e.target).closest("li"); if(li.hasClass("tabs-disabled")){ return false; } var a=$(e.target).closest("a.tabs-close"); if(a.length){ _5a(_2f,_32(li)); }else{ if(li.length){ var _33=_32(li); var _34=_30.tabs[_33].panel("options"); if(_34.collapsible){ _34.closed?_50(_2f,_33):_74(_2f,_33); }else{ _50(_2f,_33); } } } return false; } } }).bind("contextmenu",function(e){ var li=$(e.target).closest("li"); if(li.hasClass("tabs-disabled")){ return; } if(li.length){ _31.onContextMenu.call(_2f,e,li.find("span.tabs-title").html(),_32(li)); } }); function _32(li){ var _35=0; li.parent().children("li").each(function(i){ if(li[0]==this){ _35=i; return false; } }); return _35; }; }; function _36(_37){ var _38=$.data(_37,"tabs").options; var _39=$(_37).children("div.tabs-header"); var _3a=$(_37).children("div.tabs-panels"); _39.removeClass("tabs-header-top tabs-header-bottom tabs-header-left tabs-header-right"); _3a.removeClass("tabs-panels-top tabs-panels-bottom tabs-panels-left tabs-panels-right"); if(_38.tabPosition=="top"){ _39.insertBefore(_3a); }else{ if(_38.tabPosition=="bottom"){ _39.insertAfter(_3a); _39.addClass("tabs-header-bottom"); _3a.addClass("tabs-panels-top"); }else{ if(_38.tabPosition=="left"){ _39.addClass("tabs-header-left"); _3a.addClass("tabs-panels-right"); }else{ if(_38.tabPosition=="right"){ _39.addClass("tabs-header-right"); _3a.addClass("tabs-panels-left"); } } } } if(_38.plain==true){ _39.addClass("tabs-header-plain"); }else{ _39.removeClass("tabs-header-plain"); } _39.removeClass("tabs-header-narrow").addClass(_38.narrow?"tabs-header-narrow":""); var _3b=_39.find(".tabs"); _3b.removeClass("tabs-pill").addClass(_38.pill?"tabs-pill":""); _3b.removeClass("tabs-narrow").addClass(_38.narrow?"tabs-narrow":""); _3b.removeClass("tabs-justified").addClass(_38.justified?"tabs-justified":""); if(_38.border==true){ _39.removeClass("tabs-header-noborder"); 
_3a.removeClass("tabs-panels-noborder"); }else{ _39.addClass("tabs-header-noborder"); _3a.addClass("tabs-panels-noborder"); } _38.doSize=true; }; function _3c(_3d,_3e,pp){ _3e=_3e||{}; var _3f=$.data(_3d,"tabs"); var _40=_3f.tabs; if(_3e.index==undefined||_3e.index>_40.length){ _3e.index=_40.length; } if(_3e.index<0){ _3e.index=0; } var ul=$(_3d).children("div.tabs-header").find("ul.tabs"); var _41=$(_3d).children("div.tabs-panels"); var tab=$("<li>"+"<a href=\"javascript:void(0)\" class=\"tabs-inner\">"+"<span class=\"tabs-title\"></span>"+"<span class=\"tabs-icon\"></span>"+"</a>"+"</li>"); if(!pp){ pp=$("<div></div>"); } if(_3e.index>=_40.length){ tab.appendTo(ul); pp.appendTo(_41); _40.push(pp); }else{ tab.insertBefore(ul.children("li:eq("+_3e.index+")")); pp.insertBefore(_41.children("div.panel:eq("+_3e.index+")")); _40.splice(_3e.index,0,pp); } pp.panel($.extend({},_3e,{tab:tab,border:false,noheader:true,closed:true,doSize:false,iconCls:(_3e.icon?_3e.icon:undefined),onLoad:function(){ if(_3e.onLoad){ _3e.onLoad.call(this,arguments); } _3f.options.onLoad.call(_3d,$(this)); },onBeforeOpen:function(){ if(_3e.onBeforeOpen){ if(_3e.onBeforeOpen.call(this)==false){ return false; } } var p=$(_3d).tabs("getSelected"); if(p){ if(p[0]!=this){ $(_3d).tabs("unselect",_4a(_3d,p)); p=$(_3d).tabs("getSelected"); if(p){ return false; } }else{ _21(_3d); return false; } } var _42=$(this).panel("options"); _42.tab.addClass("tabs-selected"); var _43=$(_3d).find(">div.tabs-header>div.tabs-wrap"); var _44=_42.tab.position().left; var _45=_44+_42.tab.outerWidth(); if(_44<0||_45>_43.width()){ var _46=_44-(_43.width()-_42.tab.width())/2; $(_3d).tabs("scrollBy",_46); }else{ $(_3d).tabs("scrollBy",0); } var _47=$(this).panel("panel"); _47.css("display","block"); _21(_3d); _47.css("display","none"); },onOpen:function(){ if(_3e.onOpen){ _3e.onOpen.call(this); } var _48=$(this).panel("options"); _3f.selectHis.push(_48.title); _3f.options.onSelect.call(_3d,_48.title,_4a(_3d,this)); 
},onBeforeClose:function(){ if(_3e.onBeforeClose){ if(_3e.onBeforeClose.call(this)==false){ return false; } } $(this).panel("options").tab.removeClass("tabs-selected"); },onClose:function(){ if(_3e.onClose){ _3e.onClose.call(this); } var _49=$(this).panel("options"); _3f.options.onUnselect.call(_3d,_49.title,_4a(_3d,this)); }})); $(_3d).tabs("update",{tab:pp,options:pp.panel("options"),type:"header"}); }; function _4b(_4c,_4d){ var _4e=$.data(_4c,"tabs"); var _4f=_4e.options; if(_4d.selected==undefined){ _4d.selected=true; } _3c(_4c,_4d); _4f.onAdd.call(_4c,_4d.title,_4d.index); if(_4d.selected){ _50(_4c,_4d.index); } }; function _51(_52,_53){ _53.type=_53.type||"all"; var _54=$.data(_52,"tabs").selectHis; var pp=_53.tab; var _55=pp.panel("options").title; if(_53.type=="all"||_53=="body"){ pp.panel($.extend({},_53.options,{iconCls:(_53.options.icon?_53.options.icon:undefined)})); } if(_53.type=="all"||_53.type=="header"){ var _56=pp.panel("options"); var tab=_56.tab; if(_56.header){ tab.find(".tabs-inner").html($(_56.header)); }else{ var _57=tab.find("span.tabs-title"); var _58=tab.find("span.tabs-icon"); _57.html(_56.title); _58.attr("class","tabs-icon"); tab.find("a.tabs-close").remove(); if(_56.closable){ _57.addClass("tabs-closable"); $("<a href=\"javascript:void(0)\" class=\"tabs-close\"></a>").appendTo(tab); }else{ _57.removeClass("tabs-closable"); } if(_56.iconCls){ _57.addClass("tabs-with-icon"); _58.addClass(_56.iconCls); }else{ _57.removeClass("tabs-with-icon"); } if(_56.tools){ var _59=tab.find("span.tabs-p-tool"); if(!_59.length){ var _59=$("<span class=\"tabs-p-tool\"></span>").insertAfter(tab.find("a.tabs-inner")); } if($.isArray(_56.tools)){ for(var i=0;i<_56.tools.length;i++){ var t=$("<a href=\"javascript:void(0)\"></a>").appendTo(_59); t.addClass(_56.tools[i].iconCls); if(_56.tools[i].handler){ t.bind("click",{handler:_56.tools[i].handler},function(e){ if($(this).parents("li").hasClass("tabs-disabled")){ return; } e.data.handler.call(this); }); } 
} }else{ $(_56.tools).children().appendTo(_59); } var pr=_59.children().length*12; if(_56.closable){ pr+=8; }else{ pr-=3; _59.css("right","5px"); } _57.css("padding-right",pr+"px"); }else{ tab.find("span.tabs-p-tool").remove(); _57.css("padding-right",""); } } if(_55!=_56.title){ for(var i=0;i<_54.length;i++){ if(_54[i]==_55){ _54[i]=_56.title; } } } } _13(_52); $.data(_52,"tabs").options.onUpdate.call(_52,_56.title,_4a(_52,pp)); }; function _5a(_5b,_5c){ var _5d=$.data(_5b,"tabs").options; var _5e=$.data(_5b,"tabs").tabs; var _5f=$.data(_5b,"tabs").selectHis; if(!_60(_5b,_5c)){ return; } var tab=_61(_5b,_5c); var _62=tab.panel("options").title; var _63=_4a(_5b,tab); if(_5d.onBeforeClose.call(_5b,_62,_63)==false){ return; } var tab=_61(_5b,_5c,true); tab.panel("options").tab.remove(); tab.panel("destroy"); _5d.onClose.call(_5b,_62,_63); _13(_5b); for(var i=0;i<_5f.length;i++){ if(_5f[i]==_62){ _5f.splice(i,1); i--; } } var _64=_5f.pop(); if(_64){ _50(_5b,_64); }else{ if(_5e.length){ _50(_5b,0); } } }; function _61(_65,_66,_67){ var _68=$.data(_65,"tabs").tabs; if(typeof _66=="number"){ if(_66<0||_66>=_68.length){ return null; }else{ var tab=_68[_66]; if(_67){ _68.splice(_66,1); } return tab; } } for(var i=0;i<_68.length;i++){ var tab=_68[i]; if(tab.panel("options").title==_66){ if(_67){ _68.splice(i,1); } return tab; } } return null; }; function _4a(_69,tab){ var _6a=$.data(_69,"tabs").tabs; for(var i=0;i<_6a.length;i++){ if(_6a[i][0]==$(tab)[0]){ return i; } } return -1; }; function _24(_6b){ var _6c=$.data(_6b,"tabs").tabs; for(var i=0;i<_6c.length;i++){ var tab=_6c[i]; if(tab.panel("options").tab.hasClass("tabs-selected")){ return tab; } } return null; }; function _6d(_6e){ var _6f=$.data(_6e,"tabs"); var _70=_6f.tabs; for(var i=0;i<_70.length;i++){ if(_70[i].panel("options").selected){ _50(_6e,i); return; } } _50(_6e,_6f.options.selected); }; function _50(_71,_72){ var p=_61(_71,_72); if(p&&!p.is(":visible")){ _73(_71); p.panel("open"); } }; function 
_74(_75,_76){ var p=_61(_75,_76); if(p&&p.is(":visible")){ _73(_75); p.panel("close"); } }; function _73(_77){ $(_77).children("div.tabs-panels").each(function(){ $(this).stop(true,true); }); }; function _60(_78,_79){ return _61(_78,_79)!=null; }; function _7a(_7b,_7c){ var _7d=$.data(_7b,"tabs").options; _7d.showHeader=_7c; $(_7b).tabs("resize"); }; $.fn.tabs=function(_7e,_7f){ if(typeof _7e=="string"){ return $.fn.tabs.methods[_7e](this,_7f); } _7e=_7e||{}; return this.each(function(){ var _80=$.data(this,"tabs"); if(_80){ $.extend(_80.options,_7e); }else{ $.data(this,"tabs",{options:$.extend({},$.fn.tabs.defaults,$.fn.tabs.parseOptions(this),_7e),tabs:[],selectHis:[]}); _28(this); } _d(this); _36(this); _13(this); _2e(this); _6d(this); }); }; $.fn.tabs.methods={options:function(jq){ var cc=jq[0]; var _81=$.data(cc,"tabs").options; var s=_24(cc); _81.selected=s?_4a(cc,s):-1; return _81; },tabs:function(jq){ return $.data(jq[0],"tabs").tabs; },resize:function(jq,_82){ return jq.each(function(){ _13(this,_82); _21(this); }); },add:function(jq,_83){ return jq.each(function(){ _4b(this,_83); }); },close:function(jq,_84){ return jq.each(function(){ _5a(this,_84); }); },getTab:function(jq,_85){ return _61(jq[0],_85); },getTabIndex:function(jq,tab){ return _4a(jq[0],tab); },getSelected:function(jq){ return _24(jq[0]); },select:function(jq,_86){ return jq.each(function(){ _50(this,_86); }); },unselect:function(jq,_87){ return jq.each(function(){ _74(this,_87); }); },exists:function(jq,_88){ return _60(jq[0],_88); },update:function(jq,_89){ return jq.each(function(){ _51(this,_89); }); },enableTab:function(jq,_8a){ return jq.each(function(){ $(this).tabs("getTab",_8a).panel("options").tab.removeClass("tabs-disabled"); }); },disableTab:function(jq,_8b){ return jq.each(function(){ $(this).tabs("getTab",_8b).panel("options").tab.addClass("tabs-disabled"); }); },showHeader:function(jq){ return jq.each(function(){ _7a(this,true); }); },hideHeader:function(jq){ return 
jq.each(function(){ _7a(this,false); }); },scrollBy:function(jq,_8c){ return jq.each(function(){ var _8d=$(this).tabs("options"); var _8e=$(this).find(">div.tabs-header>div.tabs-wrap"); var pos=Math.min(_8e._scrollLeft()+_8c,_8f()); _8e.animate({scrollLeft:pos},_8d.scrollDuration); function _8f(){ var w=0; var ul=_8e.children("ul"); ul.children("li").each(function(){ w+=$(this).outerWidth(true); }); return w-_8e.width()+(ul.outerWidth()-ul.width()); }; }); }}; $.fn.tabs.parseOptions=function(_90){ return $.extend({},$.parser.parseOptions(_90,["tools","toolPosition","tabPosition",{fit:"boolean",border:"boolean",plain:"boolean"},{headerWidth:"number",tabWidth:"number",tabHeight:"number",selected:"number"},{showHeader:"boolean",justified:"boolean",narrow:"boolean",pill:"boolean"}])); }; $.fn.tabs.defaults={width:"auto",height:"auto",headerWidth:150,tabWidth:"auto",tabHeight:27,selected:0,showHeader:true,plain:false,fit:false,border:true,justified:false,narrow:false,pill:false,tools:null,toolPosition:"right",tabPosition:"top",scrollIncrement:100,scrollDuration:400,onLoad:function(_91){ },onSelect:function(_92,_93){ },onUnselect:function(_94,_95){ },onBeforeClose:function(_96,_97){ },onClose:function(_98,_99){ },onAdd:function(_9a,_9b){ },onUpdate:function(_9c,_9d){ },onContextMenu:function(e,_9e,_9f){ }}; })(jQuery);
West-M/easyui-knockoutjs-bindingHandlers
jquery-easyui-1.4.2/plugins/jquery.tabs.js
JavaScript
apache-2.0
17,245
#!/usr/bin/env python # Licensed to Cloudera, Inc. under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. Cloudera, Inc. licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import json from nose.plugins.skip import SkipTest from nose.tools import assert_equal, assert_true from django.contrib.auth.models import User from django.core.urlresolvers import reverse from hadoop.pseudo_hdfs4 import is_live_cluster from desktop.lib.django_test_util import make_logged_in_client from desktop.lib.test_utils import add_to_group, grant_access from libsolr.api import SolrApi LOG = logging.getLogger(__name__) try: # App can be blacklisted from search.conf import SOLR_URL from search.models import Collection2 except: LOG.exception('Testing libsolr requires the search app to not be blacklisted') class TestLibSolrWithSolr: @classmethod def setup_class(cls): if not is_live_cluster(): raise SkipTest cls.client = make_logged_in_client(username='test', is_superuser=False) cls.user = User.objects.get(username='test') add_to_group('test') grant_access("test", "test", "libsolr") grant_access("test", "test", "search") cls.user.is_superuser = True cls.user.save() resp = cls.client.post(reverse('search:install_examples')) content = json.loads(resp.content) cls.user.is_superuser = False cls.user.save() assert_equal(content.get('status'), 0) @classmethod def teardown_class(cls): 
cls.user.is_superuser = False cls.user.save() def test_is_solr_cloud_mode(self): SolrApi(SOLR_URL.get(), self.user).collections() def test_query(self): collection = Collection2(user=self.user, name='log_analytics_demo') collection = json.loads(collection.get_json(self.user)) query = {'qs': [{'q': ''}], 'fqs': [], 'start': 0} SolrApi(SOLR_URL.get(), self.user).query(collection['collection'], query)
mapr/hue
desktop/libs/libsolr/src/libsolr/tests.py
Python
apache-2.0
2,534
/** * Copyright (c) Microsoft. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; var should = require('should'); var util = require('util'); var testUtils = require('../../../util/util'); var CLITest = require('../../../framework/arm-cli-test'); var VMTestUtil = require('../../../util/vmTestUtil'); var testprefix = 'arm-cli-availset-tests'; var groupPrefix = 'xplatTestGAvailCreate'; var requiredEnvironment = [{ name: 'AZURE_VM_TEST_LOCATION', defaultValue: 'eastus' }]; var groupName, location, availprefix = 'xplatTestaAvail'; describe('arm', function() { describe('compute', function() { var suite, retry = 5; var vmTest = new VMTestUtil(); before(function(done) { suite = new CLITest(this, testprefix, requiredEnvironment); suite.setupSuite(function() { location = process.env.AZURE_VM_TEST_LOCATION; location = location.replace(/\s/g, ''); groupName = suite.generateId(groupPrefix, null); availprefix = suite.generateId(availprefix, null); done(); }); }); after(function(done) { deleteUsedGroup(function() { suite.teardownSuite(done); }); }); beforeEach(function(done) { suite.setupTest(done); }); afterEach(function(done) { suite.teardownTest(done); }); describe('availset', function() { it('create should pass', function(done) { vmTest.createGroup(groupName, location, suite, function(result) { var cmd = util.format('availset create -g %s -n %s -l %s -a 3 -b 3 --json', groupName, availprefix, location).split(' '); testUtils.executeCommand(suite, retry, cmd, 
function(result) { result.exitStatus.should.equal(0); done(); }); }); }); it('list should display all availability sets in a resource group', function(done) { var cmd = util.format('availset list %s --json', groupName).split(' '); testUtils.executeCommand(suite, retry, cmd, function(result) { result.exitStatus.should.equal(0); var allResources = JSON.parse(result.text); allResources.some(function(res) { return res.name === availprefix; }).should.be.true; done(); }); }); it('show should display details about the availability set', function(done) { var cmd = util.format('availset show %s %s --json', groupName, availprefix).split(' '); testUtils.executeCommand(suite, retry, cmd, function(result) { result.exitStatus.should.equal(0); var allResources = JSON.parse(result.text); allResources.name.should.equal(availprefix); result.text.should.containEql('"platformUpdateDomainCount": 3,'); result.text.should.containEql('"platformFaultDomainCount": 3,'); done(); }); }); it('list-available-sizes should list the available VM sizes in availability set', function (done) { var cmd = util.format('availset list-available-sizes %s %s', groupName, availprefix).split(' '); testUtils.executeCommand(suite, retry, cmd, function (result) { result.exitStatus.should.equal(0); result.text.should.containEql('Standard_A1'); //result.text.should.containEql('Standard_D1'); done(); }); }); it('delete should delete the availability set', function (done) { var cmd = util.format('availset delete %s %s --quiet --json', groupName, availprefix).split(' '); testUtils.executeCommand(suite, retry, cmd, function (result) { result.exitStatus.should.equal(0); done(); }); }); }); function deleteUsedGroup(callback) { if (!suite.isPlayback()) { var cmd = util.format('group delete %s --quiet --json', groupName).split(' '); testUtils.executeCommand(suite, retry, cmd, function(result) { result.exitStatus.should.equal(0); callback(); }); } else callback(); } }); });
wastoresh/azure-xplat-cli
test/commands/arm/vm/arm.availset-tests.js
JavaScript
apache-2.0
4,602
package sa import ( "errors" "fmt" "io" "os" "github.com/spf13/cobra" kapi "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/client/unversioned" cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "github.com/openshift/origin/pkg/cmd/util" "github.com/openshift/origin/pkg/cmd/util/clientcmd" "github.com/openshift/origin/pkg/serviceaccounts" ) const ( GetServiceAccountTokenRecommendedName = "get-token" getServiceAccountTokenShort = `Get a token assigned to a service account.` getServiceAccountTokenLong = ` Get a token assigned to a service account. If the service account has multiple tokens, the first token found will be returned. Service account API tokens are used by service accounts to authenticate to the API. Client actions using a service account token will be executed as if the service account itself were making the actions. ` getServiceAccountTokenUsage = `%s SA-NAME` getServiceAccountTokenExamples = ` # Get the service account token from service account 'default' %[1]s 'default' ` ) type GetServiceAccountTokenOptions struct { SAName string SAClient unversioned.ServiceAccountsInterface SecretsClient unversioned.SecretsInterface Out io.Writer Err io.Writer } func NewCommandGetServiceAccountToken(name, fullname string, f *clientcmd.Factory, out io.Writer) *cobra.Command { options := &GetServiceAccountTokenOptions{ Out: out, Err: os.Stderr, } getServiceAccountTokenCommand := &cobra.Command{ Use: fmt.Sprintf(getServiceAccountTokenUsage, name), Short: getServiceAccountTokenShort, Long: getServiceAccountTokenLong, Example: fmt.Sprintf(getServiceAccountTokenExamples, fullname), Run: func(cmd *cobra.Command, args []string) { cmdutil.CheckErr(options.Complete(args, f, cmd)) cmdutil.CheckErr(options.Validate()) cmdutil.CheckErr(options.Run()) }, } return getServiceAccountTokenCommand } func (o *GetServiceAccountTokenOptions) Complete(args []string, f *clientcmd.Factory, cmd *cobra.Command) error { if len(args) != 1 { return cmdutil.UsageError(cmd, fmt.Sprintf("expected 
one service account name as an argument, got %q", args)) } o.SAName = args[0] client, err := f.Client() if err != nil { return err } namespace, _, err := f.DefaultNamespace() if err != nil { return err } o.SAClient = client.ServiceAccounts(namespace) o.SecretsClient = client.Secrets(namespace) return nil } func (o *GetServiceAccountTokenOptions) Validate() error { if o.SAName == "" { return errors.New("service account name cannot be empty") } if o.SAClient == nil || o.SecretsClient == nil { return errors.New("API clients must not be nil in order to create a new service account token") } if o.Out == nil || o.Err == nil { return errors.New("cannot proceed if output or error writers are nil") } return nil } func (o *GetServiceAccountTokenOptions) Run() error { serviceAccount, err := o.SAClient.Get(o.SAName) if err != nil { return err } for _, reference := range serviceAccount.Secrets { secret, err := o.SecretsClient.Get(reference.Name) if err != nil { continue } if serviceaccounts.IsValidServiceAccountToken(serviceAccount, secret) { token, exists := secret.Data[kapi.ServiceAccountTokenKey] if !exists { return fmt.Errorf("service account token %q for service account %q did not contain token data", secret.Name, serviceAccount.Name) } fmt.Fprintf(o.Out, string(token)) if util.IsTerminalWriter(o.Out) { // pretty-print for a TTY fmt.Fprintf(o.Out, "\n") } return nil } } return fmt.Errorf("could not find a service account token for service account %q", serviceAccount.Name) }
linearregression/origin
pkg/cmd/cli/sa/gettoken.go
GO
apache-2.0
3,680
package com.ecloud.trianglerectanglelabelview.demo; import android.os.Bundle; import android.support.v7.app.ActionBarActivity; import android.view.Menu; import android.view.MenuItem; import com.ecloud.trianglerectanglelabelview.TriangleRectangleLabelView; public class MainActivity extends ActionBarActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); ((TriangleRectangleLabelView)findViewById(R.id.trlv1)).setLineMode(TriangleRectangleLabelView.LINE_MODE.START); ((TriangleRectangleLabelView)findViewById(R.id.trlv2)).setLineMode(TriangleRectangleLabelView.LINE_MODE.MIDDLE); ((TriangleRectangleLabelView)findViewById(R.id.trlv3)).setLineMode(TriangleRectangleLabelView.LINE_MODE.MIDDLE); ((TriangleRectangleLabelView)findViewById(R.id.trlv4)).setLineMode(TriangleRectangleLabelView.LINE_MODE.END); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } }
bestwpw/TriangleRectangleLabelView
app/src/main/java/com/ecloud/trianglerectanglelabelview/demo/MainActivity.java
Java
apache-2.0
1,695
// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package storage import ( "reflect" "testing" ) func TestIndexGet(t *testing.T) { ti := newTreeIndex() ti.Put([]byte("foo"), revision{main: 2}) ti.Put([]byte("foo"), revision{main: 4}) ti.Tombstone([]byte("foo"), revision{main: 6}) tests := []struct { rev int64 wrev revision wcreated revision wver int64 werr error }{ {0, revision{}, revision{}, 0, ErrRevisionNotFound}, {1, revision{}, revision{}, 0, ErrRevisionNotFound}, {2, revision{main: 2}, revision{main: 2}, 1, nil}, {3, revision{main: 2}, revision{main: 2}, 1, nil}, {4, revision{main: 4}, revision{main: 2}, 2, nil}, {5, revision{main: 4}, revision{main: 2}, 2, nil}, {6, revision{}, revision{}, 0, ErrRevisionNotFound}, } for i, tt := range tests { rev, created, ver, err := ti.Get([]byte("foo"), tt.rev) if err != tt.werr { t.Errorf("#%d: err = %v, want %v", i, err, tt.werr) } if rev != tt.wrev { t.Errorf("#%d: rev = %+v, want %+v", i, rev, tt.wrev) } if created != tt.wcreated { t.Errorf("#%d: created = %+v, want %+v", i, created, tt.wcreated) } if ver != tt.wver { t.Errorf("#%d: ver = %d, want %d", i, ver, tt.wver) } } } func TestIndexRange(t *testing.T) { allKeys := [][]byte{[]byte("foo"), []byte("foo1"), []byte("foo2")} allRevs := []revision{{main: 1}, {main: 2}, {main: 3}} ti := newTreeIndex() for i := range allKeys { ti.Put(allKeys[i], allRevs[i]) } atRev := int64(3) tests := []struct { key, end []byte wkeys [][]byte wrevs []revision 
}{ // single key that not found { []byte("bar"), nil, nil, nil, }, // single key that found { []byte("foo"), nil, allKeys[:1], allRevs[:1], }, // range keys, return first member { []byte("foo"), []byte("foo1"), allKeys[:1], allRevs[:1], }, // range keys, return first two members { []byte("foo"), []byte("foo2"), allKeys[:2], allRevs[:2], }, // range keys, return all members { []byte("foo"), []byte("fop"), allKeys, allRevs, }, // range keys, return last two members { []byte("foo1"), []byte("fop"), allKeys[1:], allRevs[1:], }, // range keys, return last member { []byte("foo2"), []byte("fop"), allKeys[2:], allRevs[2:], }, // range keys, return nothing { []byte("foo3"), []byte("fop"), nil, nil, }, } for i, tt := range tests { keys, revs := ti.Range(tt.key, tt.end, atRev) if !reflect.DeepEqual(keys, tt.wkeys) { t.Errorf("#%d: keys = %+v, want %+v", i, keys, tt.wkeys) } if !reflect.DeepEqual(revs, tt.wrevs) { t.Errorf("#%d: revs = %+v, want %+v", i, revs, tt.wrevs) } } } func TestIndexTombstone(t *testing.T) { ti := newTreeIndex() ti.Put([]byte("foo"), revision{main: 1}) err := ti.Tombstone([]byte("foo"), revision{main: 2}) if err != nil { t.Errorf("tombstone error = %v, want nil", err) } _, _, _, err = ti.Get([]byte("foo"), 2) if err != ErrRevisionNotFound { t.Errorf("get error = %v, want nil", err) } err = ti.Tombstone([]byte("foo"), revision{main: 3}) if err != ErrRevisionNotFound { t.Errorf("tombstone error = %v, want %v", err, ErrRevisionNotFound) } } func TestIndexRangeSince(t *testing.T) { allKeys := [][]byte{[]byte("foo"), []byte("foo1"), []byte("foo2"), []byte("foo2"), []byte("foo1"), []byte("foo")} allRevs := []revision{{main: 1}, {main: 2}, {main: 3}, {main: 4}, {main: 5}, {main: 6}} ti := newTreeIndex() for i := range allKeys { ti.Put(allKeys[i], allRevs[i]) } atRev := int64(1) tests := []struct { key, end []byte wrevs []revision }{ // single key that not found { []byte("bar"), nil, nil, }, // single key that found { []byte("foo"), nil, []revision{{main: 1}, 
{main: 6}}, }, // range keys, return first member { []byte("foo"), []byte("foo1"), []revision{{main: 1}, {main: 6}}, }, // range keys, return first two members { []byte("foo"), []byte("foo2"), []revision{{main: 1}, {main: 2}, {main: 5}, {main: 6}}, }, // range keys, return all members { []byte("foo"), []byte("fop"), allRevs, }, // range keys, return last two members { []byte("foo1"), []byte("fop"), []revision{{main: 2}, {main: 3}, {main: 4}, {main: 5}}, }, // range keys, return last member { []byte("foo2"), []byte("fop"), []revision{{main: 3}, {main: 4}}, }, // range keys, return nothing { []byte("foo3"), []byte("fop"), nil, }, } for i, tt := range tests { revs := ti.RangeSince(tt.key, tt.end, atRev) if !reflect.DeepEqual(revs, tt.wrevs) { t.Errorf("#%d: revs = %+v, want %+v", i, revs, tt.wrevs) } } } func TestIndexCompact(t *testing.T) { maxRev := int64(20) tests := []struct { key []byte remove bool rev revision created revision ver int64 }{ {[]byte("foo"), false, revision{main: 1}, revision{main: 1}, 1}, {[]byte("foo1"), false, revision{main: 2}, revision{main: 2}, 1}, {[]byte("foo2"), false, revision{main: 3}, revision{main: 3}, 1}, {[]byte("foo2"), false, revision{main: 4}, revision{main: 3}, 2}, {[]byte("foo"), false, revision{main: 5}, revision{main: 1}, 2}, {[]byte("foo1"), false, revision{main: 6}, revision{main: 2}, 2}, {[]byte("foo1"), true, revision{main: 7}, revision{}, 0}, {[]byte("foo2"), true, revision{main: 8}, revision{}, 0}, {[]byte("foo"), true, revision{main: 9}, revision{}, 0}, {[]byte("foo"), false, revision{10, 0}, revision{10, 0}, 1}, {[]byte("foo1"), false, revision{10, 1}, revision{10, 1}, 1}, } // Continuous Compact ti := newTreeIndex() for _, tt := range tests { if tt.remove { ti.Tombstone(tt.key, tt.rev) } else { ti.Put(tt.key, tt.rev) } } for i := int64(1); i < maxRev; i++ { am := ti.Compact(i) wti := newTreeIndex() for _, tt := range tests { if _, ok := am[tt.rev]; ok || tt.rev.GreaterThan(revision{main: i}) { if tt.remove { 
wti.Tombstone(tt.key, tt.rev) } else { wti.Restore(tt.key, tt.created, tt.rev, tt.ver) } } } if !ti.Equal(wti) { t.Errorf("#%d: not equal ti", i) } } // Once Compact for i := int64(1); i < maxRev; i++ { ti := newTreeIndex() for _, tt := range tests { if tt.remove { ti.Tombstone(tt.key, tt.rev) } else { ti.Put(tt.key, tt.rev) } } am := ti.Compact(i) wti := newTreeIndex() for _, tt := range tests { if _, ok := am[tt.rev]; ok || tt.rev.GreaterThan(revision{main: i}) { if tt.remove { wti.Tombstone(tt.key, tt.rev) } else { wti.Restore(tt.key, tt.created, tt.rev, tt.ver) } } } if !ti.Equal(wti) { t.Errorf("#%d: not equal ti", i) } } } func TestIndexRestore(t *testing.T) { key := []byte("foo") tests := []struct { created revision modified revision ver int64 }{ {revision{1, 0}, revision{1, 0}, 1}, {revision{1, 0}, revision{1, 1}, 2}, {revision{1, 0}, revision{2, 0}, 3}, } // Continuous Restore ti := newTreeIndex() for i, tt := range tests { ti.Restore(key, tt.created, tt.modified, tt.ver) modified, created, ver, err := ti.Get(key, tt.modified.main) if modified != tt.modified { t.Errorf("#%d: modified = %v, want %v", i, modified, tt.modified) } if created != tt.created { t.Errorf("#%d: created = %v, want %v", i, created, tt.created) } if ver != tt.ver { t.Errorf("#%d: ver = %d, want %d", i, ver, tt.ver) } if err != nil { t.Errorf("#%d: err = %v, want nil", i, err) } } // Once Restore for i, tt := range tests { ti := newTreeIndex() ti.Restore(key, tt.created, tt.modified, tt.ver) modified, created, ver, err := ti.Get(key, tt.modified.main) if modified != tt.modified { t.Errorf("#%d: modified = %v, want %v", i, modified, tt.modified) } if created != tt.created { t.Errorf("#%d: created = %v, want %v", i, created, tt.created) } if ver != tt.ver { t.Errorf("#%d: ver = %d, want %d", i, ver, tt.ver) } if err != nil { t.Errorf("#%d: err = %v, want nil", i, err) } } }
gruiz17/origin
Godeps/_workspace/src/github.com/coreos/etcd/storage/index_test.go
GO
apache-2.0
8,480
/* * Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.application.authentication.framework.exception; import org.wso2.carbon.identity.base.IdentityException; public class ApplicationAuthenticationException extends IdentityException { private static final long serialVersionUID = -476728056175360072L; public ApplicationAuthenticationException(String message) { super(message); } public ApplicationAuthenticationException(String message, Throwable cause) { super(message, cause); } }
dharshanaw/carbon-identity-framework
components/authentication-framework/org.wso2.carbon.identity.application.authentication.framework/src/main/java/org/wso2/carbon/identity/application/authentication/framework/exception/ApplicationAuthenticationException.java
Java
apache-2.0
1,174
/*! * classie - class helper functions * from bonzo https://github.com/ded/bonzo * * classie.has( elem, 'my-class' ) -> true/false * classie.add( elem, 'my-new-class' ) * classie.remove( elem, 'my-unwanted-class' ) * classie.toggle( elem, 'my-class' ) */ /*jshint browser: true, strict: true, undef: true */ /*global define: false */ (function (window) { 'use strict'; // class helper functions from bonzo https://github.com/ded/bonzo function classReg(className) { return new RegExp("(^|\\s+)" + className + "(\\s+|$)"); } // classList support for class management // altho to be fair, the api sucks because it won't accept multiple classes at once var hasClass, addClass, removeClass; if ('classList' in document.documentElement) { hasClass = function (elem, c) { return elem.classList.contains(c); }; addClass = function (elem, c) { elem.classList.add(c); }; removeClass = function (elem, c) { elem.classList.remove(c); }; } else { hasClass = function (elem, c) { return classReg(c).test(elem.className); }; addClass = function (elem, c) { if (!hasClass(elem, c)) { elem.className = elem.className + ' ' + c; } }; removeClass = function (elem, c) { elem.className = elem.className.replace(classReg(c), ' '); }; } function toggleClass(elem, c) { var fn = hasClass(elem, c) ? removeClass : addClass; fn(elem, c); } var classie = { // full names hasClass: hasClass, addClass: addClass, removeClass: removeClass, toggleClass: toggleClass, // short names has: hasClass, add: addClass, remove: removeClass, toggle: toggleClass }; // transport if (typeof define === 'function' && define.amd) { // AMD define(classie); } else { // browser global window.classie = classie; } })(window);
Caspar12/zh.sw
zh-site-manager-web-ui-admin/src/main/resources/static/libs/zh/widget/pagetransitions/svgloader/libs/classie.js
JavaScript
apache-2.0
2,093
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.spring.patterns; import org.apache.camel.EndpointInject; import org.apache.camel.Produce; import org.apache.camel.ProducerTemplate; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.spring.SpringRunWithTestSupport; import org.junit.jupiter.api.Test; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; /** * Tests filtering using Spring Test and XML Config */ // START SNIPPET: example // tag::example[] @ContextConfiguration public class FilterTest extends SpringRunWithTestSupport { @EndpointInject("mock:result") protected MockEndpoint resultEndpoint; @Produce("direct:start") protected ProducerTemplate template; @DirtiesContext @Test public void testSendMatchingMessage() throws Exception { String expectedBody = "<matched/>"; resultEndpoint.expectedBodiesReceived(expectedBody); template.sendBodyAndHeader(expectedBody, "foo", "bar"); resultEndpoint.assertIsSatisfied(); } @DirtiesContext @Test public void testSendNotMatchingMessage() throws Exception { resultEndpoint.expectedMessageCount(0); template.sendBodyAndHeader("<notMatched/>", "foo", "notMatchedHeaderValue"); 
resultEndpoint.assertIsSatisfied(); } } // end::example[] // END SNIPPET: example
tdiesler/camel
components/camel-spring-xml/src/test/java/org/apache/camel/spring/patterns/FilterTest.java
Java
apache-2.0
2,201
#!/usr/bin/env bash # Copyright 2017 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -o errexit set -o nounset set -o pipefail # Send the file content to the server if command -v curl &>/dev/null; then curl -s -k -XPOST --data-binary "@${1}" -o "${1}.result" "${KUBE_EDITOR_CALLBACK}" elif command -v wget &>/dev/null; then wget --post-file="${1}" -O "${1}.result" "${KUBE_EDITOR_CALLBACK}" else echo "curl and wget are unavailable" >&2 exit 1 fi # Use the response as the edited version mv "${1}.result" "${1}"
axbaretto/kubernetes
pkg/kubectl/cmd/edit/testdata/test_editor.sh
Shell
apache-2.0
1,058
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.integration.cluster.failover; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.apache.activemq.artemis.api.core.ActiveMQNotConnectedException; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.client.ClientConsumer; import org.apache.activemq.artemis.api.core.client.ClientMessage; import org.apache.activemq.artemis.api.core.client.ClientProducer; import org.apache.activemq.artemis.api.core.client.ClientSession; import org.apache.activemq.artemis.core.client.impl.ClientSessionInternal; import org.apache.activemq.artemis.core.server.cluster.impl.MessageLoadBalancingType; import org.apache.activemq.artemis.core.settings.impl.AddressSettings; import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection; import org.apache.activemq.artemis.tests.integration.cluster.distribution.ClusterTestBase; import org.apache.activemq.artemis.tests.util.CountDownSessionFailureListener; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class ReplicatedDistributionTest extends ClusterTestBase { private static final SimpleString ADDRESS = new 
SimpleString("test.SomeAddress"); private ClientSession sessionOne; private ClientSession sessionThree; private ClientConsumer consThree; private ClientProducer producer; @Test public void testRedistribution() throws Exception { commonTestCode(); for (int i = 0; i < 50; i++) { ClientMessage msg = consThree.receive(15000); Assert.assertNotNull(msg); // System.out.println(i + " msg = " + msg); int received = msg.getIntProperty("key"); Assert.assertEquals(i, received); msg.acknowledge(); } sessionThree.commit(); // consThree.close(); // TODO: Remove this sleep: If a node fail, // Redistribution may loose messages between the nodes. Thread.sleep(500); fail(sessionThree); // sessionThree.close(); // // setupSessionFactory(2, -1, true); // // sessionThree = sfs[2].createSession(true, true); // // sessionThree.start(); // consThree = sessionThree.createConsumer(ADDRESS); for (int i = 50; i < 100; i++) { ClientMessage msg = consThree.receive(15000); Assert.assertNotNull(msg); // System.out.println(i + " msg = " + msg); int received = (Integer) msg.getObjectProperty(new SimpleString("key")); Assert.assertEquals(i, received); msg.acknowledge(); } Assert.assertNull(consThree.receiveImmediate()); sessionThree.commit(); sessionOne.start(); ClientConsumer consOne = sessionOne.createConsumer(ReplicatedDistributionTest.ADDRESS); Assert.assertNull(consOne.receiveImmediate()); } @Test public void testSimpleRedistribution() throws Exception { commonTestCode(); for (int i = 0; i < 100; i++) { ClientMessage msg = consThree.receive(15000); Assert.assertNotNull(msg); // System.out.println(i + " msg = " + msg); int received = msg.getIntProperty("key"); if (i != received) { // Shouldn't this be a failure? 
System.out.println(i + "!=" + received); } msg.acknowledge(); } sessionThree.commit(); sessionOne.start(); ClientConsumer consOne = sessionOne.createConsumer(ReplicatedDistributionTest.ADDRESS); Assert.assertNull(consOne.receiveImmediate()); } private void commonTestCode() throws Exception { waitForBindings(3, "test.SomeAddress", 1, 1, true); waitForBindings(1, "test.SomeAddress", 1, 1, false); producer = sessionOne.createProducer(ReplicatedDistributionTest.ADDRESS); for (int i = 0; i < 100; i++) { ClientMessage msg = sessionOne.createMessage(true); msg.putIntProperty(new SimpleString("key"), i); producer.send(msg); } sessionOne.commit(); } /** * @param session * @throws InterruptedException */ private void fail(final ClientSession session) throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); session.addFailureListener(new CountDownSessionFailureListener(latch, session)); RemotingConnection conn = ((ClientSessionInternal) session).getConnection(); // Simulate failure on connection conn.fail(new ActiveMQNotConnectedException()); // Wait to be informed of failure boolean ok = latch.await(1000, TimeUnit.MILLISECONDS); Assert.assertTrue(ok); } @Override @Before public void setUp() throws Exception { super.setUp(); setupLiveServer(1, true, isSharedStore(), true, false); setupLiveServer(3, true, isSharedStore(), true, false); setupBackupServer(2, 3, true, isSharedStore(), true); final String address = ReplicatedDistributionTest.ADDRESS.toString(); // notice the abuse of the method call, '3' is not a backup for '1' setupClusterConnectionWithBackups("test", address, MessageLoadBalancingType.ON_DEMAND, 1, true, 1, new int[]{3}); setupClusterConnectionWithBackups("test", address, MessageLoadBalancingType.ON_DEMAND, 1, true, 3, new int[]{2, 1}); setupClusterConnectionWithBackups("test", address, MessageLoadBalancingType.ON_DEMAND, 1, true, 2, new int[]{3}); AddressSettings as = new AddressSettings().setRedistributionDelay(0); for (int i : new 
int[]{1, 2, 3}) { getServer(i).getAddressSettingsRepository().addMatch("test.*", as); getServer(i).start(); } setupSessionFactory(1, -1, true, true); setupSessionFactory(3, 2, true, true); sessionOne = sfs[1].createSession(true, true); sessionThree = sfs[3].createSession(false, false); sessionOne.createQueue(ReplicatedDistributionTest.ADDRESS, ReplicatedDistributionTest.ADDRESS, true); sessionThree.createQueue(ReplicatedDistributionTest.ADDRESS, ReplicatedDistributionTest.ADDRESS, true); consThree = sessionThree.createConsumer(ReplicatedDistributionTest.ADDRESS); sessionThree.start(); } @Override protected boolean isSharedStore() { return false; } }
gaohoward/activemq-artemis
tests/integration-tests/src/test/java/org/apache/activemq/artemis/tests/integration/cluster/failover/ReplicatedDistributionTest.java
Java
apache-2.0
7,156
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package main import ( "flag" "time" kubeinformers "k8s.io/client-go/informers" "k8s.io/client-go/kubernetes" "k8s.io/client-go/tools/clientcmd" "k8s.io/klog" // Uncomment the following line to load the gcp plugin (only required to authenticate against GKE clusters). // _ "k8s.io/client-go/plugin/pkg/client/auth/gcp" clientset "k8s.io/sample-controller/pkg/client/clientset/versioned" informers "k8s.io/sample-controller/pkg/client/informers/externalversions" "k8s.io/sample-controller/pkg/signals" ) var ( masterURL string kubeconfig string ) func main() { flag.Parse() // set up signals so we handle the first shutdown signal gracefully stopCh := signals.SetupSignalHandler() cfg, err := clientcmd.BuildConfigFromFlags(masterURL, kubeconfig) if err != nil { klog.Fatalf("Error building kubeconfig: %s", err.Error()) } kubeClient, err := kubernetes.NewForConfig(cfg) if err != nil { klog.Fatalf("Error building kubernetes clientset: %s", err.Error()) } exampleClient, err := clientset.NewForConfig(cfg) if err != nil { klog.Fatalf("Error building example clientset: %s", err.Error()) } kubeInformerFactory := kubeinformers.NewSharedInformerFactory(kubeClient, time.Second*30) exampleInformerFactory := informers.NewSharedInformerFactory(exampleClient, time.Second*30) controller := NewController(kubeClient, exampleClient, kubeInformerFactory.Apps().V1().Deployments(), exampleInformerFactory.Samplecontroller().V1alpha1().Foos()) // 
notice that there is no need to run Start methods in a separate goroutine. (i.e. go kubeInformerFactory.Start(stopCh) // Start method is non-blocking and runs all registered informers in a dedicated goroutine. kubeInformerFactory.Start(stopCh) exampleInformerFactory.Start(stopCh) if err = controller.Run(2, stopCh); err != nil { klog.Fatalf("Error running controller: %s", err.Error()) } } func init() { flag.StringVar(&kubeconfig, "kubeconfig", "", "Path to a kubeconfig. Only required if out-of-cluster.") flag.StringVar(&masterURL, "master", "", "The address of the Kubernetes API server. Overrides any value in kubeconfig. Only required if out-of-cluster.") }
imcsk8/kubernetes
staging/src/k8s.io/sample-controller/main.go
GO
apache-2.0
2,735
# Copyright 2014 Software freedom conservancy # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .command import Command from selenium.common.exceptions import WebDriverException class Mobile(object): class ConnectionType(object): def __init__(self, mask): self.mask = mask @property def airplane_mode(self): return self.mask % 2 == 1 @property def wifi(self): return (self.mask / 2) % 2 == 1 @property def data(self): return (self.mask / 4) > 0 ALL_NETWORK = ConnectionType(6) WIFI_NETWORK = ConnectionType(2) DATA_NETWORK = ConnectionType(4) AIRPLANE_MODE = ConnectionType(1) def __init__(self, driver): self._driver = driver @property def network_connection(self): return self.ConnectionType(self._driver.execute(Command.GET_NETWORK_CONNECTION)['value']) def set_network_connection(self, network): """ Set the network connection for the remote device. Example of setting airplane mode: driver.mobile.set_network_connection(driver.mobile.AIRPLANE_MODE) """ mode = network.mask if isinstance(network, self.ConnectionType) else network return self.ConnectionType(self._driver.execute(Command.SET_NETWORK_CONNECTION, {'name':'network_connection', 'parameters':{'type': mode}})['value'])
temyers/selenium
py/selenium/webdriver/remote/mobile.py
Python
apache-2.0
2,022
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.security.authorization.composite; import java.util.Map; import java.util.Set; import javax.jcr.Session; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.apache.jackrabbit.api.JackrabbitSession; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.plugins.tree.TreeLocation; import org.apache.jackrabbit.oak.spi.security.authorization.permission.AggregatedPermissionProvider; import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions; import org.apache.jackrabbit.oak.spi.security.authorization.permission.RepositoryPermission; import org.apache.jackrabbit.oak.spi.security.authorization.permission.TreePermission; import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBitsProvider; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static 
org.junit.Assert.assertTrue; /** * Test the effect of the combination of * * - default permission provider * - custom provider that grants JCR_NAMESPACE_MANAGEMENT on repository level * and REP_READ_NODES only * * both for the set of principals associated with the test user and with the admin session. * The expected outcome is that * - test user can only read nodes where this is also granted by the default provider * but has no other access granted * - admin user can only read nodes and register namespaces */ public class CompositeProviderFullScopeTest extends AbstractCompositeProviderTest { private CompositePermissionProvider cppTestUser; private CompositePermissionProvider cppAdminUser; @Override public void before() throws Exception { super.before(); cppTestUser = createPermissionProvider(getTestUser().getPrincipal(), EveryonePrincipal.getInstance()); cppAdminUser = createPermissionProvider(root.getContentSession().getAuthInfo().getPrincipals()); } @Override protected AggregatedPermissionProvider getTestPermissionProvider() { return new FullScopeProvider(readOnlyRoot); } @Test public void testGetPrivileges() throws Exception { PrivilegeBitsProvider pbp = new PrivilegeBitsProvider(readOnlyRoot); PrivilegeBits readNodes = pbp.getBits(REP_READ_NODES); Set<String> expected = ImmutableSet.of(REP_READ_NODES); for (String path : defPrivileges.keySet()) { Set<String> defaultPrivs = defPrivileges.get(path); Tree tree = readOnlyRoot.getTree(path); Set<String> privNames = cppTestUser.getPrivileges(tree); if (pbp.getBits(defaultPrivs).includes(readNodes)) { assertEquals(expected, privNames); } else { assertTrue(privNames.isEmpty()); } } } @Test public void testGetPrivilegesAdmin() throws Exception { Set<String> expected = ImmutableSet.of(REP_READ_NODES); for (String path : NODE_PATHS) { Tree tree = readOnlyRoot.getTree(path); assertEquals(expected, cppAdminUser.getPrivileges(tree)); } } @Test public void testGetPrivilegesOnRepo() throws Exception { Set<String> expected = 
ImmutableSet.of(JCR_NAMESPACE_MANAGEMENT); assertEquals(expected, cppTestUser.getPrivileges(null)); } @Test public void testGetPrivilegesOnRepoAdmin() throws Exception { Set<String> expected = ImmutableSet.of(JCR_NAMESPACE_MANAGEMENT); assertEquals(expected, cppAdminUser.getPrivileges(null)); } @Test public void testHasPrivileges() throws Exception { PrivilegeBitsProvider pbp = new PrivilegeBitsProvider(readOnlyRoot); PrivilegeBits readNodes = pbp.getBits(REP_READ_NODES); for (String path : defPrivileges.keySet()) { Set<String> defaultPrivs = defPrivileges.get(path); PrivilegeBits defaultBits = pbp.getBits(defaultPrivs); Tree tree = readOnlyRoot.getTree(path); if (defaultPrivs.isEmpty()) { assertFalse(path, cppTestUser.hasPrivileges(tree, REP_READ_NODES)); } else if (defaultBits.includes(readNodes)) { assertTrue(path, cppTestUser.hasPrivileges(tree, REP_READ_NODES)); if (!readNodes.equals(defaultBits)) { assertFalse(path, cppTestUser.hasPrivileges(tree, defaultPrivs.toArray(new String[defaultPrivs.size()]))); } } else { assertFalse(path, cppTestUser.hasPrivileges(tree, REP_READ_NODES)); assertFalse(path, cppTestUser.hasPrivileges(tree, defaultPrivs.toArray(new String[defaultPrivs.size()]))); } } } @Test public void testHasPrivilegesAdmin() throws Exception { for (String path : NODE_PATHS) { Tree tree = readOnlyRoot.getTree(path); assertTrue(cppAdminUser.hasPrivileges(tree, REP_READ_NODES)); assertFalse(cppAdminUser.hasPrivileges(tree, JCR_READ)); assertFalse(cppAdminUser.hasPrivileges(tree, JCR_ALL)); assertFalse(cppAdminUser.hasPrivileges(tree, JCR_WRITE)); assertFalse(cppAdminUser.hasPrivileges(tree, REP_READ_NODES, REP_READ_PROPERTIES)); assertFalse(cppAdminUser.hasPrivileges(tree, JCR_MODIFY_PROPERTIES)); assertFalse(cppAdminUser.hasPrivileges(tree, JCR_LOCK_MANAGEMENT)); } } @Test public void testHasPrivilegesOnRepo() throws Exception { assertTrue(cppTestUser.hasPrivileges(null, JCR_NAMESPACE_MANAGEMENT)); assertFalse(cppTestUser.hasPrivileges(null, 
JCR_NAMESPACE_MANAGEMENT, JCR_NODE_TYPE_DEFINITION_MANAGEMENT)); assertFalse(cppTestUser.hasPrivileges(null, JCR_ALL)); assertTrue(cppTestUser.hasPrivileges(null)); } @Test public void testHasPrivilegeOnRepoAdmin() throws Exception { assertTrue(cppAdminUser.hasPrivileges(null, JCR_NAMESPACE_MANAGEMENT)); assertFalse(cppAdminUser.hasPrivileges(null, JCR_NAMESPACE_MANAGEMENT, JCR_NODE_TYPE_DEFINITION_MANAGEMENT)); assertFalse(cppAdminUser.hasPrivileges(null, JCR_ALL)); assertTrue(cppAdminUser.hasPrivileges(null)); } @Test public void testIsGranted() throws Exception { for (String p : defPermissions.keySet()) { long defaultPerms = defPermissions.get(p); Tree tree = readOnlyRoot.getTree(p); if (Permissions.READ_NODE != defaultPerms) { assertFalse(p, cppTestUser.isGranted(tree, null, defaultPerms)); } boolean expectedReadNode = Permissions.includes(defaultPerms, Permissions.READ_NODE); assertEquals(p, expectedReadNode, cppTestUser.isGranted(tree, null, Permissions.READ_NODE)); } } @Test public void testIsGrantedAdmin() throws Exception { for (String p : NODE_PATHS) { Tree tree = readOnlyRoot.getTree(p); assertTrue(p, cppAdminUser.isGranted(tree, null, Permissions.READ_NODE)); assertFalse(p, cppAdminUser.isGranted(tree, null, Permissions.READ)); assertFalse(p, cppAdminUser.isGranted(tree, null, Permissions.WRITE)); assertFalse(p, cppAdminUser.isGranted(tree, null, Permissions.ALL)); } } @Test public void testIsGrantedProperty() throws Exception { for (String p : NODE_PATHS) { Tree tree = readOnlyRoot.getTree(p); assertFalse(p, cppTestUser.isGranted(tree, PROPERTY_STATE, Permissions.READ_PROPERTY)); assertFalse(p, cppTestUser.isGranted(tree, PROPERTY_STATE, Permissions.SET_PROPERTY)); } } @Test public void testIsGrantedPropertyAdmin() throws Exception { for (String p : NODE_PATHS) { Tree tree = readOnlyRoot.getTree(p); assertFalse(p, cppAdminUser.isGranted(tree, PROPERTY_STATE, Permissions.READ_PROPERTY)); assertFalse(p, cppAdminUser.isGranted(tree, PROPERTY_STATE, 
Permissions.SET_PROPERTY)); assertFalse(p, cppAdminUser.isGranted(tree, PROPERTY_STATE, Permissions.ALL)); } } @Test public void testIsGrantedAction() throws Exception { for (String p : defActionsGranted.keySet()) { String[] actions = defActionsGranted.get(p); if (ImmutableList.copyOf(actions).contains(Session.ACTION_READ)) { TreeLocation tl = TreeLocation.create(readOnlyRoot, p); assertEquals(p, tl.getTree() != null, cppTestUser.isGranted(p, Session.ACTION_READ)); } else { assertFalse(p, cppTestUser.isGranted(p, Session.ACTION_READ)); } if (actions.length > 1) { assertFalse(p, cppTestUser.isGranted(p, getActionString(actions))); } } } @Test public void testIsGrantedAction2() throws Exception { Map<String, String[]> noAccess = ImmutableMap.<String, String[]>builder(). put(ROOT_PATH, new String[]{Session.ACTION_READ}). put(ROOT_PATH + "jcr:primaryType", new String[]{Session.ACTION_READ, Session.ACTION_SET_PROPERTY}). put("/nonexisting", new String[]{Session.ACTION_READ, Session.ACTION_ADD_NODE}). put(TEST_PATH_2, new String[]{Session.ACTION_READ, Session.ACTION_REMOVE}). put(TEST_PATH_2 + "/jcr:primaryType", new String[]{Session.ACTION_READ, Session.ACTION_SET_PROPERTY}). put(TEST_A_B_C_PATH, new String[]{Session.ACTION_READ, Session.ACTION_REMOVE}). put(TEST_A_B_C_PATH + "/noneExisting", new String[]{Session.ACTION_READ, JackrabbitSession.ACTION_REMOVE_NODE}). 
put(TEST_A_B_C_PATH + "/jcr:primaryType", new String[]{JackrabbitSession.ACTION_REMOVE_PROPERTY}).build(); for (String p : noAccess.keySet()) { assertFalse(p, cppTestUser.isGranted(p, getActionString(noAccess.get(p)))); } } @Test public void testIsGrantedActionAdmin() throws Exception { for (String p : defActionsGranted.keySet()) { boolean expectedRead = readOnlyRoot.getTree(p).exists(); assertEquals(p, expectedRead, cppAdminUser.isGranted(p, Session.ACTION_READ)); assertFalse(p, cppAdminUser.isGranted(p, getActionString(ALL_ACTIONS))); } } @Test public void testRepositoryPermissionIsGranted() throws Exception { RepositoryPermission rp = cppTestUser.getRepositoryPermission(); assertTrue(rp.isGranted(Permissions.NAMESPACE_MANAGEMENT)); assertFalse(rp.isGranted(Permissions.NODE_TYPE_DEFINITION_MANAGEMENT)); assertFalse(rp.isGranted(Permissions.NAMESPACE_MANAGEMENT | Permissions.NODE_TYPE_DEFINITION_MANAGEMENT)); } @Test public void testRepositoryPermissionIsGrantedAdminUser() throws Exception { RepositoryPermission rp = cppAdminUser.getRepositoryPermission(); assertTrue(rp.isGranted(Permissions.NAMESPACE_MANAGEMENT)); assertFalse(rp.isGranted(Permissions.NODE_TYPE_DEFINITION_MANAGEMENT)); assertFalse(rp.isGranted(Permissions.NAMESPACE_MANAGEMENT | Permissions.NODE_TYPE_DEFINITION_MANAGEMENT)); assertFalse(rp.isGranted(Permissions.PRIVILEGE_MANAGEMENT)); assertFalse(rp.isGranted(Permissions.NAMESPACE_MANAGEMENT|Permissions.PRIVILEGE_MANAGEMENT)); assertFalse(rp.isGranted(Permissions.ALL)); } @Test public void testTreePermissionIsGranted() throws Exception { TreePermission parentPermission = TreePermission.EMPTY; for (String path : TP_PATHS) { TreePermission tp = cppTestUser.getTreePermission(readOnlyRoot.getTree(path), parentPermission); Long toTest = (defPermissions.containsKey(path)) ? 
defPermissions.get(path) : defPermissions.get(PathUtils.getAncestorPath(path, 1)); if (toTest != null) { if (Permissions.READ_NODE == toTest) { assertTrue(path, tp.isGranted(toTest)); } else { boolean canRead = Permissions.includes(toTest, Permissions.READ_NODE); assertEquals(path, canRead, tp.isGranted(Permissions.READ_NODE)); assertFalse(path, tp.isGranted(toTest)); } } parentPermission = tp; } } @Test public void testTreePermissionIsGrantedProperty() throws Exception { TreePermission parentPermission = TreePermission.EMPTY; for (String path : TP_PATHS) { TreePermission tp = cppTestUser.getTreePermission(readOnlyRoot.getTree(path), parentPermission); Long toTest = (defPermissions.containsKey(path)) ? defPermissions.get(path) : defPermissions.get(PathUtils.getAncestorPath(path, 1)); if (toTest != null) { boolean granted = (toTest == Permissions.READ_NODE); assertEquals(path, granted, tp.isGranted(toTest, PROPERTY_STATE)); } assertFalse(tp.isGranted(Permissions.READ_PROPERTY, PROPERTY_STATE)); parentPermission = tp; } } @Test public void testTreePermissionCanRead() throws Exception { Map<String, Boolean> readMap = ImmutableMap.<String, Boolean>builder(). put(ROOT_PATH, false). put(TEST_PATH, true). put(TEST_A_PATH, true). put(TEST_A_B_PATH, true). put(TEST_A_B_C_PATH, false). put(TEST_A_B_C_PATH + "/nonexisting", false). 
build(); TreePermission parentPermission = TreePermission.EMPTY; for (String nodePath : readMap.keySet()) { Tree tree = readOnlyRoot.getTree(nodePath); TreePermission tp = cppTestUser.getTreePermission(tree, parentPermission); boolean expectedResult = readMap.get(nodePath); assertEquals(nodePath, expectedResult, tp.canRead()); parentPermission = tp; } } @Test public void testTreePermissionCanReadProperty() throws Exception { TreePermission parentPermission = TreePermission.EMPTY; for (String nodePath : TP_PATHS) { Tree tree = readOnlyRoot.getTree(nodePath); TreePermission tp = cppTestUser.getTreePermission(tree, parentPermission); assertFalse(nodePath, tp.canRead(PROPERTY_STATE)); parentPermission = tp; } } @Test public void testTreePermissionCanReadAdmin() { TreePermission parentPermission = TreePermission.EMPTY; for (String nodePath : TP_PATHS) { Tree tree = readOnlyRoot.getTree(nodePath); TreePermission tp = cppAdminUser.getTreePermission(tree, parentPermission); assertTrue(nodePath, tp.canRead()); assertFalse(nodePath, tp.canRead(PROPERTY_STATE)); parentPermission = tp; } } }
mreutegg/jackrabbit-oak
oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/composite/CompositeProviderFullScopeTest.java
Java
apache-2.0
16,116
// +build !windows package daemon import ( "os" "path/filepath" "sort" "strings" "github.com/docker/docker/daemon/execdriver" "github.com/docker/docker/pkg/system" "github.com/docker/docker/volume" "github.com/docker/docker/volume/local" ) // copyOwnership copies the permissions and uid:gid of the source file // into the destination file func copyOwnership(source, destination string) error { stat, err := system.Stat(source) if err != nil { return err } if err := os.Chown(destination, int(stat.Uid()), int(stat.Gid())); err != nil { return err } return os.Chmod(destination, os.FileMode(stat.Mode())) } func (container *Container) setupMounts() ([]execdriver.Mount, error) { var mounts []execdriver.Mount for _, m := range container.MountPoints { path, err := m.Setup() if err != nil { return nil, err } if !container.trySetNetworkMount(m.Destination, path) { mounts = append(mounts, execdriver.Mount{ Source: path, Destination: m.Destination, Writable: m.RW, }) } } mounts = sortMounts(mounts) return append(mounts, container.networkMounts()...), nil } func sortMounts(m []execdriver.Mount) []execdriver.Mount { sort.Sort(mounts(m)) return m } type mounts []execdriver.Mount func (m mounts) Len() int { return len(m) } func (m mounts) Less(i, j int) bool { return m.parts(i) < m.parts(j) } func (m mounts) Swap(i, j int) { m[i], m[j] = m[j], m[i] } func (m mounts) parts(i int) int { return len(strings.Split(filepath.Clean(m[i].Destination), string(os.PathSeparator))) } // migrateVolume links the contents of a volume created pre Docker 1.7 // into the location expected by the local driver. // It creates a symlink from DOCKER_ROOT/vfs/dir/VOLUME_ID to DOCKER_ROOT/volumes/VOLUME_ID/_container_data. // It preserves the volume json configuration generated pre Docker 1.7 to be able to // downgrade from Docker 1.7 to Docker 1.6 without losing volume compatibility. 
func migrateVolume(id, vfs string) error { l, err := getVolumeDriver(volume.DefaultDriverName) if err != nil { return err } newDataPath := l.(*local.Root).DataPath(id) fi, err := os.Stat(newDataPath) if err != nil && !os.IsNotExist(err) { return err } if fi != nil && fi.IsDir() { return nil } return os.Symlink(vfs, newDataPath) } // validVolumeLayout checks whether the volume directory layout // is valid to work with Docker post 1.7 or not. func validVolumeLayout(files []os.FileInfo) bool { if len(files) == 1 && files[0].Name() == local.VolumeDataPathName && files[0].IsDir() { return true } if len(files) != 2 { return false } for _, f := range files { if f.Name() == "config.json" || (f.Name() == local.VolumeDataPathName && f.Mode()&os.ModeSymlink == os.ModeSymlink) { // Old volume configuration, we ignore it continue } return false } return true }
chrisseto/docker
daemon/volumes_linux.go
GO
apache-2.0
2,865
package com.commonsware.empublite; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; public class EmPubLiteActivity extends Activity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.options, menu); return (super.onCreateOptionsMenu(menu)); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: return (true); case R.id.about: Intent i = new Intent(this, SimpleContentActivity.class); startActivity(i); return (true); case R.id.help: i = new Intent(this, SimpleContentActivity.class); startActivity(i); return (true); } return (super.onOptionsItemSelected(item)); } }
immuvijay/cw-omnibus
EmPubLite-AndroidStudio/T9-Fragments/EmPubLite/app/src/main/java/com/commonsware/empublite/EmPubLiteActivity.java
Java
apache-2.0
1,027
/** * $RCSfile: PEPPubSub.java,v $ * $Revision: 1.2 $ * $Date: 2007/11/03 04:46:52 $ * * Copyright 2003-2007 Jive Software. * * All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.smackx.packet; import org.jivesoftware.smack.packet.IQ; /** * Represents XMPP PEP/XEP-163 pubsub packets.<p> * * The 'http://jabber.org/protocol/pubsub' namespace is used to publish personal events items from one client * to subscribed clients (See XEP-163). * * @author Jeff Williams */ public class PEPPubSub extends IQ { PEPItem item; /** * Creates a new PubSub. * */ public PEPPubSub(PEPItem item) { super(); this.item = item; } /** * Returns the XML element name of the extension sub-packet root element. * Always returns "x" * * @return the XML element name of the packet extension. */ public String getElementName() { return "pubsub"; } /** * Returns the XML namespace of the extension sub-packet root element. * According the specification the namespace is always "jabber:x:roster" * (which is not to be confused with the 'jabber:iq:roster' namespace * * @return the XML namespace of the packet extension. */ public String getNamespace() { return "http://jabber.org/protocol/pubsub"; } /** * Returns the XML representation of a Personal Event Publish according the specification. 
* * Usually the XML representation will be inside of a Message XML representation like * in the following example: * <pre> * &lt;message id="MlIpV-4" to="gato1@gato.home" from="gato3@gato.home/Smack"&gt; * &lt;subject&gt;Any subject you want&lt;/subject&gt; * &lt;body&gt;This message contains roster items.&lt;/body&gt; * &lt;x xmlns="jabber:x:roster"&gt; * &lt;item jid="gato1@gato.home"/&gt; * &lt;item jid="gato2@gato.home"/&gt; * &lt;/x&gt; * &lt;/message&gt; * </pre> * */ public String getChildElementXML() { StringBuilder buf = new StringBuilder(); buf.append("<").append(getElementName()).append(" xmlns=\"").append(getNamespace()).append("\">"); buf.append("<publish node=\"").append(item.getNode()).append("\">"); buf.append(item.toXML()); buf.append("</publish>"); buf.append("</").append(getElementName()).append(">"); return buf.toString(); } }
mcaprari/smack
source/org/jivesoftware/smackx/packet/PEPPubSub.java
Java
apache-2.0
3,033
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.j2objc.ast; import com.google.devtools.j2objc.types.Types; import org.eclipse.jdt.core.dom.ITypeBinding; /** * Type literal node type. */ public class TypeLiteral extends Expression { private final ITypeBinding typeBinding; private ChildLink<Type> type = ChildLink.create(Type.class, this); public TypeLiteral(org.eclipse.jdt.core.dom.TypeLiteral jdtNode) { super(jdtNode); typeBinding = jdtNode.resolveTypeBinding(); type.set((Type) TreeConverter.convert(jdtNode.getType())); } public TypeLiteral(TypeLiteral other) { super(other); typeBinding = other.getTypeBinding(); type.copyFrom(other.getType()); } public TypeLiteral(ITypeBinding literalType, Types typeEnv) { typeBinding = typeEnv.resolveJavaType("java.lang.Class"); type.set(Type.newType(literalType)); } @Override public Kind getKind() { return Kind.TYPE_LITERAL; } @Override public ITypeBinding getTypeBinding() { return typeBinding; } public Type getType() { return type.get(); } @Override protected void acceptInner(TreeVisitor visitor) { if (visitor.visit(this)) { type.accept(visitor); } visitor.endVisit(this); } @Override public TypeLiteral copy() { return new TypeLiteral(this); } }
shabbirh/j2objc
translator/src/main/java/com/google/devtools/j2objc/ast/TypeLiteral.java
Java
apache-2.0
1,862
/* * #%L * BroadleafCommerce Open Admin Platform * %% * Copyright (C) 2009 - 2015 Broadleaf Commerce * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.broadleafcommerce.openadmin.web.service; import org.broadleafcommerce.common.extension.ExtensionHandler; import org.broadleafcommerce.common.extension.ExtensionResultStatusType; import org.broadleafcommerce.openadmin.dto.Entity; import org.broadleafcommerce.openadmin.web.form.component.ListGridRecord; /** * An extension handler to allow a custom error key or error message to be added to the ListGridRecord. * @author kellytisdell * */ public interface ListGridErrorMessageExtensionHandler extends ExtensionHandler { /** * Allows the extension handler to determine a custom error message or error message key for the entity. * Implementors should determine if they can handle the entity in question. If not, they should return * ExtensionResultStatusType.NOT_HANDLED. * * Otherwise, they should either set the error message or the error key on the ListGrid on the entity. If both * are set the error message will win. * * Implementors can use the BroadleafRequestContext to try to determine Locale, or get a MessageSource, etc. * * @param entity * @param lgr * @return */ public ExtensionResultStatusType determineErrorMessageForEntity(Entity entity, ListGridRecord lgr); }
macielbombonato/BroadleafCommerce
admin/broadleaf-open-admin-platform/src/main/java/org/broadleafcommerce/openadmin/web/service/ListGridErrorMessageExtensionHandler.java
Java
apache-2.0
1,956
/*
 * #%L
 * BroadleafCommerce Open Admin Platform
 * %%
 * Copyright (C) 2009 - 2013 Broadleaf Commerce
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.broadleafcommerce.openadmin.server.factory;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.broadleafcommerce.common.exception.ExceptionHelper;
import org.broadleafcommerce.common.presentation.client.OperationType;
import org.broadleafcommerce.common.presentation.client.PersistencePerspectiveItemType;
import org.broadleafcommerce.common.util.dao.DynamicDaoHelper;
import org.broadleafcommerce.common.util.dao.DynamicDaoHelperImpl;
import org.broadleafcommerce.openadmin.dto.OperationTypes;
import org.broadleafcommerce.openadmin.dto.PersistencePackage;
import org.broadleafcommerce.openadmin.dto.PersistencePerspective;
import org.broadleafcommerce.openadmin.dto.SectionCrumb;
import org.broadleafcommerce.openadmin.server.domain.PersistencePackageRequest;
import org.broadleafcommerce.openadmin.server.security.domain.AdminSection;
import org.broadleafcommerce.openadmin.server.security.service.navigation.AdminNavigationService;
import org.hibernate.Session;
import org.springframework.stereotype.Service;

import java.util.Map;

import javax.annotation.Resource;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

/**
 * Factory that converts a {@link PersistencePackageRequest} into a fully populated
 * {@link PersistencePackage}, recursively processing any sub-requests.
 *
 * @author Andre Azzolini (apazzolini)
 */
@Service("blPersistencePackageFactory")
public class PersistencePackageFactoryImpl implements PersistencePackageFactory {

    @Resource(name = "blAdminNavigationService")
    protected AdminNavigationService adminNavigationService;

    @PersistenceContext(unitName = "blPU")
    protected EntityManager em;

    protected DynamicDaoHelper dynamicDaoHelper = new DynamicDaoHelperImpl();

    /**
     * Builds a {@link PersistencePackage} from the given request.
     *
     * The persistence perspective is assembled first (operation types depend on the request
     * type: STANDARD, ADORNED, or MAP), then section crumbs are translated from section
     * identifiers to entity class names, and finally any sub-requests are converted
     * recursively into sub-packages.
     *
     * @param request the request describing the desired package
     * @return the populated persistence package
     * @throws IllegalArgumentException if an ADORNED request has no adornedList, or a MAP
     *         request has no mapStructure
     */
    @Override
    public PersistencePackage create(PersistencePackageRequest request) {
        PersistencePerspective persistencePerspective = new PersistencePerspective();
        persistencePerspective.setAdditionalForeignKeys(request.getAdditionalForeignKeys());
        persistencePerspective.setAdditionalNonPersistentProperties(new String[] {});

        if (request.getForeignKey() != null) {
            persistencePerspective.addPersistencePerspectiveItem(PersistencePerspectiveItemType.FOREIGNKEY,
                    request.getForeignKey());
        }

        // Operation types and perspective items vary by request type.
        switch (request.getType()) {
            case STANDARD:
                persistencePerspective.setOperationTypes(getDefaultOperationTypes());
                break;
            case ADORNED:
                if (request.getAdornedList() == null) {
                    throw new IllegalArgumentException("ADORNED type requires the adornedList to be set");
                }

                persistencePerspective.setOperationTypes(getOperationTypes(OperationType.ADORNEDTARGETLIST));
                persistencePerspective.addPersistencePerspectiveItem(PersistencePerspectiveItemType.ADORNEDTARGETLIST,
                        request.getAdornedList());
                break;
            case MAP:
                if (request.getMapStructure() == null) {
                    throw new IllegalArgumentException("MAP type requires the mapStructure to be set");
                }

                persistencePerspective.setOperationTypes(getOperationTypes(OperationType.MAP));
                persistencePerspective.addPersistencePerspectiveItem(PersistencePerspectiveItemType.MAPSTRUCTURE,
                        request.getMapStructure());
                break;
        }

        // An explicit override on the request wins over the type-derived defaults above.
        if (request.getOperationTypesOverride() != null) {
            persistencePerspective.setOperationTypes(request.getOperationTypesOverride());
        }

        PersistencePackage pp = new PersistencePackage();
        pp.setCeilingEntityFullyQualifiedClassname(request.getCeilingEntityClassname());
        if (!StringUtils.isEmpty(request.getSecurityCeilingEntityClassname())) {
            pp.setSecurityCeilingEntityFullyQualifiedClassname(request.getSecurityCeilingEntityClassname());
        }
        if (!ArrayUtils.isEmpty(request.getSectionCrumbs())) {
            // Translate each crumb's section identifier into the most-derived polymorphic
            // entity class name; fall back to the request's ceiling entity on failure.
            SectionCrumb[] converted = new SectionCrumb[request.getSectionCrumbs().length];
            int index = 0;
            for (SectionCrumb crumb : request.getSectionCrumbs()) {
                SectionCrumb temp = new SectionCrumb();
                String originalSectionIdentifier = crumb.getSectionIdentifier();
                String sectionAsClassName;
                try {
                    sectionAsClassName = getClassNameForSection(crumb.getSectionIdentifier());
                } catch (Exception e) {
                    sectionAsClassName = request.getCeilingEntityClassname();
                }
                // Remember the original identifier only when translation actually changed it.
                if (sectionAsClassName != null && !sectionAsClassName.equals(originalSectionIdentifier)) {
                    temp.setOriginalSectionIdentifier(originalSectionIdentifier);
                }
                temp.setSectionIdentifier(sectionAsClassName);
                temp.setSectionId(crumb.getSectionId());
                converted[index] = temp;
                index++;
            }
            pp.setSectionCrumbs(converted);
        }
        pp.setSectionEntityField(request.getSectionEntityField());
        pp.setFetchTypeFullyQualifiedClassname(null);
        pp.setPersistencePerspective(persistencePerspective);
        pp.setCustomCriteria(request.getCustomCriteria());
        pp.setCsrfToken(null);
        pp.setRequestingEntityName(request.getRequestingEntityName());
        pp.setValidateUnsubmittedProperties(request.isValidateUnsubmittedProperties());

        if (request.getEntity() != null) {
            pp.setEntity(request.getEntity());
        }

        // Recursively convert nested requests into sub-packages keyed the same way.
        for (Map.Entry<String, PersistencePackageRequest> subRequest : request.getSubRequests().entrySet()) {
            pp.getSubPackages().put(subRequest.getKey(), create(subRequest.getValue()));
        }

        return pp;
    }

    /**
     * Returns operation types with every operation (fetch, remove, add, update, inspect)
     * set to {@link OperationType#BASIC} — the STANDARD request configuration.
     */
    protected OperationTypes getDefaultOperationTypes() {
        OperationTypes operationTypes = new OperationTypes();
        operationTypes.setFetchType(OperationType.BASIC);
        operationTypes.setRemoveType(OperationType.BASIC);
        operationTypes.setAddType(OperationType.BASIC);
        operationTypes.setUpdateType(OperationType.BASIC);
        operationTypes.setInspectType(OperationType.BASIC);
        return operationTypes;
    }

    /**
     * Returns operation types using the given type for all operations except inspect,
     * which always remains {@link OperationType#BASIC}.
     *
     * @param nonInspectOperationType the operation type for fetch/remove/add/update
     */
    protected OperationTypes getOperationTypes(OperationType nonInspectOperationType) {
        OperationTypes operationTypes = new OperationTypes();
        operationTypes.setFetchType(nonInspectOperationType);
        operationTypes.setRemoveType(nonInspectOperationType);
        operationTypes.setAddType(nonInspectOperationType);
        operationTypes.setUpdateType(nonInspectOperationType);
        operationTypes.setInspectType(OperationType.BASIC);
        return operationTypes;
    }

    /**
     * Resolves an admin section key (URI fragment) to the fully qualified class name of the
     * most-derived polymorphic entity for that section's ceiling entity. If no admin section
     * matches, the key itself is treated as the ceiling class name.
     *
     * @param sectionKey the section URI fragment (without the leading slash)
     * @return the most-derived entity class name for the section
     * @throws RuntimeException if the resolved class name cannot be loaded
     */
    protected String getClassNameForSection(String sectionKey) {
        try {
            AdminSection section = adminNavigationService.findAdminSectionByURI("/" + sectionKey);
            String className = (section == null) ? sectionKey : section.getCeilingEntity();
            Class<?>[] entities = dynamicDaoHelper.getAllPolymorphicEntitiesFromCeiling(Class.forName(className),
                    em.unwrap(Session.class).getSessionFactory(), true, true);
            // The helper returns the hierarchy with the most-derived type last.
            return entities[entities.length - 1].getName();
        } catch (ClassNotFoundException e) {
            throw ExceptionHelper.refineException(RuntimeException.class, RuntimeException.class, e);
        }
    }
}
ljshj/BroadleafCommerce
admin/broadleaf-open-admin-platform/src/main/java/org/broadleafcommerce/openadmin/server/factory/PersistencePackageFactoryImpl.java
Java
apache-2.0
8,138
/*
Copyright 2017 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package jenkins

import "github.com/prometheus/client_golang/prometheus"

// Package-level Prometheus collectors for the Jenkins operator. They are
// registered once in init() and shared by every Metrics value returned from
// NewMetrics.
var (
	requests = prometheus.NewCounterVec(prometheus.CounterOpts{
		Name: "jenkins_requests",
		Help: "Number of Jenkins requests made from prow.",
	}, []string{
		// http verb of the request
		"verb",
		// path of the request
		"handler",
		// http status code of the request
		"code",
	})
	requestRetries = prometheus.NewCounter(prometheus.CounterOpts{
		Name: "jenkins_request_retries",
		Help: "Number of Jenkins request retries made from prow.",
	})
	requestLatency = prometheus.NewHistogramVec(prometheus.HistogramOpts{
		Name:    "jenkins_request_latency",
		Help:    "Time for a request to roundtrip between prow and Jenkins.",
		Buckets: prometheus.DefBuckets,
	}, []string{
		// http verb of the request
		"verb",
		// path of the request
		"handler",
	})
	resyncPeriod = prometheus.NewHistogram(prometheus.HistogramOpts{
		Name:    "resync_period_seconds",
		Help:    "Time the controller takes to complete one reconciliation loop.",
		Buckets: prometheus.ExponentialBuckets(1, 3, 5),
	})
)

// init registers every collector with the default Prometheus registry;
// MustRegister panics on duplicate registration, so this must run exactly once.
func init() {
	prometheus.MustRegister(requests)
	prometheus.MustRegister(requestRetries)
	prometheus.MustRegister(requestLatency)
	prometheus.MustRegister(resyncPeriod)
}

// ClientMetrics is a set of metrics gathered by the Jenkins client.
type ClientMetrics struct {
	Requests       *prometheus.CounterVec
	RequestRetries prometheus.Counter
	RequestLatency *prometheus.HistogramVec
}

// Metrics is a set of metrics gathered by the Jenkins operator.
// It includes client metrics and metrics related to the controller loop.
type Metrics struct {
	ClientMetrics *ClientMetrics
	ResyncPeriod  prometheus.Histogram
}

// NewMetrics creates a new set of metrics for the Jenkins operator.
// Note: every call returns wrappers around the same package-level collectors.
func NewMetrics() *Metrics {
	return &Metrics{
		ClientMetrics: &ClientMetrics{
			Requests:       requests,
			RequestRetries: requestRetries,
			RequestLatency: requestLatency,
		},
		ResyncPeriod: resyncPeriod,
	}
}
mindprince/test-infra
prow/jenkins/metrics.go
GO
apache-2.0
2,555
/** * Autogenerated by Thrift Compiler (0.9.2) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package com.navercorp.pinpoint.thrift.dto; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; import org.apache.thrift.async.AsyncMethodCallback; import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import javax.annotation.Generated; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-21") public class TSpan implements org.apache.thrift.TBase<TSpan, TSpan._Fields>, java.io.Serializable, Cloneable, Comparable<TSpan> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TSpan"); private static final org.apache.thrift.protocol.TField AGENT_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("agentId", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField APPLICATION_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("applicationName", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField AGENT_START_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("agentStartTime", org.apache.thrift.protocol.TType.I64, 
(short)3); private static final org.apache.thrift.protocol.TField TRANSACTION_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("transactionId", org.apache.thrift.protocol.TType.STRING, (short)4); private static final org.apache.thrift.protocol.TField SPAN_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("spanId", org.apache.thrift.protocol.TType.I64, (short)7); private static final org.apache.thrift.protocol.TField PARENT_SPAN_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("parentSpanId", org.apache.thrift.protocol.TType.I64, (short)8); private static final org.apache.thrift.protocol.TField START_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("startTime", org.apache.thrift.protocol.TType.I64, (short)9); private static final org.apache.thrift.protocol.TField ELAPSED_FIELD_DESC = new org.apache.thrift.protocol.TField("elapsed", org.apache.thrift.protocol.TType.I32, (short)10); private static final org.apache.thrift.protocol.TField RPC_FIELD_DESC = new org.apache.thrift.protocol.TField("rpc", org.apache.thrift.protocol.TType.STRING, (short)11); private static final org.apache.thrift.protocol.TField SERVICE_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("serviceType", org.apache.thrift.protocol.TType.I16, (short)12); private static final org.apache.thrift.protocol.TField END_POINT_FIELD_DESC = new org.apache.thrift.protocol.TField("endPoint", org.apache.thrift.protocol.TType.STRING, (short)13); private static final org.apache.thrift.protocol.TField REMOTE_ADDR_FIELD_DESC = new org.apache.thrift.protocol.TField("remoteAddr", org.apache.thrift.protocol.TType.STRING, (short)14); private static final org.apache.thrift.protocol.TField ANNOTATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("annotations", org.apache.thrift.protocol.TType.LIST, (short)15); private static final org.apache.thrift.protocol.TField FLAG_FIELD_DESC = new org.apache.thrift.protocol.TField("flag", org.apache.thrift.protocol.TType.I16, (short)16); private 
static final org.apache.thrift.protocol.TField ERR_FIELD_DESC = new org.apache.thrift.protocol.TField("err", org.apache.thrift.protocol.TType.I32, (short)17); private static final org.apache.thrift.protocol.TField SPAN_EVENT_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("spanEventList", org.apache.thrift.protocol.TType.LIST, (short)18); private static final org.apache.thrift.protocol.TField PARENT_APPLICATION_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("parentApplicationName", org.apache.thrift.protocol.TType.STRING, (short)19); private static final org.apache.thrift.protocol.TField PARENT_APPLICATION_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("parentApplicationType", org.apache.thrift.protocol.TType.I16, (short)20); private static final org.apache.thrift.protocol.TField ACCEPTOR_HOST_FIELD_DESC = new org.apache.thrift.protocol.TField("acceptorHost", org.apache.thrift.protocol.TType.STRING, (short)21); private static final org.apache.thrift.protocol.TField API_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("apiId", org.apache.thrift.protocol.TType.I32, (short)25); private static final org.apache.thrift.protocol.TField EXCEPTION_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("exceptionInfo", org.apache.thrift.protocol.TType.STRUCT, (short)26); private static final org.apache.thrift.protocol.TField APPLICATION_SERVICE_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("applicationServiceType", org.apache.thrift.protocol.TType.I16, (short)30); private static final org.apache.thrift.protocol.TField LOGGING_TRANSACTION_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("loggingTransactionInfo", org.apache.thrift.protocol.TType.BYTE, (short)31); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TSpanStandardSchemeFactory()); schemes.put(TupleScheme.class, new TSpanTupleSchemeFactory()); } public String agentId; // required public String applicationName; // required public long agentStartTime; // required public ByteBuffer transactionId; // required public long spanId; // required public long parentSpanId; // optional public long startTime; // required public int elapsed; // optional public String rpc; // optional public short serviceType; // required public String endPoint; // optional public String remoteAddr; // optional public List<TAnnotation> annotations; // optional public short flag; // optional public int err; // optional public List<TSpanEvent> spanEventList; // optional public String parentApplicationName; // optional public short parentApplicationType; // optional public String acceptorHost; // optional public int apiId; // optional public TIntStringValue exceptionInfo; // optional public short applicationServiceType; // optional public byte loggingTransactionInfo; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { AGENT_ID((short)1, "agentId"), APPLICATION_NAME((short)2, "applicationName"), AGENT_START_TIME((short)3, "agentStartTime"), TRANSACTION_ID((short)4, "transactionId"), SPAN_ID((short)7, "spanId"), PARENT_SPAN_ID((short)8, "parentSpanId"), START_TIME((short)9, "startTime"), ELAPSED((short)10, "elapsed"), RPC((short)11, "rpc"), SERVICE_TYPE((short)12, "serviceType"), END_POINT((short)13, "endPoint"), REMOTE_ADDR((short)14, "remoteAddr"), ANNOTATIONS((short)15, "annotations"), FLAG((short)16, "flag"), ERR((short)17, "err"), SPAN_EVENT_LIST((short)18, "spanEventList"), PARENT_APPLICATION_NAME((short)19, "parentApplicationName"), PARENT_APPLICATION_TYPE((short)20, "parentApplicationType"), ACCEPTOR_HOST((short)21, "acceptorHost"), API_ID((short)25, "apiId"), EXCEPTION_INFO((short)26, "exceptionInfo"), APPLICATION_SERVICE_TYPE((short)30, "applicationServiceType"), LOGGING_TRANSACTION_INFO((short)31, "loggingTransactionInfo"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // AGENT_ID return AGENT_ID; case 2: // APPLICATION_NAME return APPLICATION_NAME; case 3: // AGENT_START_TIME return AGENT_START_TIME; case 4: // TRANSACTION_ID return TRANSACTION_ID; case 7: // SPAN_ID return SPAN_ID; case 8: // PARENT_SPAN_ID return PARENT_SPAN_ID; case 9: // START_TIME return START_TIME; case 10: // ELAPSED return ELAPSED; case 11: // RPC return RPC; case 12: // SERVICE_TYPE return SERVICE_TYPE; case 13: // END_POINT return END_POINT; case 14: // REMOTE_ADDR return REMOTE_ADDR; case 15: // ANNOTATIONS return ANNOTATIONS; case 16: // FLAG return FLAG; case 17: // ERR return ERR; case 18: // SPAN_EVENT_LIST return SPAN_EVENT_LIST; case 19: // PARENT_APPLICATION_NAME return PARENT_APPLICATION_NAME; case 20: // PARENT_APPLICATION_TYPE return PARENT_APPLICATION_TYPE; case 21: // ACCEPTOR_HOST return ACCEPTOR_HOST; case 25: // API_ID return API_ID; case 26: // EXCEPTION_INFO return EXCEPTION_INFO; case 30: // APPLICATION_SERVICE_TYPE return APPLICATION_SERVICE_TYPE; case 31: // LOGGING_TRANSACTION_INFO return LOGGING_TRANSACTION_INFO; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public String getFieldName() { return _fieldName; } } // isset id assignments private static final int __AGENTSTARTTIME_ISSET_ID = 0; private static final int __SPANID_ISSET_ID = 1; private static final int __PARENTSPANID_ISSET_ID = 2; private static final int __STARTTIME_ISSET_ID = 3; private static final int __ELAPSED_ISSET_ID = 4; private static final int __SERVICETYPE_ISSET_ID = 5; private static final int __FLAG_ISSET_ID = 6; private static final int __ERR_ISSET_ID = 7; private static final int __PARENTAPPLICATIONTYPE_ISSET_ID = 8; private static final int __APIID_ISSET_ID = 9; private static final int __APPLICATIONSERVICETYPE_ISSET_ID = 10; private static final int __LOGGINGTRANSACTIONINFO_ISSET_ID = 11; private short __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.PARENT_SPAN_ID,_Fields.ELAPSED,_Fields.RPC,_Fields.END_POINT,_Fields.REMOTE_ADDR,_Fields.ANNOTATIONS,_Fields.FLAG,_Fields.ERR,_Fields.SPAN_EVENT_LIST,_Fields.PARENT_APPLICATION_NAME,_Fields.PARENT_APPLICATION_TYPE,_Fields.ACCEPTOR_HOST,_Fields.API_ID,_Fields.EXCEPTION_INFO,_Fields.APPLICATION_SERVICE_TYPE,_Fields.LOGGING_TRANSACTION_INFO}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.AGENT_ID, new org.apache.thrift.meta_data.FieldMetaData("agentId", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.APPLICATION_NAME, new 
org.apache.thrift.meta_data.FieldMetaData("applicationName", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.AGENT_START_TIME, new org.apache.thrift.meta_data.FieldMetaData("agentStartTime", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.TRANSACTION_ID, new org.apache.thrift.meta_data.FieldMetaData("transactionId", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))); tmpMap.put(_Fields.SPAN_ID, new org.apache.thrift.meta_data.FieldMetaData("spanId", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.PARENT_SPAN_ID, new org.apache.thrift.meta_data.FieldMetaData("parentSpanId", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.START_TIME, new org.apache.thrift.meta_data.FieldMetaData("startTime", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); tmpMap.put(_Fields.ELAPSED, new org.apache.thrift.meta_data.FieldMetaData("elapsed", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.RPC, new org.apache.thrift.meta_data.FieldMetaData("rpc", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.SERVICE_TYPE, new org.apache.thrift.meta_data.FieldMetaData("serviceType", org.apache.thrift.TFieldRequirementType.DEFAULT, new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16))); tmpMap.put(_Fields.END_POINT, new org.apache.thrift.meta_data.FieldMetaData("endPoint", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.REMOTE_ADDR, new org.apache.thrift.meta_data.FieldMetaData("remoteAddr", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.ANNOTATIONS, new org.apache.thrift.meta_data.FieldMetaData("annotations", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TAnnotation.class)))); tmpMap.put(_Fields.FLAG, new org.apache.thrift.meta_data.FieldMetaData("flag", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16))); tmpMap.put(_Fields.ERR, new org.apache.thrift.meta_data.FieldMetaData("err", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.SPAN_EVENT_LIST, new org.apache.thrift.meta_data.FieldMetaData("spanEventList", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TSpanEvent.class)))); tmpMap.put(_Fields.PARENT_APPLICATION_NAME, new org.apache.thrift.meta_data.FieldMetaData("parentApplicationName", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.PARENT_APPLICATION_TYPE, new 
org.apache.thrift.meta_data.FieldMetaData("parentApplicationType", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16))); tmpMap.put(_Fields.ACCEPTOR_HOST, new org.apache.thrift.meta_data.FieldMetaData("acceptorHost", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.API_ID, new org.apache.thrift.meta_data.FieldMetaData("apiId", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.EXCEPTION_INFO, new org.apache.thrift.meta_data.FieldMetaData("exceptionInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TIntStringValue.class))); tmpMap.put(_Fields.APPLICATION_SERVICE_TYPE, new org.apache.thrift.meta_data.FieldMetaData("applicationServiceType", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16))); tmpMap.put(_Fields.LOGGING_TRANSACTION_INFO, new org.apache.thrift.meta_data.FieldMetaData("loggingTransactionInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BYTE))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TSpan.class, metaDataMap); } public TSpan() { this.parentSpanId = -1L; this.elapsed = 0; this.flag = (short)0; } public TSpan( String agentId, String applicationName, long agentStartTime, ByteBuffer transactionId, long spanId, long startTime, short serviceType) { this(); this.agentId = agentId; this.applicationName = applicationName; this.agentStartTime = agentStartTime; setAgentStartTimeIsSet(true); this.transactionId = 
org.apache.thrift.TBaseHelper.copyBinary(transactionId); this.spanId = spanId; setSpanIdIsSet(true); this.startTime = startTime; setStartTimeIsSet(true); this.serviceType = serviceType; setServiceTypeIsSet(true); } /** * Performs a deep copy on <i>other</i>. */ public TSpan(TSpan other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetAgentId()) { this.agentId = other.agentId; } if (other.isSetApplicationName()) { this.applicationName = other.applicationName; } this.agentStartTime = other.agentStartTime; if (other.isSetTransactionId()) { this.transactionId = org.apache.thrift.TBaseHelper.copyBinary(other.transactionId); } this.spanId = other.spanId; this.parentSpanId = other.parentSpanId; this.startTime = other.startTime; this.elapsed = other.elapsed; if (other.isSetRpc()) { this.rpc = other.rpc; } this.serviceType = other.serviceType; if (other.isSetEndPoint()) { this.endPoint = other.endPoint; } if (other.isSetRemoteAddr()) { this.remoteAddr = other.remoteAddr; } if (other.isSetAnnotations()) { List<TAnnotation> __this__annotations = new ArrayList<TAnnotation>(other.annotations.size()); for (TAnnotation other_element : other.annotations) { __this__annotations.add(new TAnnotation(other_element)); } this.annotations = __this__annotations; } this.flag = other.flag; this.err = other.err; if (other.isSetSpanEventList()) { List<TSpanEvent> __this__spanEventList = new ArrayList<TSpanEvent>(other.spanEventList.size()); for (TSpanEvent other_element : other.spanEventList) { __this__spanEventList.add(new TSpanEvent(other_element)); } this.spanEventList = __this__spanEventList; } if (other.isSetParentApplicationName()) { this.parentApplicationName = other.parentApplicationName; } this.parentApplicationType = other.parentApplicationType; if (other.isSetAcceptorHost()) { this.acceptorHost = other.acceptorHost; } this.apiId = other.apiId; if (other.isSetExceptionInfo()) { this.exceptionInfo = new TIntStringValue(other.exceptionInfo); } this.applicationServiceType = 
other.applicationServiceType; this.loggingTransactionInfo = other.loggingTransactionInfo; } public TSpan deepCopy() { return new TSpan(this); } @Override public void clear() { this.agentId = null; this.applicationName = null; setAgentStartTimeIsSet(false); this.agentStartTime = 0; this.transactionId = null; setSpanIdIsSet(false); this.spanId = 0; this.parentSpanId = -1L; setStartTimeIsSet(false); this.startTime = 0; this.elapsed = 0; this.rpc = null; setServiceTypeIsSet(false); this.serviceType = 0; this.endPoint = null; this.remoteAddr = null; this.annotations = null; this.flag = (short)0; setErrIsSet(false); this.err = 0; this.spanEventList = null; this.parentApplicationName = null; setParentApplicationTypeIsSet(false); this.parentApplicationType = 0; this.acceptorHost = null; setApiIdIsSet(false); this.apiId = 0; this.exceptionInfo = null; setApplicationServiceTypeIsSet(false); this.applicationServiceType = 0; setLoggingTransactionInfoIsSet(false); this.loggingTransactionInfo = 0; } public String getAgentId() { return this.agentId; } public TSpan setAgentId(String agentId) { this.agentId = agentId; return this; } public void unsetAgentId() { this.agentId = null; } /** Returns true if field agentId is set (has been assigned a value) and false otherwise */ public boolean isSetAgentId() { return this.agentId != null; } public void setAgentIdIsSet(boolean value) { if (!value) { this.agentId = null; } } public String getApplicationName() { return this.applicationName; } public TSpan setApplicationName(String applicationName) { this.applicationName = applicationName; return this; } public void unsetApplicationName() { this.applicationName = null; } /** Returns true if field applicationName is set (has been assigned a value) and false otherwise */ public boolean isSetApplicationName() { return this.applicationName != null; } public void setApplicationNameIsSet(boolean value) { if (!value) { this.applicationName = null; } } public long getAgentStartTime() { return 
// NOTE(review): This appears to be Apache Thrift compiler-generated bean code for struct TSpan
// (uses org.apache.thrift.TBaseHelper / EncodingUtils / __isset_bitfield). Do not hand-edit logic
// here — regenerate from the .thrift IDL instead. The fragment below continues getAgentStartTime()
// started on an earlier line, then defines the generated get/set/unset/isSet accessors for:
// agentStartTime, transactionId (binary, defensively copied via Arrays.copyOf / copyBinary),
// spanId, parentSpanId, startTime, elapsed, rpc, and the start of serviceType's accessors.
// Primitive fields track "set" state in the shared __isset_bitfield; object fields use null.
this.agentStartTime; } public TSpan setAgentStartTime(long agentStartTime) { this.agentStartTime = agentStartTime; setAgentStartTimeIsSet(true); return this; } public void unsetAgentStartTime() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __AGENTSTARTTIME_ISSET_ID); } /** Returns true if field agentStartTime is set (has been assigned a value) and false otherwise */ public boolean isSetAgentStartTime() { return EncodingUtils.testBit(__isset_bitfield, __AGENTSTARTTIME_ISSET_ID); } public void setAgentStartTimeIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __AGENTSTARTTIME_ISSET_ID, value); } public byte[] getTransactionId() { setTransactionId(org.apache.thrift.TBaseHelper.rightSize(transactionId)); return transactionId == null ? null : transactionId.array(); } public ByteBuffer bufferForTransactionId() { return org.apache.thrift.TBaseHelper.copyBinary(transactionId); } public TSpan setTransactionId(byte[] transactionId) { this.transactionId = transactionId == null ?
(ByteBuffer)null : ByteBuffer.wrap(Arrays.copyOf(transactionId, transactionId.length)); return this; } public TSpan setTransactionId(ByteBuffer transactionId) { this.transactionId = org.apache.thrift.TBaseHelper.copyBinary(transactionId); return this; } public void unsetTransactionId() { this.transactionId = null; } /** Returns true if field transactionId is set (has been assigned a value) and false otherwise */ public boolean isSetTransactionId() { return this.transactionId != null; } public void setTransactionIdIsSet(boolean value) { if (!value) { this.transactionId = null; } } public long getSpanId() { return this.spanId; } public TSpan setSpanId(long spanId) { this.spanId = spanId; setSpanIdIsSet(true); return this; } public void unsetSpanId() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SPANID_ISSET_ID); } /** Returns true if field spanId is set (has been assigned a value) and false otherwise */ public boolean isSetSpanId() { return EncodingUtils.testBit(__isset_bitfield, __SPANID_ISSET_ID); } public void setSpanIdIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SPANID_ISSET_ID, value); } public long getParentSpanId() { return this.parentSpanId; } public TSpan setParentSpanId(long parentSpanId) { this.parentSpanId = parentSpanId; setParentSpanIdIsSet(true); return this; } public void unsetParentSpanId() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __PARENTSPANID_ISSET_ID); } /** Returns true if field parentSpanId is set (has been assigned a value) and false otherwise */ public boolean isSetParentSpanId() { return EncodingUtils.testBit(__isset_bitfield, __PARENTSPANID_ISSET_ID); } public void setParentSpanIdIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __PARENTSPANID_ISSET_ID, value); } public long getStartTime() { return this.startTime; } public TSpan setStartTime(long startTime) { this.startTime = startTime; setStartTimeIsSet(true); return this; } public
void unsetStartTime() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __STARTTIME_ISSET_ID); } /** Returns true if field startTime is set (has been assigned a value) and false otherwise */ public boolean isSetStartTime() { return EncodingUtils.testBit(__isset_bitfield, __STARTTIME_ISSET_ID); } public void setStartTimeIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __STARTTIME_ISSET_ID, value); } public int getElapsed() { return this.elapsed; } public TSpan setElapsed(int elapsed) { this.elapsed = elapsed; setElapsedIsSet(true); return this; } public void unsetElapsed() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ELAPSED_ISSET_ID); } /** Returns true if field elapsed is set (has been assigned a value) and false otherwise */ public boolean isSetElapsed() { return EncodingUtils.testBit(__isset_bitfield, __ELAPSED_ISSET_ID); } public void setElapsedIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ELAPSED_ISSET_ID, value); } public String getRpc() { return this.rpc; } public TSpan setRpc(String rpc) { this.rpc = rpc; return this; } public void unsetRpc() { this.rpc = null; } /** Returns true if field rpc is set (has been assigned a value) and false otherwise */ public boolean isSetRpc() { return this.rpc != null; } public void setRpcIsSet(boolean value) { if (!value) { this.rpc = null; } } public short getServiceType() { return this.serviceType; } public TSpan setServiceType(short serviceType) { this.serviceType = serviceType; setServiceTypeIsSet(true); return this; } public void unsetServiceType() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SERVICETYPE_ISSET_ID); } /** Returns true if field serviceType is set (has been assigned a value) and false otherwise */ public boolean isSetServiceType() { return EncodingUtils.testBit(__isset_bitfield, __SERVICETYPE_ISSET_ID); } public void setServiceTypeIsSet(boolean value) { __isset_bitfield =
// Generated accessors (Thrift-style) for: endPoint, remoteAddr, annotations (list of TAnnotation,
// lazily created by addToAnnotations), flag, err, and the start of spanEventList's accessors.
// Opening fragment completes setServiceTypeIsSet() begun on the previous line.
EncodingUtils.setBit(__isset_bitfield, __SERVICETYPE_ISSET_ID, value); } public String getEndPoint() { return this.endPoint; } public TSpan setEndPoint(String endPoint) { this.endPoint = endPoint; return this; } public void unsetEndPoint() { this.endPoint = null; } /** Returns true if field endPoint is set (has been assigned a value) and false otherwise */ public boolean isSetEndPoint() { return this.endPoint != null; } public void setEndPointIsSet(boolean value) { if (!value) { this.endPoint = null; } } public String getRemoteAddr() { return this.remoteAddr; } public TSpan setRemoteAddr(String remoteAddr) { this.remoteAddr = remoteAddr; return this; } public void unsetRemoteAddr() { this.remoteAddr = null; } /** Returns true if field remoteAddr is set (has been assigned a value) and false otherwise */ public boolean isSetRemoteAddr() { return this.remoteAddr != null; } public void setRemoteAddrIsSet(boolean value) { if (!value) { this.remoteAddr = null; } } public int getAnnotationsSize() { return (this.annotations == null) ? 0 : this.annotations.size(); } public java.util.Iterator<TAnnotation> getAnnotationsIterator() { return (this.annotations == null) ?
null : this.annotations.iterator(); } public void addToAnnotations(TAnnotation elem) { if (this.annotations == null) { this.annotations = new ArrayList<TAnnotation>(); } this.annotations.add(elem); } public List<TAnnotation> getAnnotations() { return this.annotations; } public TSpan setAnnotations(List<TAnnotation> annotations) { this.annotations = annotations; return this; } public void unsetAnnotations() { this.annotations = null; } /** Returns true if field annotations is set (has been assigned a value) and false otherwise */ public boolean isSetAnnotations() { return this.annotations != null; } public void setAnnotationsIsSet(boolean value) { if (!value) { this.annotations = null; } } public short getFlag() { return this.flag; } public TSpan setFlag(short flag) { this.flag = flag; setFlagIsSet(true); return this; } public void unsetFlag() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __FLAG_ISSET_ID); } /** Returns true if field flag is set (has been assigned a value) and false otherwise */ public boolean isSetFlag() { return EncodingUtils.testBit(__isset_bitfield, __FLAG_ISSET_ID); } public void setFlagIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __FLAG_ISSET_ID, value); } public int getErr() { return this.err; } public TSpan setErr(int err) { this.err = err; setErrIsSet(true); return this; } public void unsetErr() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ERR_ISSET_ID); } /** Returns true if field err is set (has been assigned a value) and false otherwise */ public boolean isSetErr() { return EncodingUtils.testBit(__isset_bitfield, __ERR_ISSET_ID); } public void setErrIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ERR_ISSET_ID, value); } public int getSpanEventListSize() { return (this.spanEventList == null) ? 0 : this.spanEventList.size(); } public java.util.Iterator<TSpanEvent> getSpanEventListIterator() { return (this.spanEventList == null) ?
// Generated accessors for: spanEventList (list of TSpanEvent, lazily created), parentApplicationName,
// parentApplicationType, acceptorHost, apiId, exceptionInfo (TIntStringValue), and the start of
// applicationServiceType's accessors. Opening fragment completes getSpanEventListIterator().
null : this.spanEventList.iterator(); } public void addToSpanEventList(TSpanEvent elem) { if (this.spanEventList == null) { this.spanEventList = new ArrayList<TSpanEvent>(); } this.spanEventList.add(elem); } public List<TSpanEvent> getSpanEventList() { return this.spanEventList; } public TSpan setSpanEventList(List<TSpanEvent> spanEventList) { this.spanEventList = spanEventList; return this; } public void unsetSpanEventList() { this.spanEventList = null; } /** Returns true if field spanEventList is set (has been assigned a value) and false otherwise */ public boolean isSetSpanEventList() { return this.spanEventList != null; } public void setSpanEventListIsSet(boolean value) { if (!value) { this.spanEventList = null; } } public String getParentApplicationName() { return this.parentApplicationName; } public TSpan setParentApplicationName(String parentApplicationName) { this.parentApplicationName = parentApplicationName; return this; } public void unsetParentApplicationName() { this.parentApplicationName = null; } /** Returns true if field parentApplicationName is set (has been assigned a value) and false otherwise */ public boolean isSetParentApplicationName() { return this.parentApplicationName != null; } public void setParentApplicationNameIsSet(boolean value) { if (!value) { this.parentApplicationName = null; } } public short getParentApplicationType() { return this.parentApplicationType; } public TSpan setParentApplicationType(short parentApplicationType) { this.parentApplicationType = parentApplicationType; setParentApplicationTypeIsSet(true); return this; } public void unsetParentApplicationType() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __PARENTAPPLICATIONTYPE_ISSET_ID); } /** Returns true if field parentApplicationType is set (has been assigned a value) and false otherwise */ public boolean isSetParentApplicationType() { return EncodingUtils.testBit(__isset_bitfield, __PARENTAPPLICATIONTYPE_ISSET_ID); } public void
setParentApplicationTypeIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __PARENTAPPLICATIONTYPE_ISSET_ID, value); } public String getAcceptorHost() { return this.acceptorHost; } public TSpan setAcceptorHost(String acceptorHost) { this.acceptorHost = acceptorHost; return this; } public void unsetAcceptorHost() { this.acceptorHost = null; } /** Returns true if field acceptorHost is set (has been assigned a value) and false otherwise */ public boolean isSetAcceptorHost() { return this.acceptorHost != null; } public void setAcceptorHostIsSet(boolean value) { if (!value) { this.acceptorHost = null; } } public int getApiId() { return this.apiId; } public TSpan setApiId(int apiId) { this.apiId = apiId; setApiIdIsSet(true); return this; } public void unsetApiId() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __APIID_ISSET_ID); } /** Returns true if field apiId is set (has been assigned a value) and false otherwise */ public boolean isSetApiId() { return EncodingUtils.testBit(__isset_bitfield, __APIID_ISSET_ID); } public void setApiIdIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __APIID_ISSET_ID, value); } public TIntStringValue getExceptionInfo() { return this.exceptionInfo; } public TSpan setExceptionInfo(TIntStringValue exceptionInfo) { this.exceptionInfo = exceptionInfo; return this; } public void unsetExceptionInfo() { this.exceptionInfo = null; } /** Returns true if field exceptionInfo is set (has been assigned a value) and false otherwise */ public boolean isSetExceptionInfo() { return this.exceptionInfo != null; } public void setExceptionInfoIsSet(boolean value) { if (!value) { this.exceptionInfo = null; } } public short getApplicationServiceType() { return this.applicationServiceType; } public TSpan setApplicationServiceType(short applicationServiceType) { this.applicationServiceType = applicationServiceType; setApplicationServiceTypeIsSet(true); return this; } public void
// Completes the applicationServiceType/loggingTransactionInfo accessors, then the generated
// setFieldValue(_Fields, Object) dispatcher: for each field enum constant, a null value maps to
// unsetX() and a non-null value is cast (unchecked) to the field's boxed type and passed to setX().
unsetApplicationServiceType() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __APPLICATIONSERVICETYPE_ISSET_ID); } /** Returns true if field applicationServiceType is set (has been assigned a value) and false otherwise */ public boolean isSetApplicationServiceType() { return EncodingUtils.testBit(__isset_bitfield, __APPLICATIONSERVICETYPE_ISSET_ID); } public void setApplicationServiceTypeIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __APPLICATIONSERVICETYPE_ISSET_ID, value); } public byte getLoggingTransactionInfo() { return this.loggingTransactionInfo; } public TSpan setLoggingTransactionInfo(byte loggingTransactionInfo) { this.loggingTransactionInfo = loggingTransactionInfo; setLoggingTransactionInfoIsSet(true); return this; } public void unsetLoggingTransactionInfo() { __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __LOGGINGTRANSACTIONINFO_ISSET_ID); } /** Returns true if field loggingTransactionInfo is set (has been assigned a value) and false otherwise */ public boolean isSetLoggingTransactionInfo() { return EncodingUtils.testBit(__isset_bitfield, __LOGGINGTRANSACTIONINFO_ISSET_ID); } public void setLoggingTransactionInfoIsSet(boolean value) { __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __LOGGINGTRANSACTIONINFO_ISSET_ID, value); } public void setFieldValue(_Fields field, Object value) { switch (field) { case AGENT_ID: if (value == null) { unsetAgentId(); } else { setAgentId((String)value); } break; case APPLICATION_NAME: if (value == null) { unsetApplicationName(); } else { setApplicationName((String)value); } break; case AGENT_START_TIME: if (value == null) { unsetAgentStartTime(); } else { setAgentStartTime((Long)value); } break; case TRANSACTION_ID: if (value == null) { unsetTransactionId(); } else { setTransactionId((ByteBuffer)value); } break; case SPAN_ID: if (value == null) { unsetSpanId(); } else { setSpanId((Long)value); } break; case PARENT_SPAN_ID: if (value == null) {
unsetParentSpanId(); } else { setParentSpanId((Long)value); } break; case START_TIME: if (value == null) { unsetStartTime(); } else { setStartTime((Long)value); } break; case ELAPSED: if (value == null) { unsetElapsed(); } else { setElapsed((Integer)value); } break; case RPC: if (value == null) { unsetRpc(); } else { setRpc((String)value); } break; case SERVICE_TYPE: if (value == null) { unsetServiceType(); } else { setServiceType((Short)value); } break; case END_POINT: if (value == null) { unsetEndPoint(); } else { setEndPoint((String)value); } break; case REMOTE_ADDR: if (value == null) { unsetRemoteAddr(); } else { setRemoteAddr((String)value); } break; case ANNOTATIONS: if (value == null) { unsetAnnotations(); } else { setAnnotations((List<TAnnotation>)value); } break; case FLAG: if (value == null) { unsetFlag(); } else { setFlag((Short)value); } break; case ERR: if (value == null) { unsetErr(); } else { setErr((Integer)value); } break; case SPAN_EVENT_LIST: if (value == null) { unsetSpanEventList(); } else { setSpanEventList((List<TSpanEvent>)value); } break; case PARENT_APPLICATION_NAME: if (value == null) { unsetParentApplicationName(); } else { setParentApplicationName((String)value); } break; case PARENT_APPLICATION_TYPE: if (value == null) { unsetParentApplicationType(); } else { setParentApplicationType((Short)value); } break; case ACCEPTOR_HOST: if (value == null) { unsetAcceptorHost(); } else { setAcceptorHost((String)value); } break; case API_ID: if (value == null) { unsetApiId(); } else { setApiId((Integer)value); } break; case EXCEPTION_INFO: if (value == null) { unsetExceptionInfo(); } else { setExceptionInfo((TIntStringValue)value); } break; case APPLICATION_SERVICE_TYPE: if (value == null) { unsetApplicationServiceType(); } else { setApplicationServiceType((Short)value); } break; case LOGGING_TRANSACTION_INFO: if (value == null) { unsetLoggingTransactionInfo(); } else { setLoggingTransactionInfo((Byte)value); } break; } } public Object
// Generated getFieldValue(_Fields) (boxes primitives via valueOf) and isSet(_Fields) dispatchers —
// both throw IllegalStateException for an unmatched enum constant — followed by equals(Object)
// delegating to equals(TSpan) and the start of the per-field equals(TSpan) comparison.
getFieldValue(_Fields field) { switch (field) { case AGENT_ID: return getAgentId(); case APPLICATION_NAME: return getApplicationName(); case AGENT_START_TIME: return Long.valueOf(getAgentStartTime()); case TRANSACTION_ID: return getTransactionId(); case SPAN_ID: return Long.valueOf(getSpanId()); case PARENT_SPAN_ID: return Long.valueOf(getParentSpanId()); case START_TIME: return Long.valueOf(getStartTime()); case ELAPSED: return Integer.valueOf(getElapsed()); case RPC: return getRpc(); case SERVICE_TYPE: return Short.valueOf(getServiceType()); case END_POINT: return getEndPoint(); case REMOTE_ADDR: return getRemoteAddr(); case ANNOTATIONS: return getAnnotations(); case FLAG: return Short.valueOf(getFlag()); case ERR: return Integer.valueOf(getErr()); case SPAN_EVENT_LIST: return getSpanEventList(); case PARENT_APPLICATION_NAME: return getParentApplicationName(); case PARENT_APPLICATION_TYPE: return Short.valueOf(getParentApplicationType()); case ACCEPTOR_HOST: return getAcceptorHost(); case API_ID: return Integer.valueOf(getApiId()); case EXCEPTION_INFO: return getExceptionInfo(); case APPLICATION_SERVICE_TYPE: return Short.valueOf(getApplicationServiceType()); case LOGGING_TRANSACTION_INFO: return Byte.valueOf(getLoggingTransactionInfo()); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case AGENT_ID: return isSetAgentId(); case APPLICATION_NAME: return isSetApplicationName(); case AGENT_START_TIME: return isSetAgentStartTime(); case TRANSACTION_ID: return isSetTransactionId(); case SPAN_ID: return isSetSpanId(); case PARENT_SPAN_ID: return isSetParentSpanId(); case START_TIME: return isSetStartTime(); case ELAPSED: return isSetElapsed(); case RPC: return isSetRpc(); case SERVICE_TYPE: return isSetServiceType(); case END_POINT: return isSetEndPoint();
case REMOTE_ADDR: return isSetRemoteAddr(); case ANNOTATIONS: return isSetAnnotations(); case FLAG: return isSetFlag(); case ERR: return isSetErr(); case SPAN_EVENT_LIST: return isSetSpanEventList(); case PARENT_APPLICATION_NAME: return isSetParentApplicationName(); case PARENT_APPLICATION_TYPE: return isSetParentApplicationType(); case ACCEPTOR_HOST: return isSetAcceptorHost(); case API_ID: return isSetApiId(); case EXCEPTION_INFO: return isSetExceptionInfo(); case APPLICATION_SERVICE_TYPE: return isSetApplicationServiceType(); case LOGGING_TRANSACTION_INFO: return isSetLoggingTransactionInfo(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof TSpan) return this.equals((TSpan)that); return false; } public boolean equals(TSpan that) { if (that == null) return false; boolean this_present_agentId = true && this.isSetAgentId(); boolean that_present_agentId = true && that.isSetAgentId(); if (this_present_agentId || that_present_agentId) { if (!(this_present_agentId && that_present_agentId)) return false; if (!this.agentId.equals(that.agentId)) return false; } boolean this_present_applicationName = true && this.isSetApplicationName(); boolean that_present_applicationName = true && that.isSetApplicationName(); if (this_present_applicationName || that_present_applicationName) { if (!(this_present_applicationName && that_present_applicationName)) return false; if (!this.applicationName.equals(that.applicationName)) return false; } boolean this_present_agentStartTime = true; boolean that_present_agentStartTime = true; if (this_present_agentStartTime || that_present_agentStartTime) { if (!(this_present_agentStartTime && that_present_agentStartTime)) return false; if (this.agentStartTime != that.agentStartTime) return false; } boolean this_present_transactionId = true && this.isSetTransactionId(); boolean that_present_transactionId = true && that.isSetTransactionId(); if
// Continuation of the generated per-field equals(TSpan): required primitive fields (agentStartTime,
// spanId, startTime, serviceType) are compared unconditionally ("present" flags hard-coded true);
// optional fields are compared only when set on both sides, and "set on one side only" is unequal.
(this_present_transactionId || that_present_transactionId) { if (!(this_present_transactionId && that_present_transactionId)) return false; if (!this.transactionId.equals(that.transactionId)) return false; } boolean this_present_spanId = true; boolean that_present_spanId = true; if (this_present_spanId || that_present_spanId) { if (!(this_present_spanId && that_present_spanId)) return false; if (this.spanId != that.spanId) return false; } boolean this_present_parentSpanId = true && this.isSetParentSpanId(); boolean that_present_parentSpanId = true && that.isSetParentSpanId(); if (this_present_parentSpanId || that_present_parentSpanId) { if (!(this_present_parentSpanId && that_present_parentSpanId)) return false; if (this.parentSpanId != that.parentSpanId) return false; } boolean this_present_startTime = true; boolean that_present_startTime = true; if (this_present_startTime || that_present_startTime) { if (!(this_present_startTime && that_present_startTime)) return false; if (this.startTime != that.startTime) return false; } boolean this_present_elapsed = true && this.isSetElapsed(); boolean that_present_elapsed = true && that.isSetElapsed(); if (this_present_elapsed || that_present_elapsed) { if (!(this_present_elapsed && that_present_elapsed)) return false; if (this.elapsed != that.elapsed) return false; } boolean this_present_rpc = true && this.isSetRpc(); boolean that_present_rpc = true && that.isSetRpc(); if (this_present_rpc || that_present_rpc) { if (!(this_present_rpc && that_present_rpc)) return false; if (!this.rpc.equals(that.rpc)) return false; } boolean this_present_serviceType = true; boolean that_present_serviceType = true; if (this_present_serviceType || that_present_serviceType) { if (!(this_present_serviceType && that_present_serviceType)) return false; if (this.serviceType != that.serviceType) return false; } boolean this_present_endPoint = true && this.isSetEndPoint(); boolean that_present_endPoint = true && that.isSetEndPoint(); if
(this_present_endPoint || that_present_endPoint) { if (!(this_present_endPoint && that_present_endPoint)) return false; if (!this.endPoint.equals(that.endPoint)) return false; } boolean this_present_remoteAddr = true && this.isSetRemoteAddr(); boolean that_present_remoteAddr = true && that.isSetRemoteAddr(); if (this_present_remoteAddr || that_present_remoteAddr) { if (!(this_present_remoteAddr && that_present_remoteAddr)) return false; if (!this.remoteAddr.equals(that.remoteAddr)) return false; } boolean this_present_annotations = true && this.isSetAnnotations(); boolean that_present_annotations = true && that.isSetAnnotations(); if (this_present_annotations || that_present_annotations) { if (!(this_present_annotations && that_present_annotations)) return false; if (!this.annotations.equals(that.annotations)) return false; } boolean this_present_flag = true && this.isSetFlag(); boolean that_present_flag = true && that.isSetFlag(); if (this_present_flag || that_present_flag) { if (!(this_present_flag && that_present_flag)) return false; if (this.flag != that.flag) return false; } boolean this_present_err = true && this.isSetErr(); boolean that_present_err = true && that.isSetErr(); if (this_present_err || that_present_err) { if (!(this_present_err && that_present_err)) return false; if (this.err != that.err) return false; } boolean this_present_spanEventList = true && this.isSetSpanEventList(); boolean that_present_spanEventList = true && that.isSetSpanEventList(); if (this_present_spanEventList || that_present_spanEventList) { if (!(this_present_spanEventList && that_present_spanEventList)) return false; if (!this.spanEventList.equals(that.spanEventList)) return false; } boolean this_present_parentApplicationName = true && this.isSetParentApplicationName(); boolean that_present_parentApplicationName = true && that.isSetParentApplicationName(); if (this_present_parentApplicationName || that_present_parentApplicationName) { if (!(this_present_parentApplicationName
&& that_present_parentApplicationName)) return false; if (!this.parentApplicationName.equals(that.parentApplicationName)) return false; } boolean this_present_parentApplicationType = true && this.isSetParentApplicationType(); boolean that_present_parentApplicationType = true && that.isSetParentApplicationType(); if (this_present_parentApplicationType || that_present_parentApplicationType) { if (!(this_present_parentApplicationType && that_present_parentApplicationType)) return false; if (this.parentApplicationType != that.parentApplicationType) return false; } boolean this_present_acceptorHost = true && this.isSetAcceptorHost(); boolean that_present_acceptorHost = true && that.isSetAcceptorHost(); if (this_present_acceptorHost || that_present_acceptorHost) { if (!(this_present_acceptorHost && that_present_acceptorHost)) return false; if (!this.acceptorHost.equals(that.acceptorHost)) return false; } boolean this_present_apiId = true && this.isSetApiId(); boolean that_present_apiId = true && that.isSetApiId(); if (this_present_apiId || that_present_apiId) { if (!(this_present_apiId && that_present_apiId)) return false; if (this.apiId != that.apiId) return false; } boolean this_present_exceptionInfo = true && this.isSetExceptionInfo(); boolean that_present_exceptionInfo = true && that.isSetExceptionInfo(); if (this_present_exceptionInfo || that_present_exceptionInfo) { if (!(this_present_exceptionInfo && that_present_exceptionInfo)) return false; if (!this.exceptionInfo.equals(that.exceptionInfo)) return false; } boolean this_present_applicationServiceType = true && this.isSetApplicationServiceType(); boolean that_present_applicationServiceType = true && that.isSetApplicationServiceType(); if (this_present_applicationServiceType || that_present_applicationServiceType) { if (!(this_present_applicationServiceType && that_present_applicationServiceType)) return false; if (this.applicationServiceType != that.applicationServiceType) return false; } boolean
// Tail of equals(TSpan) (loggingTransactionInfo field), then the generated hashCode(): builds a
// List<Object> of (present-flag, value) pairs mirroring equals' field treatment and returns
// list.hashCode(), keeping hashCode consistent with equals. Ends at the start of compareTo(TSpan).
this_present_loggingTransactionInfo = true && this.isSetLoggingTransactionInfo(); boolean that_present_loggingTransactionInfo = true && that.isSetLoggingTransactionInfo(); if (this_present_loggingTransactionInfo || that_present_loggingTransactionInfo) { if (!(this_present_loggingTransactionInfo && that_present_loggingTransactionInfo)) return false; if (this.loggingTransactionInfo != that.loggingTransactionInfo) return false; } return true; } @Override public int hashCode() { List<Object> list = new ArrayList<Object>(); boolean present_agentId = true && (isSetAgentId()); list.add(present_agentId); if (present_agentId) list.add(agentId); boolean present_applicationName = true && (isSetApplicationName()); list.add(present_applicationName); if (present_applicationName) list.add(applicationName); boolean present_agentStartTime = true; list.add(present_agentStartTime); if (present_agentStartTime) list.add(agentStartTime); boolean present_transactionId = true && (isSetTransactionId()); list.add(present_transactionId); if (present_transactionId) list.add(transactionId); boolean present_spanId = true; list.add(present_spanId); if (present_spanId) list.add(spanId); boolean present_parentSpanId = true && (isSetParentSpanId()); list.add(present_parentSpanId); if (present_parentSpanId) list.add(parentSpanId); boolean present_startTime = true; list.add(present_startTime); if (present_startTime) list.add(startTime); boolean present_elapsed = true && (isSetElapsed()); list.add(present_elapsed); if (present_elapsed) list.add(elapsed); boolean present_rpc = true && (isSetRpc()); list.add(present_rpc); if (present_rpc) list.add(rpc); boolean present_serviceType = true; list.add(present_serviceType); if (present_serviceType) list.add(serviceType); boolean present_endPoint = true && (isSetEndPoint()); list.add(present_endPoint); if (present_endPoint) list.add(endPoint); boolean present_remoteAddr = true && (isSetRemoteAddr()); list.add(present_remoteAddr); if (present_remoteAddr)
list.add(remoteAddr); boolean present_annotations = true && (isSetAnnotations()); list.add(present_annotations); if (present_annotations) list.add(annotations); boolean present_flag = true && (isSetFlag()); list.add(present_flag); if (present_flag) list.add(flag); boolean present_err = true && (isSetErr()); list.add(present_err); if (present_err) list.add(err); boolean present_spanEventList = true && (isSetSpanEventList()); list.add(present_spanEventList); if (present_spanEventList) list.add(spanEventList); boolean present_parentApplicationName = true && (isSetParentApplicationName()); list.add(present_parentApplicationName); if (present_parentApplicationName) list.add(parentApplicationName); boolean present_parentApplicationType = true && (isSetParentApplicationType()); list.add(present_parentApplicationType); if (present_parentApplicationType) list.add(parentApplicationType); boolean present_acceptorHost = true && (isSetAcceptorHost()); list.add(present_acceptorHost); if (present_acceptorHost) list.add(acceptorHost); boolean present_apiId = true && (isSetApiId()); list.add(present_apiId); if (present_apiId) list.add(apiId); boolean present_exceptionInfo = true && (isSetExceptionInfo()); list.add(present_exceptionInfo); if (present_exceptionInfo) list.add(exceptionInfo); boolean present_applicationServiceType = true && (isSetApplicationServiceType()); list.add(present_applicationServiceType); if (present_applicationServiceType) list.add(applicationServiceType); boolean present_loggingTransactionInfo = true && (isSetLoggingTransactionInfo()); list.add(present_loggingTransactionInfo); if (present_loggingTransactionInfo) list.add(loggingTransactionInfo); return list.hashCode(); } @Override public int compareTo(TSpan other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = Boolean.valueOf(isSetAgentId()).compareTo(other.isSetAgentId()); if (lastComparison != 0) {
// Body of the generated compareTo(TSpan): for each field in declaration order, first compares the
// isSet flags, then (when set on this side) the values via TBaseHelper.compareTo, returning on the
// first nonzero result. Followed by fieldForId(int), and read/write which delegate (de)serialization
// to the scheme selected by the protocol (schemes map declared elsewhere in this class), then the
// start of toString().
return lastComparison; } if (isSetAgentId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.agentId, other.agentId); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetApplicationName()).compareTo(other.isSetApplicationName()); if (lastComparison != 0) { return lastComparison; } if (isSetApplicationName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.applicationName, other.applicationName); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetAgentStartTime()).compareTo(other.isSetAgentStartTime()); if (lastComparison != 0) { return lastComparison; } if (isSetAgentStartTime()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.agentStartTime, other.agentStartTime); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetTransactionId()).compareTo(other.isSetTransactionId()); if (lastComparison != 0) { return lastComparison; } if (isSetTransactionId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.transactionId, other.transactionId); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetSpanId()).compareTo(other.isSetSpanId()); if (lastComparison != 0) { return lastComparison; } if (isSetSpanId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.spanId, other.spanId); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetParentSpanId()).compareTo(other.isSetParentSpanId()); if (lastComparison != 0) { return lastComparison; } if (isSetParentSpanId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.parentSpanId, other.parentSpanId); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetStartTime()).compareTo(other.isSetStartTime()); if (lastComparison != 0) { return lastComparison; } if (isSetStartTime()) { lastComparison =
org.apache.thrift.TBaseHelper.compareTo(this.startTime, other.startTime); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetElapsed()).compareTo(other.isSetElapsed()); if (lastComparison != 0) { return lastComparison; } if (isSetElapsed()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.elapsed, other.elapsed); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetRpc()).compareTo(other.isSetRpc()); if (lastComparison != 0) { return lastComparison; } if (isSetRpc()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.rpc, other.rpc); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetServiceType()).compareTo(other.isSetServiceType()); if (lastComparison != 0) { return lastComparison; } if (isSetServiceType()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.serviceType, other.serviceType); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetEndPoint()).compareTo(other.isSetEndPoint()); if (lastComparison != 0) { return lastComparison; } if (isSetEndPoint()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.endPoint, other.endPoint); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetRemoteAddr()).compareTo(other.isSetRemoteAddr()); if (lastComparison != 0) { return lastComparison; } if (isSetRemoteAddr()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.remoteAddr, other.remoteAddr); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetAnnotations()).compareTo(other.isSetAnnotations()); if (lastComparison != 0) { return lastComparison; } if (isSetAnnotations()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.annotations, other.annotations); if (lastComparison != 0) { return lastComparison; } } lastComparison =
Boolean.valueOf(isSetFlag()).compareTo(other.isSetFlag()); if (lastComparison != 0) { return lastComparison; } if (isSetFlag()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.flag, other.flag); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetErr()).compareTo(other.isSetErr()); if (lastComparison != 0) { return lastComparison; } if (isSetErr()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.err, other.err); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetSpanEventList()).compareTo(other.isSetSpanEventList()); if (lastComparison != 0) { return lastComparison; } if (isSetSpanEventList()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.spanEventList, other.spanEventList); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetParentApplicationName()).compareTo(other.isSetParentApplicationName()); if (lastComparison != 0) { return lastComparison; } if (isSetParentApplicationName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.parentApplicationName, other.parentApplicationName); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetParentApplicationType()).compareTo(other.isSetParentApplicationType()); if (lastComparison != 0) { return lastComparison; } if (isSetParentApplicationType()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.parentApplicationType, other.parentApplicationType); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetAcceptorHost()).compareTo(other.isSetAcceptorHost()); if (lastComparison != 0) { return lastComparison; } if (isSetAcceptorHost()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.acceptorHost, other.acceptorHost); if (lastComparison != 0) { return lastComparison; } } lastComparison =
Boolean.valueOf(isSetApiId()).compareTo(other.isSetApiId()); if (lastComparison != 0) { return lastComparison; } if (isSetApiId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.apiId, other.apiId); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetExceptionInfo()).compareTo(other.isSetExceptionInfo()); if (lastComparison != 0) { return lastComparison; } if (isSetExceptionInfo()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.exceptionInfo, other.exceptionInfo); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetApplicationServiceType()).compareTo(other.isSetApplicationServiceType()); if (lastComparison != 0) { return lastComparison; } if (isSetApplicationServiceType()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.applicationServiceType, other.applicationServiceType); if (lastComparison != 0) { return lastComparison; } } lastComparison = Boolean.valueOf(isSetLoggingTransactionInfo()).compareTo(other.isSetLoggingTransactionInfo()); if (lastComparison != 0) { return lastComparison; } if (isSetLoggingTransactionInfo()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.loggingTransactionInfo, other.loggingTransactionInfo); if (lastComparison != 0) { return lastComparison; } } return 0; } public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("TSpan("); boolean first = true; sb.append("agentId:"); if (this.agentId == null) { sb.append("null"); } else { sb.append(this.agentId); } first = false; if
(!first) sb.append(", "); sb.append("applicationName:"); if (this.applicationName == null) { sb.append("null"); } else { sb.append(this.applicationName); } first = false; if (!first) sb.append(", "); sb.append("agentStartTime:"); sb.append(this.agentStartTime); first = false; if (!first) sb.append(", "); sb.append("transactionId:"); if (this.transactionId == null) { sb.append("null"); } else { org.apache.thrift.TBaseHelper.toString(this.transactionId, sb); } first = false; if (!first) sb.append(", "); sb.append("spanId:"); sb.append(this.spanId); first = false; if (isSetParentSpanId()) { if (!first) sb.append(", "); sb.append("parentSpanId:"); sb.append(this.parentSpanId); first = false; } if (!first) sb.append(", "); sb.append("startTime:"); sb.append(this.startTime); first = false; if (isSetElapsed()) { if (!first) sb.append(", "); sb.append("elapsed:"); sb.append(this.elapsed); first = false; } if (isSetRpc()) { if (!first) sb.append(", "); sb.append("rpc:"); if (this.rpc == null) { sb.append("null"); } else { sb.append(this.rpc); } first = false; } if (!first) sb.append(", "); sb.append("serviceType:"); sb.append(this.serviceType); first = false; if (isSetEndPoint()) { if (!first) sb.append(", "); sb.append("endPoint:"); if (this.endPoint == null) { sb.append("null"); } else { sb.append(this.endPoint); } first = false; } if (isSetRemoteAddr()) { if (!first) sb.append(", "); sb.append("remoteAddr:"); if (this.remoteAddr == null) { sb.append("null"); } else { sb.append(this.remoteAddr); } first = false; } if (isSetAnnotations()) { if (!first) sb.append(", "); sb.append("annotations:"); if (this.annotations == null) { sb.append("null"); } else { sb.append(this.annotations); } first = false; } if (isSetFlag()) { if (!first) sb.append(", "); sb.append("flag:"); sb.append(this.flag); first = false; } if (isSetErr()) { if (!first) sb.append(", "); sb.append("err:"); sb.append(this.err); first = false; } if (isSetSpanEventList()) { if (!first) sb.append(", "); 
sb.append("spanEventList:"); if (this.spanEventList == null) { sb.append("null"); } else { sb.append(this.spanEventList); } first = false; } if (isSetParentApplicationName()) { if (!first) sb.append(", "); sb.append("parentApplicationName:"); if (this.parentApplicationName == null) { sb.append("null"); } else { sb.append(this.parentApplicationName); } first = false; } if (isSetParentApplicationType()) { if (!first) sb.append(", "); sb.append("parentApplicationType:"); sb.append(this.parentApplicationType); first = false; } if (isSetAcceptorHost()) { if (!first) sb.append(", "); sb.append("acceptorHost:"); if (this.acceptorHost == null) { sb.append("null"); } else { sb.append(this.acceptorHost); } first = false; } if (isSetApiId()) { if (!first) sb.append(", "); sb.append("apiId:"); sb.append(this.apiId); first = false; } if (isSetExceptionInfo()) { if (!first) sb.append(", "); sb.append("exceptionInfo:"); if (this.exceptionInfo == null) { sb.append("null"); } else { sb.append(this.exceptionInfo); } first = false; } if (isSetApplicationServiceType()) { if (!first) sb.append(", "); sb.append("applicationServiceType:"); sb.append(this.applicationServiceType); first = false; } if (isSetLoggingTransactionInfo()) { if (!first) sb.append(", "); sb.append("loggingTransactionInfo:"); sb.append(this.loggingTransactionInfo); first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity if (exceptionInfo != null) { exceptionInfo.validate(); } } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like 
you should have to do this, but java serialization is wacky, and doesn't call the default constructor. __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class TSpanStandardSchemeFactory implements SchemeFactory { public TSpanStandardScheme getScheme() { return new TSpanStandardScheme(); } } private static class TSpanStandardScheme extends StandardScheme<TSpan> { public void read(org.apache.thrift.protocol.TProtocol iprot, TSpan struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // AGENT_ID if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.agentId = iprot.readString(); struct.setAgentIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // APPLICATION_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.applicationName = iprot.readString(); struct.setApplicationNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // AGENT_START_TIME if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.agentStartTime = iprot.readI64(); struct.setAgentStartTimeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // TRANSACTION_ID if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.transactionId = iprot.readBinary(); struct.setTransactionIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 7: // SPAN_ID if (schemeField.type == org.apache.thrift.protocol.TType.I64) 
{ struct.spanId = iprot.readI64(); struct.setSpanIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 8: // PARENT_SPAN_ID if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.parentSpanId = iprot.readI64(); struct.setParentSpanIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 9: // START_TIME if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.startTime = iprot.readI64(); struct.setStartTimeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 10: // ELAPSED if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.elapsed = iprot.readI32(); struct.setElapsedIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 11: // RPC if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.rpc = iprot.readString(); struct.setRpcIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 12: // SERVICE_TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I16) { struct.serviceType = iprot.readI16(); struct.setServiceTypeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 13: // END_POINT if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.endPoint = iprot.readString(); struct.setEndPointIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 14: // REMOTE_ADDR if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.remoteAddr = iprot.readString(); struct.setRemoteAddrIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 15: // ANNOTATIONS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList 
_list8 = iprot.readListBegin(); struct.annotations = new ArrayList<TAnnotation>(_list8.size); TAnnotation _elem9; for (int _i10 = 0; _i10 < _list8.size; ++_i10) { _elem9 = new TAnnotation(); _elem9.read(iprot); struct.annotations.add(_elem9); } iprot.readListEnd(); } struct.setAnnotationsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 16: // FLAG if (schemeField.type == org.apache.thrift.protocol.TType.I16) { struct.flag = iprot.readI16(); struct.setFlagIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 17: // ERR if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.err = iprot.readI32(); struct.setErrIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 18: // SPAN_EVENT_LIST if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list11 = iprot.readListBegin(); struct.spanEventList = new ArrayList<TSpanEvent>(_list11.size); TSpanEvent _elem12; for (int _i13 = 0; _i13 < _list11.size; ++_i13) { _elem12 = new TSpanEvent(); _elem12.read(iprot); struct.spanEventList.add(_elem12); } iprot.readListEnd(); } struct.setSpanEventListIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 19: // PARENT_APPLICATION_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.parentApplicationName = iprot.readString(); struct.setParentApplicationNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 20: // PARENT_APPLICATION_TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I16) { struct.parentApplicationType = iprot.readI16(); struct.setParentApplicationTypeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 21: // ACCEPTOR_HOST if (schemeField.type == 
org.apache.thrift.protocol.TType.STRING) { struct.acceptorHost = iprot.readString(); struct.setAcceptorHostIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 25: // API_ID if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.apiId = iprot.readI32(); struct.setApiIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 26: // EXCEPTION_INFO if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.exceptionInfo = new TIntStringValue(); struct.exceptionInfo.read(iprot); struct.setExceptionInfoIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 30: // APPLICATION_SERVICE_TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I16) { struct.applicationServiceType = iprot.readI16(); struct.setApplicationServiceTypeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 31: // LOGGING_TRANSACTION_INFO if (schemeField.type == org.apache.thrift.protocol.TType.BYTE) { struct.loggingTransactionInfo = iprot.readByte(); struct.setLoggingTransactionInfoIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, TSpan struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.agentId != null) { oprot.writeFieldBegin(AGENT_ID_FIELD_DESC); oprot.writeString(struct.agentId); oprot.writeFieldEnd(); } if (struct.applicationName != null) { oprot.writeFieldBegin(APPLICATION_NAME_FIELD_DESC); oprot.writeString(struct.applicationName); 
oprot.writeFieldEnd(); } oprot.writeFieldBegin(AGENT_START_TIME_FIELD_DESC); oprot.writeI64(struct.agentStartTime); oprot.writeFieldEnd(); if (struct.transactionId != null) { oprot.writeFieldBegin(TRANSACTION_ID_FIELD_DESC); oprot.writeBinary(struct.transactionId); oprot.writeFieldEnd(); } oprot.writeFieldBegin(SPAN_ID_FIELD_DESC); oprot.writeI64(struct.spanId); oprot.writeFieldEnd(); if (struct.isSetParentSpanId()) { oprot.writeFieldBegin(PARENT_SPAN_ID_FIELD_DESC); oprot.writeI64(struct.parentSpanId); oprot.writeFieldEnd(); } oprot.writeFieldBegin(START_TIME_FIELD_DESC); oprot.writeI64(struct.startTime); oprot.writeFieldEnd(); if (struct.isSetElapsed()) { oprot.writeFieldBegin(ELAPSED_FIELD_DESC); oprot.writeI32(struct.elapsed); oprot.writeFieldEnd(); } if (struct.rpc != null) { if (struct.isSetRpc()) { oprot.writeFieldBegin(RPC_FIELD_DESC); oprot.writeString(struct.rpc); oprot.writeFieldEnd(); } } oprot.writeFieldBegin(SERVICE_TYPE_FIELD_DESC); oprot.writeI16(struct.serviceType); oprot.writeFieldEnd(); if (struct.endPoint != null) { if (struct.isSetEndPoint()) { oprot.writeFieldBegin(END_POINT_FIELD_DESC); oprot.writeString(struct.endPoint); oprot.writeFieldEnd(); } } if (struct.remoteAddr != null) { if (struct.isSetRemoteAddr()) { oprot.writeFieldBegin(REMOTE_ADDR_FIELD_DESC); oprot.writeString(struct.remoteAddr); oprot.writeFieldEnd(); } } if (struct.annotations != null) { if (struct.isSetAnnotations()) { oprot.writeFieldBegin(ANNOTATIONS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.annotations.size())); for (TAnnotation _iter14 : struct.annotations) { _iter14.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.isSetFlag()) { oprot.writeFieldBegin(FLAG_FIELD_DESC); oprot.writeI16(struct.flag); oprot.writeFieldEnd(); } if (struct.isSetErr()) { oprot.writeFieldBegin(ERR_FIELD_DESC); oprot.writeI32(struct.err); oprot.writeFieldEnd(); } if (struct.spanEventList != 
null) { if (struct.isSetSpanEventList()) { oprot.writeFieldBegin(SPAN_EVENT_LIST_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.spanEventList.size())); for (TSpanEvent _iter15 : struct.spanEventList) { _iter15.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.parentApplicationName != null) { if (struct.isSetParentApplicationName()) { oprot.writeFieldBegin(PARENT_APPLICATION_NAME_FIELD_DESC); oprot.writeString(struct.parentApplicationName); oprot.writeFieldEnd(); } } if (struct.isSetParentApplicationType()) { oprot.writeFieldBegin(PARENT_APPLICATION_TYPE_FIELD_DESC); oprot.writeI16(struct.parentApplicationType); oprot.writeFieldEnd(); } if (struct.acceptorHost != null) { if (struct.isSetAcceptorHost()) { oprot.writeFieldBegin(ACCEPTOR_HOST_FIELD_DESC); oprot.writeString(struct.acceptorHost); oprot.writeFieldEnd(); } } if (struct.isSetApiId()) { oprot.writeFieldBegin(API_ID_FIELD_DESC); oprot.writeI32(struct.apiId); oprot.writeFieldEnd(); } if (struct.exceptionInfo != null) { if (struct.isSetExceptionInfo()) { oprot.writeFieldBegin(EXCEPTION_INFO_FIELD_DESC); struct.exceptionInfo.write(oprot); oprot.writeFieldEnd(); } } if (struct.isSetApplicationServiceType()) { oprot.writeFieldBegin(APPLICATION_SERVICE_TYPE_FIELD_DESC); oprot.writeI16(struct.applicationServiceType); oprot.writeFieldEnd(); } if (struct.isSetLoggingTransactionInfo()) { oprot.writeFieldBegin(LOGGING_TRANSACTION_INFO_FIELD_DESC); oprot.writeByte(struct.loggingTransactionInfo); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class TSpanTupleSchemeFactory implements SchemeFactory { public TSpanTupleScheme getScheme() { return new TSpanTupleScheme(); } } private static class TSpanTupleScheme extends TupleScheme<TSpan> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, TSpan struct) throws org.apache.thrift.TException { TTupleProtocol oprot 
= (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetAgentId()) { optionals.set(0); } if (struct.isSetApplicationName()) { optionals.set(1); } if (struct.isSetAgentStartTime()) { optionals.set(2); } if (struct.isSetTransactionId()) { optionals.set(3); } if (struct.isSetSpanId()) { optionals.set(4); } if (struct.isSetParentSpanId()) { optionals.set(5); } if (struct.isSetStartTime()) { optionals.set(6); } if (struct.isSetElapsed()) { optionals.set(7); } if (struct.isSetRpc()) { optionals.set(8); } if (struct.isSetServiceType()) { optionals.set(9); } if (struct.isSetEndPoint()) { optionals.set(10); } if (struct.isSetRemoteAddr()) { optionals.set(11); } if (struct.isSetAnnotations()) { optionals.set(12); } if (struct.isSetFlag()) { optionals.set(13); } if (struct.isSetErr()) { optionals.set(14); } if (struct.isSetSpanEventList()) { optionals.set(15); } if (struct.isSetParentApplicationName()) { optionals.set(16); } if (struct.isSetParentApplicationType()) { optionals.set(17); } if (struct.isSetAcceptorHost()) { optionals.set(18); } if (struct.isSetApiId()) { optionals.set(19); } if (struct.isSetExceptionInfo()) { optionals.set(20); } if (struct.isSetApplicationServiceType()) { optionals.set(21); } if (struct.isSetLoggingTransactionInfo()) { optionals.set(22); } oprot.writeBitSet(optionals, 23); if (struct.isSetAgentId()) { oprot.writeString(struct.agentId); } if (struct.isSetApplicationName()) { oprot.writeString(struct.applicationName); } if (struct.isSetAgentStartTime()) { oprot.writeI64(struct.agentStartTime); } if (struct.isSetTransactionId()) { oprot.writeBinary(struct.transactionId); } if (struct.isSetSpanId()) { oprot.writeI64(struct.spanId); } if (struct.isSetParentSpanId()) { oprot.writeI64(struct.parentSpanId); } if (struct.isSetStartTime()) { oprot.writeI64(struct.startTime); } if (struct.isSetElapsed()) { oprot.writeI32(struct.elapsed); } if (struct.isSetRpc()) { oprot.writeString(struct.rpc); } if (struct.isSetServiceType()) { 
oprot.writeI16(struct.serviceType); } if (struct.isSetEndPoint()) { oprot.writeString(struct.endPoint); } if (struct.isSetRemoteAddr()) { oprot.writeString(struct.remoteAddr); } if (struct.isSetAnnotations()) { { oprot.writeI32(struct.annotations.size()); for (TAnnotation _iter16 : struct.annotations) { _iter16.write(oprot); } } } if (struct.isSetFlag()) { oprot.writeI16(struct.flag); } if (struct.isSetErr()) { oprot.writeI32(struct.err); } if (struct.isSetSpanEventList()) { { oprot.writeI32(struct.spanEventList.size()); for (TSpanEvent _iter17 : struct.spanEventList) { _iter17.write(oprot); } } } if (struct.isSetParentApplicationName()) { oprot.writeString(struct.parentApplicationName); } if (struct.isSetParentApplicationType()) { oprot.writeI16(struct.parentApplicationType); } if (struct.isSetAcceptorHost()) { oprot.writeString(struct.acceptorHost); } if (struct.isSetApiId()) { oprot.writeI32(struct.apiId); } if (struct.isSetExceptionInfo()) { struct.exceptionInfo.write(oprot); } if (struct.isSetApplicationServiceType()) { oprot.writeI16(struct.applicationServiceType); } if (struct.isSetLoggingTransactionInfo()) { oprot.writeByte(struct.loggingTransactionInfo); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TSpan struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(23); if (incoming.get(0)) { struct.agentId = iprot.readString(); struct.setAgentIdIsSet(true); } if (incoming.get(1)) { struct.applicationName = iprot.readString(); struct.setApplicationNameIsSet(true); } if (incoming.get(2)) { struct.agentStartTime = iprot.readI64(); struct.setAgentStartTimeIsSet(true); } if (incoming.get(3)) { struct.transactionId = iprot.readBinary(); struct.setTransactionIdIsSet(true); } if (incoming.get(4)) { struct.spanId = iprot.readI64(); struct.setSpanIdIsSet(true); } if (incoming.get(5)) { struct.parentSpanId = iprot.readI64(); struct.setParentSpanIdIsSet(true); } if 
(incoming.get(6)) { struct.startTime = iprot.readI64(); struct.setStartTimeIsSet(true); } if (incoming.get(7)) { struct.elapsed = iprot.readI32(); struct.setElapsedIsSet(true); } if (incoming.get(8)) { struct.rpc = iprot.readString(); struct.setRpcIsSet(true); } if (incoming.get(9)) { struct.serviceType = iprot.readI16(); struct.setServiceTypeIsSet(true); } if (incoming.get(10)) { struct.endPoint = iprot.readString(); struct.setEndPointIsSet(true); } if (incoming.get(11)) { struct.remoteAddr = iprot.readString(); struct.setRemoteAddrIsSet(true); } if (incoming.get(12)) { { org.apache.thrift.protocol.TList _list18 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.annotations = new ArrayList<TAnnotation>(_list18.size); TAnnotation _elem19; for (int _i20 = 0; _i20 < _list18.size; ++_i20) { _elem19 = new TAnnotation(); _elem19.read(iprot); struct.annotations.add(_elem19); } } struct.setAnnotationsIsSet(true); } if (incoming.get(13)) { struct.flag = iprot.readI16(); struct.setFlagIsSet(true); } if (incoming.get(14)) { struct.err = iprot.readI32(); struct.setErrIsSet(true); } if (incoming.get(15)) { { org.apache.thrift.protocol.TList _list21 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.spanEventList = new ArrayList<TSpanEvent>(_list21.size); TSpanEvent _elem22; for (int _i23 = 0; _i23 < _list21.size; ++_i23) { _elem22 = new TSpanEvent(); _elem22.read(iprot); struct.spanEventList.add(_elem22); } } struct.setSpanEventListIsSet(true); } if (incoming.get(16)) { struct.parentApplicationName = iprot.readString(); struct.setParentApplicationNameIsSet(true); } if (incoming.get(17)) { struct.parentApplicationType = iprot.readI16(); struct.setParentApplicationTypeIsSet(true); } if (incoming.get(18)) { struct.acceptorHost = iprot.readString(); struct.setAcceptorHostIsSet(true); } if (incoming.get(19)) { struct.apiId = iprot.readI32(); struct.setApiIdIsSet(true); } if 
(incoming.get(20)) { struct.exceptionInfo = new TIntStringValue(); struct.exceptionInfo.read(iprot); struct.setExceptionInfoIsSet(true); } if (incoming.get(21)) { struct.applicationServiceType = iprot.readI16(); struct.setApplicationServiceTypeIsSet(true); } if (incoming.get(22)) { struct.loggingTransactionInfo = iprot.readByte(); struct.setLoggingTransactionInfoIsSet(true); } } } }
wziyong/pinpoint
thrift/src/main/java/com/navercorp/pinpoint/thrift/dto/TSpan.java
Java
apache-2.0
96,642
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.CodeAnalysis.Debugging;
using Microsoft.CodeAnalysis.Symbols;

namespace Microsoft.CodeAnalysis.ExpressionEvaluator
{
    /// <summary>
    /// Immutable record describing a single import (C# <c>using</c> / VB <c>Imports</c>)
    /// recovered from debug information, to be rebound by the expression evaluator.
    /// Exactly which of the optional members is populated depends on
    /// <see cref="TargetKind"/> and on the PDB format the record came from.
    /// </summary>
    internal readonly struct ImportRecord
    {
        /// <summary>Kind of import this record describes.</summary>
        public readonly ImportTargetKind TargetKind;

        /// <summary>Alias introduced by the import, if any.</summary>
        public readonly string? Alias;

        /// <summary>Target type of a type import (C#).</summary>
        public readonly ITypeSymbolInternal? TargetType;

        /// <summary>
        /// Target of an import (type, namespace or XML namespace) that still
        /// needs to be bound (C#, VB).
        /// </summary>
        public readonly string? TargetString;

        /// <summary>Target assembly of a namespace import (C#, Portable PDB).</summary>
        public readonly IAssemblySymbolInternal? TargetAssembly;

        /// <summary>
        /// Extern alias identifying the target assembly of a namespace import;
        /// must be bound in the evaluation context (C#, native PDB).
        /// </summary>
        public readonly string? TargetAssemblyAlias;

        public ImportRecord(
            ImportTargetKind targetKind,
            string? alias = null,
            ITypeSymbolInternal? targetType = null,
            string? targetString = null,
            IAssemblySymbolInternal? targetAssembly = null,
            string? targetAssemblyAlias = null)
        {
            this.TargetKind = targetKind;
            this.Alias = alias;
            this.TargetType = targetType;
            this.TargetString = targetString;
            this.TargetAssembly = targetAssembly;
            this.TargetAssemblyAlias = targetAssemblyAlias;
        }
    }
}
heejaechang/roslyn
src/ExpressionEvaluator/Core/Source/ExpressionCompiler/PDB/ImportRecord.cs
C#
apache-2.0
1,627
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * $Id: DTMStringPool.java 468653 2006-10-28 07:07:05Z minchau $ */ package org.apache.xml.dtm.ref; import java.util.Vector; import org.apache.xml.utils.IntVector; /** <p>DTMStringPool is an "interning" mechanism for strings. It will * create a stable 1:1 mapping between a set of string values and a set of * integer index values, so the integers can be used to reliably and * uniquely identify (and when necessary retrieve) the strings.</p> * * <p>Design Priorities: * <ul> * <li>String-to-index lookup speed is critical.</li> * <li>Index-to-String lookup speed is slightly less so.</li> * <li>Threadsafety is not guaranteed at this level. * Enforce that in the application if needed.</li> * <li>Storage efficiency is an issue but not a huge one. * It is expected that string pools won't exceed about 2000 entries.</li> * </ul> * </p> * * <p>Implementation detail: A standard Hashtable is relatively * inefficient when looking up primitive int values, especially when * we're already maintaining an int-to-string vector. So I'm * maintaining a simple hash chain within this class.</p> * * <p>NOTE: There is nothing in the code that has a real dependency upon * String. 
It would work with any object type that implements reliable * .hashCode() and .equals() operations. The API enforces Strings because * it's safer that way, but this could trivially be turned into a general * ObjectPool if one was needed.</p> * * <p>Status: Passed basic test in main().</p> * */ public class DTMStringPool { Vector m_intToString; static final int HASHPRIME=101; int[] m_hashStart=new int[HASHPRIME]; IntVector m_hashChain; public static final int NULL=-1; /** * Create a DTMStringPool using the given chain size * * @param chainSize The size of the hash chain vector */ public DTMStringPool(int chainSize) { m_intToString=new Vector(); m_hashChain=new IntVector(chainSize); removeAllElements(); // -sb Add this to force empty strings to be index 0. stringToIndex(""); } public DTMStringPool() { this(512); } public void removeAllElements() { m_intToString.removeAllElements(); for(int i=0;i<HASHPRIME;++i) m_hashStart[i]=NULL; m_hashChain.removeAllElements(); } /** @return string whose value is uniquely identified by this integer index. * @throws java.lang.ArrayIndexOutOfBoundsException * if index doesn't map to a string. * */ public String indexToString(int i) throws java.lang.ArrayIndexOutOfBoundsException { if(i==NULL) return null; return (String) m_intToString.elementAt(i); } /** @return integer index uniquely identifying the value of this string. */ public int stringToIndex(String s) { if(s==null) return NULL; int hashslot=s.hashCode()%HASHPRIME; if(hashslot<0) hashslot=-hashslot; // Is it one we already know? int hashlast=m_hashStart[hashslot]; int hashcandidate=hashlast; while(hashcandidate!=NULL) { if(m_intToString.elementAt(hashcandidate).equals(s)) return hashcandidate; hashlast=hashcandidate; hashcandidate=m_hashChain.elementAt(hashcandidate); } // New value. Add to tables. 
int newIndex=m_intToString.size(); m_intToString.addElement(s); m_hashChain.addElement(NULL); // Initialize to no-following-same-hash if(hashlast==NULL) // First for this hash m_hashStart[hashslot]=newIndex; else // Link from previous with same hash m_hashChain.setElementAt(newIndex,hashlast); return newIndex; } /** Command-line unit test driver. This test relies on the fact that * this version of the pool assigns indices consecutively, starting * from zero, as new unique strings are encountered. */ public static void main(String[] args) { String[] word={ "Zero","One","Two","Three","Four","Five", "Six","Seven","Eight","Nine","Ten", "Eleven","Twelve","Thirteen","Fourteen","Fifteen", "Sixteen","Seventeen","Eighteen","Nineteen","Twenty", "Twenty-One","Twenty-Two","Twenty-Three","Twenty-Four", "Twenty-Five","Twenty-Six","Twenty-Seven","Twenty-Eight", "Twenty-Nine","Thirty","Thirty-One","Thirty-Two", "Thirty-Three","Thirty-Four","Thirty-Five","Thirty-Six", "Thirty-Seven","Thirty-Eight","Thirty-Nine"}; DTMStringPool pool=new DTMStringPool(); System.out.println("If no complaints are printed below, we passed initial test."); for(int pass=0;pass<=1;++pass) { int i; for(i=0;i<word.length;++i) { int j=pool.stringToIndex(word[i]); if(j!=i) System.out.println("\tMismatch populating pool: assigned "+ j+" for create "+i); } for(i=0;i<word.length;++i) { int j=pool.stringToIndex(word[i]); if(j!=i) System.out.println("\tMismatch in stringToIndex: returned "+ j+" for lookup "+i); } for(i=0;i<word.length;++i) { String w=pool.indexToString(i); if(!word[i].equals(w)) System.out.println("\tMismatch in indexToString: returned"+ w+" for lookup "+i); } pool.removeAllElements(); System.out.println("\nPass "+pass+" complete\n"); } // end pass loop } }
doppllib/j2objc
xalan/third_party/android/platform/external/apache-xml/src/main/java/org/apache/xml/dtm/ref/DTMStringPool.java
Java
apache-2.0
6,394
// Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // NOTE: These type checking functions intentionally don't use `instanceof` // because it is fragile and can be easily faked with `Object.create()`. 
function isArray(arg) { if (Array.isArray) { return Array.isArray(arg); } return objectToString(arg) === '[object Array]'; } exports.isArray = isArray; function isBoolean(arg) { return typeof arg === 'boolean'; } exports.isBoolean = isBoolean; function isNull(arg) { return arg === null; } exports.isNull = isNull; function isNullOrUndefined(arg) { return arg == null; } exports.isNullOrUndefined = isNullOrUndefined; function isNumber(arg) { return typeof arg === 'number'; } exports.isNumber = isNumber; function isString(arg) { return typeof arg === 'string'; } exports.isString = isString; function isSymbol(arg) { return typeof arg === 'symbol'; } exports.isSymbol = isSymbol; function isUndefined(arg) { return arg === void 0; } exports.isUndefined = isUndefined; function isRegExp(re) { return objectToString(re) === '[object RegExp]'; } exports.isRegExp = isRegExp; function isObject(arg) { return typeof arg === 'object' && arg !== null; } exports.isObject = isObject; function isDate(d) { return objectToString(d) === '[object Date]'; } exports.isDate = isDate; function isError(e) { return (objectToString(e) === '[object Error]' || e instanceof Error); } exports.isError = isError; function isFunction(arg) { return typeof arg === 'function'; } exports.isFunction = isFunction; function isPrimitive(arg) { return arg === null || typeof arg === 'boolean' || typeof arg === 'number' || typeof arg === 'string' || typeof arg === 'symbol' || // ES6 symbol typeof arg === 'undefined'; } exports.isPrimitive = isPrimitive; exports.isBuffer = Buffer.isBuffer; function objectToString(o) { return Object.prototype.toString.call(o); }
romero1989/suge-project
node_modules/laravel-elixir/node_modules/gulp-phpspec/node_modules/gulp-todo/node_modules/gulp-util/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js
JavaScript
apache-2.0
3,021
// Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // NOTE: These type checking functions intentionally don't use `instanceof` // because it is fragile and can be easily faked with `Object.create()`. 
function isArray(arg) { if (Array.isArray) { return Array.isArray(arg); } return objectToString(arg) === '[object Array]'; } exports.isArray = isArray; function isBoolean(arg) { return typeof arg === 'boolean'; } exports.isBoolean = isBoolean; function isNull(arg) { return arg === null; } exports.isNull = isNull; function isNullOrUndefined(arg) { return arg == null; } exports.isNullOrUndefined = isNullOrUndefined; function isNumber(arg) { return typeof arg === 'number'; } exports.isNumber = isNumber; function isString(arg) { return typeof arg === 'string'; } exports.isString = isString; function isSymbol(arg) { return typeof arg === 'symbol'; } exports.isSymbol = isSymbol; function isUndefined(arg) { return arg === void 0; } exports.isUndefined = isUndefined; function isRegExp(re) { return objectToString(re) === '[object RegExp]'; } exports.isRegExp = isRegExp; function isObject(arg) { return typeof arg === 'object' && arg !== null; } exports.isObject = isObject; function isDate(d) { return objectToString(d) === '[object Date]'; } exports.isDate = isDate; function isError(e) { return (objectToString(e) === '[object Error]' || e instanceof Error); } exports.isError = isError; function isFunction(arg) { return typeof arg === 'function'; } exports.isFunction = isFunction; function isPrimitive(arg) { return arg === null || typeof arg === 'boolean' || typeof arg === 'number' || typeof arg === 'string' || typeof arg === 'symbol' || // ES6 symbol typeof arg === 'undefined'; } exports.isPrimitive = isPrimitive; exports.isBuffer = Buffer.isBuffer; function objectToString(o) { return Object.prototype.toString.call(o); }
Hawkin1991/iblog
node_modules/core-util-is/lib/util.js
JavaScript
apache-2.0
3,021
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.framework.impl; import com.intellij.framework.FrameworkType; import com.intellij.framework.FrameworkTypeEx; import java.util.Comparator; import java.util.HashMap; import java.util.Map; /** * @author nik */ public class FrameworkTypeUtil { public static final Comparator<FrameworkType> FRAMEWORK_TYPE_COMPARATOR = (o1, o2) -> o1.getPresentableName().compareToIgnoreCase(o2.getPresentableName()); public static Map<String, FrameworkType> computeFrameworkTypeByIdMap() { Map<String, FrameworkType> frameworkTypes = new HashMap<>(); for (FrameworkTypeEx type : FrameworkTypeEx.EP_NAME.getExtensions()) { frameworkTypes.put(type.getId(), type); } return frameworkTypes; } }
da1z/intellij-community
java/idea-ui/src/com/intellij/framework/impl/FrameworkTypeUtil.java
Java
apache-2.0
1,329
/* * %CopyrightBegin% * * Copyright Ericsson AB 1998-2016. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * %CopyrightEnd% */ #include "eidef.h" #ifdef __WIN32__ #include <winsock2.h> #include <windows.h> #include <winbase.h> #elif VXWORKS #include <vxWorks.h> #include <ifLib.h> #include <sockLib.h> #include <inetLib.h> #include <unistd.h> #include <sys/types.h> #include <sys/socket.h> #include <netinet/in.h> #else #include <unistd.h> #include <sys/types.h> #include <sys/wait.h> #include <sys/socket.h> #include <netinet/in.h> #include <arpa/inet.h> #endif #include <stdlib.h> #include <string.h> #include "ei_internal.h" #include "putget.h" #include "ei_epmd.h" #include "ei_portio.h" /* publish our listen port and alive name */ /* return the (useless) creation number */ /* publish our listen port and alive name */ /* return the (useless) creation number */ /* this protocol is a lot more complex than the old one */ static int ei_epmd_r4_publish (int port, const char *alive, unsigned ms) { char buf[EPMDBUF]; char *s = buf; int fd; int elen = 0; int nlen = strlen(alive); int len = elen + nlen + 13; /* hard coded: be careful! */ int n; int res, creation; if (len > sizeof(buf)-2) { erl_errno = ERANGE; return -1; } s = buf; put16be(s,len); put8(s,EI_EPMD_ALIVE2_REQ); put16be(s,port); /* port number */ put8(s,'h'); /* h = r4 hidden node */ put8(s, EI_MYPROTO); /* protocol 0 ?? 
*/ put16be(s,EI_DIST_HIGH); /* highest understood version: 1 = R4 */ put16be(s,EI_DIST_LOW); /* lowest: 0 = R3 */ put16be(s,nlen); /* length of alivename */ strcpy(s, alive); s += nlen; put16be(s,elen); /* length of extra string = 0 */ /* no extra string */ if ((fd = ei_epmd_connect_tmo(NULL,ms)) < 0) return fd; if ((res = ei_write_fill_t(fd, buf, len+2, ms)) != len+2) { closesocket(fd); erl_errno = (res == -2) ? ETIMEDOUT : EIO; return -1; } EI_TRACE_CONN6("ei_epmd_r4_publish", "-> ALIVE2_REQ alive=%s port=%d ntype=%d " "proto=%d dist-high=%d dist-low=%d", alive,port,'H',EI_MYPROTO,EI_DIST_HIGH,EI_DIST_LOW); if ((n = ei_read_fill_t(fd, buf, 4, ms)) != 4) { EI_TRACE_ERR0("ei_epmd_r4_publish","<- CLOSE"); closesocket(fd); erl_errno = (n == -2) ? ETIMEDOUT : EIO; return -2; /* version mismatch */ } /* Don't close fd here! It keeps us registered with epmd */ s = buf; if (((res=get8(s)) != EI_EPMD_ALIVE2_RESP)) { /* response */ EI_TRACE_ERR1("ei_epmd_r4_publish","<- unknown (%d)",res); EI_TRACE_ERR0("ei_epmd_r4_publish","-> CLOSE"); closesocket(fd); erl_errno = EIO; return -1; } EI_TRACE_CONN0("ei_epmd_r4_publish","<- ALIVE2_RESP"); if (((res=get8(s)) != 0)) { /* 0 == success */ EI_TRACE_ERR1("ei_epmd_r4_publish"," result=%d (fail)",res); closesocket(fd); erl_errno = EIO; return -1; } creation = get16be(s); EI_TRACE_CONN2("ei_epmd_r4_publish", " result=%d (ok) creation=%d",res,creation); /* probably should save fd so we can close it later... */ /* epmd_saveconn(OPEN,fd,alive); */ /* return the creation number, for no good reason */ /* return creation;*/ /* no - return the descriptor */ return fd; } int ei_epmd_publish(int port, const char *alive) { return ei_epmd_publish_tmo(port, alive, 0); } int ei_epmd_publish_tmo(int port, const char *alive, unsigned ms) { return ei_epmd_r4_publish(port,alive, ms);; } /* * Publish a name for our C-node. * a file descriptor is returned - close it to unpublish. 
* */ int ei_publish(ei_cnode* ec, int port) { return ei_epmd_publish(port, ei_thisalivename(ec)); } int ei_publish_tmo(ei_cnode* ec, int port, unsigned ms) { return ei_epmd_publish_tmo(port, ei_thisalivename(ec), ms); }
lemenkov/otp
lib/erl_interface/src/epmd/epmd_publish.c
C
apache-2.0
4,379
package com.dianping.cat.consumer.dependency; import org.junit.Assert; import org.junit.Test; import org.unidal.helper.Files; import com.dianping.cat.consumer.dependency.model.entity.DependencyReport; import com.dianping.cat.consumer.dependency.model.transform.DefaultSaxParser; public class DependencyReportMergerTest { @Test public void testDependencyReportMerge() throws Exception { String oldXml = Files.forIO().readFrom(getClass().getResourceAsStream("dependency_new.xml"), "utf-8"); String newXml = Files.forIO().readFrom(getClass().getResourceAsStream("dependency_new.xml"), "utf-8"); DependencyReport reportOld = DefaultSaxParser.parse(oldXml); DependencyReport reportNew = DefaultSaxParser.parse(newXml); String expected = Files.forIO().readFrom(getClass().getResourceAsStream("dependency_analyzer_merger.xml"), "utf-8"); DependencyReportMerger merger = new DependencyReportMerger(new DependencyReport(reportOld.getDomain())); reportOld.accept(merger); reportNew.accept(merger); Assert.assertEquals("Check the merge result!", expected.replace("\r", ""), merger.getDependencyReport().toString() .replace("\r", "")); Assert.assertEquals("Source report is changed!", newXml.replace("\r", ""), reportNew.toString().replace("\r", "")); } }
chqlb/cat
cat-consumer/src/test/java/com/dianping/cat/consumer/dependency/DependencyReportMergerTest.java
Java
apache-2.0
1,280
// // Copyright (c) Microsoft. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // using Microsoft.Azure.Management.Resources; using Microsoft.Azure.Management.Resources.Models; using Microsoft.Azure.Management.Sql; using Microsoft.Azure.Management.Sql.Models; using Microsoft.Azure.Test; using System; using System.Net.Http; namespace Sql2.Tests.ScenarioTests { class Sql2ScenarioHelper { /// <summary> /// The region in which the tests will create their needed resources /// </summary> private static string TestEnvironmentRegion = "Australia East"; /// <summary> /// Generate a SQL Client from the test base to use. /// </summary> /// <returns>SQL Client</returns> public static SqlManagementClient GetSqlClient(DelegatingHandler handler) { return TestBase.GetServiceClient<SqlManagementClient>(new CSMTestEnvironmentFactory()).WithHandler(handler); } /// <summary> /// Generate a Resource Management client from the test base to use for managing resource groups. /// </summary> /// <returns>Resource Management client</returns> public static ResourceManagementClient GetResourceClient(DelegatingHandler handler) { return TestBase.GetServiceClient<ResourceManagementClient>(new CSMTestEnvironmentFactory()).WithHandler(handler); } /// <summary> /// Responsible for creating a resource group, and within it a SQL database server, as well as creating a SqlClient for the given handler. 
/// Once these are created, this method calls the given test with the created sql client, the names of the resource group and server. /// This method does not removes the created resources !!! it should be run in an undo context that wraps the call to this method. /// </summary> /// <param name="handler">A delegation handler to create a Sql client based on it</param> /// <param name="serverVersion">The version of the server being created</param> /// <param name="test">A function that receives a sql client, names of a created resource group and server</param> public static void RunServerTestInEnvironment(BasicDelegatingHandler handler, string serverVersion, Action<SqlManagementClient, string, Server> test) { RunServerTestInEnvironment(handler, serverVersion, TestEnvironmentRegion, test); } /// <summary> /// Responsible for creating a resource group, and within it a SQL database server, as well as creating a SqlClient for the given handler. /// Once these are created, this method calls the given test with the created sql client, the names of the resource group and server. /// This method does not removes the created resources !!! it should be run in an undo context that wraps the call to this method. 
/// </summary> /// <param name="handler">A delegation handler to create a Sql client based on it</param> /// <param name="serverVersion">The version of the server being created</param> /// <param name="serverLocation">The location of the server being created</param> /// <param name="test">A function that receives a sql client, names of a created resource group and server</param> public static void RunServerTestInEnvironment(BasicDelegatingHandler handler, string serverVersion, string serverLocation, Action<SqlManagementClient, string, Server> test) { // Management Clients var sqlClient = Sql2ScenarioHelper.GetSqlClient(handler); var resClient = Sql2ScenarioHelper.GetResourceClient(handler); // Variables for server create string serverName = TestUtilities.GenerateName("csm-sql-server-"); string resGroupName = TestUtilities.GenerateName("csm-sql-rg-"); string adminLogin = "testlogin"; string adminPass = "testp@ssMakingIt1007Longer"; string version = serverVersion; // Create the resource group. resClient.ResourceGroups.CreateOrUpdate(resGroupName, new ResourceGroup() { Location = serverLocation, }); try { ////////////////////////////////////////////////////////////////////// // Create server for test. var server = sqlClient.Servers.CreateOrUpdate(resGroupName, serverName, new ServerCreateOrUpdateParameters() { Location = serverLocation, Properties = new ServerCreateOrUpdateProperties() { AdministratorLogin = adminLogin, AdministratorLoginPassword = adminPass, Version = version, } }).Server; test(sqlClient, resGroupName, server); } finally { // Clean up the resource group. resClient.ResourceGroups.Delete(resGroupName); } } /// <summary> /// Responsible for creating a resource group, within it a SQL database server and a database, as well as creating a SqlClient for /// the given handler. /// Once these are created, this method calls the given test with the created sql client, the names of the resource group, server and /// database. 
/// This method does not removes the created resources !!! it should be run in an undo context that wraps the call to this method. /// </summary> /// <param name="handler">A delegation handler to create a Sql client based on it</param> /// <param name="serverVersion">The version of the server being created</param> /// <param name="test">A function that receives a sql client, names of a created resource group, server and database</param> public static void RunDatabaseTestInEnvironment(BasicDelegatingHandler handler, string serverVersion, Action<SqlManagementClient, string, Server, Database> test) { var testAdapter = new Action<SqlManagementClient, string, Server>((sqlClient, rgName, server) => RunDbTest(sqlClient, rgName, server, test)); RunServerTestInEnvironment(handler, serverVersion, testAdapter); } /// <summary> /// Responsible for creating a resource group, within it a SQL database server and a database, as well as creating a SqlClient for /// the given handler. /// Once these are created, this method calls the given test with the created sql client, the names of the resource group, server and /// database. /// This method does not removes the created resources !!! it should be run in an undo context that wraps the call to this method. 
/// </summary> /// <param name="handler">A delegation handler to create a Sql client based on it</param> /// <param name="serverVersion">The version of the server being created</param> /// <param name="serverLocation">The location of the server being created</param> /// <param name="test">A function that receives a sql client, names of a created resource group, server and database</param> public static void RunDatabaseTestInEnvironment(BasicDelegatingHandler handler, string serverVersion, string serverLocation, Action<SqlManagementClient, string, Server, Database> test) { var testAdapter = new Action<SqlManagementClient, string, Server>((sqlClient, rgName, server) => RunDbTest(sqlClient, rgName, server, test)); RunServerTestInEnvironment(handler, serverVersion, serverLocation, testAdapter); } /// <summary> /// A helper method that creates only a database within the given resource group and server. Once it is created this method calls the /// given test with the sql client and the names of the resource group, server and database. /// </summary> private static void RunDbTest(SqlManagementClient sqlClient, string resGroupName, Server server, Action<SqlManagementClient, string, Server, Database> test) { // Variables for database create string databaseName = TestUtilities.GenerateName("csm-auditing-db"); string databaseCollation = "Japanese_Bushu_Kakusu_100_CS_AS_KS_WS"; string databaseEdition = "Basic"; long databaseMaxSize = 1L * 1024L * 1024L * 1024L; // 1 GB Guid dbSloBasic = new Guid("dd6d99bb-f193-4ec1-86f2-43d3bccbc49c"); // Basic ////////////////////////////////////////////////////////////////////// // Create database for test. 
var database = sqlClient.Databases.CreateOrUpdate(resGroupName, server.Name, databaseName, new DatabaseCreateOrUpdateParameters() { Location = server.Location, Properties = new DatabaseCreateOrUpdateProperties() { Collation = databaseCollation, Edition = databaseEdition, MaxSizeBytes = databaseMaxSize, RequestedServiceObjectiveId = dbSloBasic, }, }).Database; test(sqlClient, resGroupName, server, database); } } }
cwickham3/azure-sdk-for-net
src/ResourceManagement/Sql/Sql.Tests/ScenarioTests/Sql2ScenarioHelper.cs
C#
apache-2.0
9,979
<html><head><title>Nest - Aggregations</title><meta http-equiv="cache-control" content="no-cache"/><meta http-equiv="pragma" content="no-cache"/><meta http-equiv="content-type" content="text/html;charset=utf-8"/><meta http-equiv="expires" content="0"/><meta name="description" content="elasticsearch"/><meta name="keywords" content="nest, elasticsearch, .net, client"/><meta name="author" content="martijn laarman"/><meta name="viewport" content="width=device-width, initial-scale=1"/> <script src="/scripts/html5shiv.js"></script><link rel="stylesheet" type="text/css" href="/styles/normalize.css"/><link rel="stylesheet" type="text/css" href="/styles/layout.css"/><link rel="stylesheet" type="text/css" href="/styles/pygments.css"/><link rel="stylesheet" type="text/css" href="/styles/pygments.css"/><link rel="stylesheet" type="text/css" href="//netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css"/><link href="//fonts.googleapis.com/css?family=Ubuntu+Mono|Open+Sans" rel="stylesheet" type="text/css"/><link href="/prettify/prettify.css" type="text/css" rel="stylesheet"/><link href="/prettify/sunburst.css" type="text/css" rel="stylesheet"/><script src="//code.jquery.com/jquery.min.js" type="text/javascript"></script><script type="text/javascript" src="/prettify/prettify.js"></script><script type="text/javascript" src="/prettify/fix_code_tags.js"></script></head><body><div class="wrapper"><header class="header"><div class="actions"><iframe src="//ghbtns.com/github-btn.html?user=elasticsearch&amp;repo=elasticsearch-net&amp;type=fork&amp;count=true" allowtransparency="true" frameborder="0" scrolling="0" width="95" height="20"></iframe><iframe src="//ghbtns.com/github-btn.html?user=elasticsearch&amp;repo=elasticsearch-net&amp;type=watch&amp;count=true" allowtransparency="true" frameborder="0" scrolling="0" width="110" height="20"></iframe></div><img src="/images/nest-nuget-icon.png" width="48" height="48"/><h1 class="nest">NEST</h1><p>Documentation</p></header><div 
class="divide"></div><div class="middle"><div class="container"><main class="content"><h1 id="histogram-aggregation">Histogram aggregation</h1> <p>A multi-bucket values source based aggregation that can be applied on numeric values extracted from the documents.</p> <h2 id="usage">Usage</h2> <h3 id="fluent-syntax">Fluent Syntax</h3> <pre><code>var result = client.Search&lt;ElasticsearchProject&gt;(s =&gt; s .Aggregations(a =&gt; a .Histogram(&quot;my_histogram_agg&quot;, h =&gt; h .Field(p =&gt; p.LOC) .Interval(100) ) ) ); var agg = result.Aggs.Histogram(&quot;my_histogram_agg&quot;); </code></pre><h3 id="object-initializer-syntax">Object Initializer Syntax</h3> <pre><code>var request = new SearchRequest { Aggregations = new Dictionary&lt;string, IAggregationContainer&gt; { { &quot;my_histogram_agg&quot;, new AggregationContainer { Histogram = new HistogramAggregator { Field = &quot;loc&quot;, Interval = 100 } } } } }; var result = client.Search&lt;ElasticsearchProject&gt;(request); var agg = result.Aggs.Histogram(&quot;my_histogram_agg&quot;); </code></pre><p>Refer to the <a href="http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-histogram-aggregation.html">original docs</a> for more information.</p> </main></div><aside class="left-sidebar"><aside id="menu"><ul><li><h4><a href="/">Home</a><a href="/contributing.html">Contributing</a><a href="/building.html">Building</a><a href="/breaking-changes.html">1.0 Breaking Changes</a><a href="https://github.com/elasticsearch/elasticsearch-net/releases">Release Notes</a></h4></li></ul><ul id="elasticsearch-net"><h4 class="title">Elasticsearch.Net</h4><ul><li><a href="/elasticsearch-net/quick-start.html">Quick Start</a></li><li><a href="/elasticsearch-net/connecting.html">Connecting</a></li><li><a href="/elasticsearch-net/security.html">Security</a></li><li><a href="/elasticsearch-net/cluster-failover.html">Cluster failover</a></li><li><a 
href="/elasticsearch-net/building-requests.html">Building requests</a></li><li><a href="/elasticsearch-net/handling-responses.html">Handling responses</a></li><li><a href="/elasticsearch-net/errors.html">Errors</a></li></ul></ul><ul id="nest"><h4 class="title">NEST</h4><ul><li><a href="/nest/quick-start.html">Quick Start</a></li><li><a href="/nest/connecting.html">Connecting</a></li><li><a href="/nest/index-type-inference.html">Type/Index Inference</a></li><li><a href="/nest/handling-responses.html">Handling responses</a></li><li><a href="/nest/writing-queries.html">Writing queries</a></li><li><a href="/nest/tips-tricks.html">Tips & Tricks</a></li></ul><li><h4><a href="/nest/core/"><i class="fa fa-chevron-right"></i>Core</a></h4></li><li><h4><a href="/nest/indices/aliases.html"><i class="fa fa-chevron-right"></i>Indices</a></h4></li><li><h4><a href="/nest/cluster/health.html"><i class="fa fa-chevron-right"></i>Cluster</a></h4></li><li><h4><a href="/nest/search/basics.html"><i class="fa fa-chevron-right"></i>Search</a></h4></li><h4><a href="/nest/aggregations/handling.html"><i class="fa fa-chevron-down"></i>Aggregations</a></h4><ul><li class="sub"><a href="/nest/aggregations/avg.html">Avg</a></li><li class="sub"><a href="/nest/aggregations/cardinality.html">Cardinality</a></li><li class="sub"><a href="/nest/aggregations/date-histogram.html">Date Histogram</a></li><li class="sub"><a href="/nest/aggregations/date-range.html">Date Range</a></li><li class="sub"><a href="/nest/aggregations/extended-stats.html">Extended Stats</a></li><li class="sub"><a href="/nest/aggregations/filter.html">Filter</a></li><li class="sub"><a href="/nest/aggregations/geo-bounds.html">Geo Bounds</a></li><li class="sub"><a href="/nest/aggregations/geo-distance.html">Geo Distance</a></li><li class="sub"><a href="/nest/aggregations/geohash-grid.html">Geohash Grid</a></li><li class="sub"><a href="/nest/aggregations/global.html">Global</a></li><li class="sub"><a 
href="/nest/aggregations/histogram.html" class="selected">Histogram</a></li><li class="sub"><a href="/nest/aggregations/ipv4.html">IPv4 Range</a></li><li class="sub"><a href="/nest/aggregations/max.html">Max</a></li><li class="sub"><a href="/nest/aggregations/min.html">Min</a></li><li class="sub"><a href="/nest/aggregations/missing.html">Missing</a></li><li class="sub"><a href="/nest/aggregations/percentiles.html">Percentiles</a></li><li class="sub"><a href="/nest/aggregations/percentile-ranks.html">Percentiles Ranks</a></li><li class="sub"><a href="/nest/aggregations/range.html">Range</a></li><li class="sub"><a href="/nest/aggregations/nested.html">Nested</a></li><li class="sub"><a href="/nest/aggregations/reverse-nested.html">Reverse Nested</a></li><li class="sub"><a href="/nest/aggregations/significant-terms.html">Significant Terms</a></li><li class="sub"><a href="/nest/aggregations/stats.html">Stats</a></li><li class="sub"><a href="/nest/aggregations/sum.html">Sum</a></li><li class="sub"><a href="/nest/aggregations/terms.html">Terms</a></li><li class="sub"><a href="/nest/aggregations/top-hits.html">Top Hits</a></li><li class="sub"><a href="/nest/aggregations/value-count.html">Value Count</a></li></ul><li><h4><a href="/nest/facets/handling.html"><i class="fa fa-chevron-right"></i>Facets</a></h4></li></ul></aside></aside></div><footer class="footer"></footer></div></body></html>
starckgates/elasticsearch-net
docs/build/nest/aggregations/histogram.html
HTML
apache-2.0
7,513
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.api.java; import org.apache.flink.api.common.Plan; import org.apache.flink.api.common.operators.GenericDataSinkBase; import org.apache.flink.api.java.io.DiscardingOutputFormat; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Tests for multiple invocations of a plan. 
*/ public class MultipleInvokationsTest { @Test public void testMultipleInvocationsGetPlan() { try { ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); // ----------- Execution 1 --------------- DataSet<String> data = env.fromElements("Some", "test", "data").name("source1"); //data.print(); data.output(new DiscardingOutputFormat<String>()).name("print1"); data.output(new DiscardingOutputFormat<String>()).name("output1"); { Plan p = env.createProgramPlan(); assertEquals(2, p.getDataSinks().size()); for (GenericDataSinkBase<?> sink : p.getDataSinks()) { assertTrue(sink.getName().equals("print1") || sink.getName().equals("output1")); assertEquals("source1", sink.getInput().getName()); } } // ----------- Execution 2 --------------- data.writeAsText("/some/file/path").name("textsink"); { Plan p = env.createProgramPlan(); assertEquals(1, p.getDataSinks().size()); GenericDataSinkBase<?> sink = p.getDataSinks().iterator().next(); assertEquals("textsink", sink.getName()); assertEquals("source1", sink.getInput().getName()); } } catch (Exception e) { System.err.println(e.getMessage()); e.printStackTrace(); fail(e.getMessage()); } } }
ueshin/apache-flink
flink-java/src/test/java/org/apache/flink/api/java/MultipleInvokationsTest.java
Java
apache-2.0
2,487
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2013 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.session; import javax.swing.JPanel; import org.zaproxy.zap.session.SessionManagementMethodType.UnsupportedSessionManagementMethodException; /** * An Options Panel that is used to configure all the settings corresponding to an * {@link SessionManagementMethod}.<br/> * <br/> * This panel will be displayed to users in a separate dialog. * * @param <T> the session management method type */ public abstract class AbstractSessionManagementMethodOptionsPanel extends JPanel { /** The Constant serialVersionUID. */ private static final long serialVersionUID = 9003182467823059637L; public AbstractSessionManagementMethodOptionsPanel() { super(); } /** * Binds (loads) data from an existing Session Management method in the panel. After this * method, the {@link #getMethod()} should return the same object, eventually with some changes * (if {@link #saveMethod()} was called). * * @param method the method to be loaded/shown in the panel. */ public abstract void bindMethod(SessionManagementMethod method) throws UnsupportedSessionManagementMethodException; /** * Validate the fields of the configuration panel. If any of the fields are not in the proper * state, an {@link IllegalStateException} is thrown, containing a message describing the * problem. 
* * @throws IllegalStateException if any of the fields are not in the valid state */ public abstract void validateFields() throws IllegalStateException; /** * Save the changes from the panel in the session management method. */ public abstract void saveMethod(); /** * Gets the session management method configured by this panel. * * @return the method */ public abstract SessionManagementMethod getMethod(); }
cassiodeveloper/zaproxy
src/org/zaproxy/zap/session/AbstractSessionManagementMethodOptionsPanel.java
Java
apache-2.0
2,512
package com.marshalchen.common.demoofui; import android.content.Context; import android.content.Intent; import android.content.res.Configuration; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.TaskStackBuilder; import android.support.v4.view.MenuItemCompat; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBar; import android.support.v7.app.ActionBarActivity; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.widget.SearchView; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.animation.AnimationUtils; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.ListView; import android.widget.Toast; import butterknife.ButterKnife; import butterknife.InjectView; import com.marshalchen.common.commonUtils.logUtils.Logs; import com.marshalchen.common.commonUtils.urlUtils.HttpsUtils; import com.marshalchen.common.uimodule.customFonts.CalligraphyContextWrapper; import com.marshalchen.common.uimodule.nineoldandroids.animation.Animator; import com.marshalchen.common.ui.ToastUtil; import com.marshalchen.common.uimodule.shimmer.Shimmer; import com.marshalchen.common.uimodule.shimmer.ShimmerTextView; public class DemoOfUiActivity extends ActionBarActivity { private CharSequence mDrawerTitle; private CharSequence mTitle; private ActionBarDrawerToggle mDrawerToggle; private String[] mPlanetTitles; @InjectView(R.id.drawer_layout) DrawerLayout mDrawerLayout; @InjectView(R.id.left_drawer_listview) ListView mDrawerList; private Fragment mContent; @InjectView(R.id.favShimmerTextView) ShimmerTextView favShimmerTextView; Shimmer shimmer; @InjectView(R.id.main_content_frame) View main_content_frame; @InjectView(R.id.favShimmerReaLayout) View favShimmerReaLayout; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); 
setContentView(R.layout.main_landing_activity); ButterKnife.inject(this); mContent = new LandingFragment(); getSupportFragmentManager().beginTransaction() .add(R.id.main_content_frame, mContent).commit(); initViews(); initShimmerTextView(); } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); // Sync the toggle state after onRestoreInstanceState has occurred. mDrawerToggle.syncState(); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); mDrawerToggle.onConfigurationChanged(newConfig); } @Override protected void attachBaseContext(Context newBase) { super.attachBaseContext(new CalligraphyContextWrapper(newBase)); } @Override public Intent getSupportParentActivityIntent() { Logs.d("upTask"); // finish(); return super.getSupportParentActivityIntent(); } @Override public void onCreateSupportNavigateUpTaskStack(TaskStackBuilder builder) { super.onCreateSupportNavigateUpTaskStack(builder); } // @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu items for use in the action bar MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.main_menu_layout, menu); MenuItem searchItem = menu.findItem(R.id.action_search); SearchView searchView = (SearchView) MenuItemCompat.getActionView(searchItem); searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String s) { ToastUtil.show(DemoOfUiActivity.this, "test~", Toast.LENGTH_SHORT); return false; } @Override public boolean onQueryTextChange(String s) { return false; } }); searchView.setOnSearchClickListener(new View.OnClickListener() { @Override public void onClick(View v) { // ToastUtil.show(MainActivity.this,"show~", Toast.LENGTH_SHORT); } }); // menu.findItem(R.id.action_search).getActionView() > API 11 // if (searchItem != null) { // searchItem.collapseActionView(); 
// } return super.onCreateOptionsMenu(menu); } /* Called whenever we call invalidateOptionsMenu() */ @Override public boolean onPrepareOptionsMenu(Menu menu) { // boolean drawerOpen = mDrawerLayout.isDrawerOpen(mDrawerList); // menu.findItem(R.id.action_websearch).setVisible(!drawerOpen); return super.onPrepareOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { if (mDrawerToggle.onOptionsItemSelected(item)) { return true; } switch (item.getItemId()) { default: return super.onOptionsItemSelected(item); } } private <T> void selectItems(T T) { // Fragment fragment = new PrologueFragment(); // Bundle args = new Bundle(); // // args.putInt(PlanetFragment.ARG_PLANET_NUMBER, position); // fragment.setArguments(args); // // Insert the fragment by replacing any existing fragment // FragmentManager fragmentManager = getSupportFragmentManager(); // fragmentManager.beginTransaction() // .replace(R.id.main_content_frame, fragment) // .commit(); // mDrawerLayout.closeDrawer(mDrawerList); } private void selectItem(int position) { setTitle(mPlanetTitles[position]); mDrawerLayout.closeDrawer(mDrawerList); } private class DrawerItemClickListener implements ListView.OnItemClickListener { @Override public void onItemClick(AdapterView parent, View view, int position, long id) { selectItem(position); } } @Override public void setTitle(CharSequence title) { mTitle = title; getSupportActionBar().setTitle(title); } private void initViews() { ActionBar actionBar = getSupportActionBar(); actionBar.setDisplayHomeAsUpEnabled(true); // actionBar.setHomeAsUpIndicator(R.drawable.ic_drawer); actionBar.setHomeButtonEnabled(true); // actionBar.setDisplayShowHomeEnabled(false); mPlanetTitles = getResources().getStringArray(R.array.items_name); mTitle = mDrawerTitle = getTitle(); mDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout, R.string.drawer_open, R.string.drawer_close) { /** Called when a drawer has settled in a completely closed state. 
*/ public void onDrawerClosed(View view) { super.onDrawerClosed(view); getSupportActionBar().setTitle(mTitle); invalidateOptionsMenu(); // creates call to onPrepareOptionsMenu() } /** Called when a drawer has settled in a completely open state. */ public void onDrawerOpened(View drawerView) { super.onDrawerOpened(drawerView); getSupportActionBar().setTitle(mDrawerTitle); invalidateOptionsMenu(); // creates call to onPrepareOptionsMenu() } }; // Set the drawer toggle as the DrawerListener Logs.d("mDrawerLayout " + (mDrawerLayout != null) + " " + "mDrawerToggle " + (mDrawerToggle != null)); mDrawerLayout.setDrawerListener(mDrawerToggle); //mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, Gravity.CLIP_VERTICAL); //mDrawerLayout.setScrimColor(getResources().getColor(R.color.babyBlueColor)); // mDrawerList.setAdapter(new ArrayAdapter<String>(this, // R.layout.left_menu, mPlanetTitles)); //mDrawerList.setAdapter(new SimpleAdapter(this,null,R.layout.left_menu_layout,null,null)); mDrawerList.setAdapter(new ArrayAdapter<String>(this, R.layout.left_menu_layout, mPlanetTitles)); // Set the list's click listener mDrawerList.setOnItemClickListener(new DrawerItemClickListener()); // getSupportParentActivityIntent(); } private void testHttps() { new Thread() { @Override public void run() { super.run(); HttpsUtils.sendWithSSlSocketWithCrt(DemoOfUiActivity.this, "ca.crt", "https://xxx.xxx"); } }.start(); } private void initShimmerTextView() { main_content_frame.setVisibility(View.INVISIBLE); shimmer = new Shimmer(); shimmer.setRepeatCount(0) .setDuration(800) .setStartDelay(300) .setDirection(Shimmer.ANIMATION_DIRECTION_LTR) .setAnimatorListener(new Animator.AnimatorListener() { @Override public void onAnimationStart(Animator animation) { } @Override public void onAnimationEnd(Animator animation) { main_content_frame.setVisibility(View.VISIBLE); main_content_frame.startAnimation(AnimationUtils.loadAnimation(DemoOfUiActivity.this, R.anim.fade_ins)); 
favShimmerReaLayout.setVisibility(View.GONE); } @Override public void onAnimationCancel(Animator animation) { } @Override public void onAnimationRepeat(Animator animation) { } }); shimmer.start(favShimmerTextView); } }
liwangdong/UltimateAndroid
UltimateAndroidNormal/DemoOfUI/src/com/marshalchen/common/demoofui/DemoOfUiActivity.java
Java
apache-2.0
10,046
/** * Production environment settings * * This file can include shared settings for a production environment, * such as API keys or remote database passwords. If you're using * a version control solution for your Sails app, this file will * be committed to your repository unless you add it to your .gitignore * file. If your repository will be publicly viewable, don't add * any private information to this file! * */ module.exports = { /*************************************************************************** * Set the default database connection for models in the production * * environment (see config/connections.js and config/models.js ) * ***************************************************************************/ // models: { // connection: 'someMysqlServer' // }, /*************************************************************************** * Set the port in the production environment to 80 * ***************************************************************************/ // port: 80, /*************************************************************************** * Set the log level in production environment to "silent" * ***************************************************************************/ // log: { // level: "silent" // } };
lauravsilva/Facilities344
config/env/production.js
JavaScript
apache-2.0
1,371
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.test.bpmn.sequenceflow; import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase; import org.camunda.bpm.engine.impl.util.CollectionUtil; import org.camunda.bpm.engine.test.Deployment; import org.camunda.bpm.engine.test.bpmn.gateway.ExclusiveGatewayTest; /** * See {@link ExclusiveGatewayTest} for a default sequence flow test on an exclusive gateway. * * @author Joram Barrez */ public class DefaultSequenceFlowTest extends PluggableProcessEngineTestCase { @Deployment public void testDefaultSequenceFlowOnTask() { String procId = runtimeService.startProcessInstanceByKey("defaultSeqFlow", CollectionUtil.singletonMap("input", 2)).getId(); assertNotNull(runtimeService.createExecutionQuery().processInstanceId(procId).activityId("task2").singleResult()); procId = runtimeService.startProcessInstanceByKey("defaultSeqFlow", CollectionUtil.singletonMap("input", 3)).getId(); assertNotNull(runtimeService.createExecutionQuery().processInstanceId(procId).activityId("task3").singleResult()); procId = runtimeService.startProcessInstanceByKey("defaultSeqFlow", CollectionUtil.singletonMap("input", 123)).getId(); assertNotNull(runtimeService.createExecutionQuery().processInstanceId(procId).activityId("task1").singleResult()); } }
nagyistoce/camunda-bpm-platform
engine/src/test/java/org/camunda/bpm/engine/test/bpmn/sequenceflow/DefaultSequenceFlowTest.java
Java
apache-2.0
1,924
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.server; import com.facebook.presto.Session; import com.facebook.presto.client.ClientSession; import com.facebook.presto.client.Column; import com.facebook.presto.client.QueryResults; import com.facebook.presto.client.StatementClient; import com.facebook.presto.execution.QueryIdGenerator; import com.facebook.presto.metadata.SessionPropertyManager; import com.facebook.presto.security.AccessControl; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.AbstractIterator; import io.airlift.http.client.HttpClient; import io.airlift.http.server.HttpServerInfo; import io.airlift.json.JsonCodec; import io.airlift.units.Duration; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.net.URI; import java.util.Iterator; import java.util.List; import static com.facebook.presto.server.ResourceUtil.assertRequest; import static com.facebook.presto.server.ResourceUtil.createSessionForRequest; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Strings.isNullOrEmpty; import static com.google.common.collect.Iterators.concat; import static com.google.common.collect.Iterators.transform; 
import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MINUTES; import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import static javax.ws.rs.core.Response.status; @Path("/v1/execute") public class ExecuteResource { private final HttpServerInfo serverInfo; private final AccessControl accessControl; private final SessionPropertyManager sessionPropertyManager; private final HttpClient httpClient; private final QueryIdGenerator queryIdGenerator; private final JsonCodec<QueryResults> queryResultsCodec; @Inject public ExecuteResource( HttpServerInfo serverInfo, AccessControl accessControl, SessionPropertyManager sessionPropertyManager, @ForExecute HttpClient httpClient, QueryIdGenerator queryIdGenerator, JsonCodec<QueryResults> queryResultsCodec) { this.serverInfo = requireNonNull(serverInfo, "serverInfo is null"); this.accessControl = requireNonNull(accessControl, "accessControl is null"); this.sessionPropertyManager = requireNonNull(sessionPropertyManager, "sessionPropertyManager is null"); this.httpClient = requireNonNull(httpClient, "httpClient is null"); this.queryIdGenerator = requireNonNull(queryIdGenerator, "queryIdGenerator is null"); this.queryResultsCodec = requireNonNull(queryResultsCodec, "queryResultsCodec is null"); } @POST @Produces(MediaType.APPLICATION_JSON) public Response createQuery(String query, @Context HttpServletRequest servletRequest) { assertRequest(!isNullOrEmpty(query), "SQL query is empty"); Session session = createSessionForRequest(servletRequest, accessControl, sessionPropertyManager, queryIdGenerator.createNextQueryId()); ClientSession clientSession = session.toClientSession(serverUri(), false, new Duration(2, MINUTES)); StatementClient client = new StatementClient(httpClient, queryResultsCodec, clientSession, query); List<Column> columns = getColumns(client); Iterator<List<Object>> iterator = flatten(new ResultsPageIterator(client)); 
SimpleQueryResults results = new SimpleQueryResults(columns, iterator); return Response.ok(results, MediaType.APPLICATION_JSON_TYPE).build(); } private URI serverUri() { checkState(serverInfo.getHttpUri() != null, "No HTTP URI for this server (HTTP disabled?)"); return serverInfo.getHttpUri(); } private static List<Column> getColumns(StatementClient client) { while (client.isValid()) { List<Column> columns = client.current().getColumns(); if (columns != null) { return columns; } client.advance(); } if (!client.isFailed()) { throw internalServerError("No columns"); } throw internalServerError(failureMessage(client.finalResults())); } @SuppressWarnings("RedundantTypeArguments") private static <T> Iterator<T> flatten(Iterator<Iterable<T>> iterator) { // the explicit type argument is required by the Eclipse compiler return concat(transform(iterator, Iterable<T>::iterator)); } private static class ResultsPageIterator extends AbstractIterator<Iterable<List<Object>>> { private final StatementClient client; private ResultsPageIterator(StatementClient client) { this.client = requireNonNull(client, "client is null"); } @Override protected Iterable<List<Object>> computeNext() { while (client.isValid()) { Iterable<List<Object>> data = client.current().getData(); client.advance(); if (data != null) { return data; } } if (client.isFailed()) { throw internalServerError(failureMessage(client.finalResults())); } return endOfData(); } } private static WebApplicationException internalServerError(String message) { return new WebApplicationException(status(INTERNAL_SERVER_ERROR).entity(message).build()); } private static String failureMessage(QueryResults results) { return format("Query failed (#%s): %s", results.getId(), results.getError().getMessage()); } public static class SimpleQueryResults { private final List<Column> columns; private final Iterator<List<Object>> data; public SimpleQueryResults(List<Column> columns, Iterator<List<Object>> data) { this.columns = 
requireNonNull(columns, "columns is null"); this.data = requireNonNull(data, "data is null"); } @JsonProperty public List<Column> getColumns() { return columns; } @JsonProperty public Iterator<List<Object>> getData() { return data; } } }
ipros-team/presto
presto-main/src/main/java/com/facebook/presto/server/ExecuteResource.java
Java
apache-2.0
7,141
-- database: presto_tpcds; groups: tpcds; requires: com.teradata.tempto.fulfillment.table.hive.tpcds.ImmutableTpcdsTablesRequirements WITH customer_total_return AS ( SELECT "cr_returning_customer_sk" "ctr_customer_sk" , "ca_state" "ctr_state" , "sum"("cr_return_amt_inc_tax") "ctr_total_return" FROM catalog_returns , date_dim , customer_address WHERE ("cr_returned_date_sk" = "d_date_sk") AND ("d_year" = 2000) AND ("cr_returning_addr_sk" = "ca_address_sk") GROUP BY "cr_returning_customer_sk", "ca_state" ) SELECT "c_customer_id" , "c_salutation" , "c_first_name" , "c_last_name" , "ca_street_number" , "ca_street_name" , "ca_street_type" , "ca_suite_number" , "ca_city" , "ca_county" , "ca_state" , "ca_zip" , "ca_country" , "ca_gmt_offset" , "ca_location_type" , "ctr_total_return" FROM customer_total_return ctr1 , customer_address , customer WHERE ("ctr1"."ctr_total_return" > ( SELECT ("avg"("ctr_total_return") * DECIMAL '1.2') FROM customer_total_return ctr2 WHERE ("ctr1"."ctr_state" = "ctr2"."ctr_state") )) AND ("ca_address_sk" = "c_current_addr_sk") AND ("ca_state" = 'GA') AND ("ctr1"."ctr_customer_sk" = "c_customer_sk") ORDER BY "c_customer_id" ASC, "c_salutation" ASC, "c_first_name" ASC, "c_last_name" ASC, "ca_street_number" ASC, "ca_street_name" ASC, "ca_street_type" ASC, "ca_suite_number" ASC, "ca_city" ASC, "ca_county" ASC, "ca_state" ASC, "ca_zip" ASC, "ca_country" ASC, "ca_gmt_offset" ASC, "ca_location_type" ASC, "ctr_total_return" ASC LIMIT 100
jxiang/presto
presto-product-tests/src/main/resources/sql-tests/testcases/tpcds/q81.sql
SQL
apache-2.0
1,565
package org.askerov.dynamicgrid; import android.widget.BaseAdapter; import java.util.HashMap; import java.util.List; /** * Author: alex askerov * Date: 9/6/13 * Time: 7:43 PM */ /** * Abstract adapter for {@link org.askerov.dynamicgrid.DynamicGridView} with sable items id; */ public abstract class AbstractDynamicGridAdapter extends BaseAdapter implements DynamicGridAdapterInterface { public static final int INVALID_ID = -1; private int nextStableId = 0; private HashMap<Object, Integer> mIdMap = new HashMap<Object, Integer>(); /** * Adapter must have stable id * * @return */ @Override public final boolean hasStableIds() { return true; } /** * creates stable id for object * * @param item */ protected void addStableId(Object item) { mIdMap.put(item, nextStableId++); } /** * create stable ids for list * * @param items */ protected void addAllStableId(List<?> items) { for (Object item : items) { addStableId(item); } } /** * get id for position * * @param position * @return */ @Override public final long getItemId(int position) { if (position < 0 || position >= mIdMap.size()) { return INVALID_ID; } Object item = getItem(position); return mIdMap.get(item); } /** * clear stable id map * should called when clear adapter data; */ protected void clearStableIdMap() { mIdMap.clear(); } /** * remove stable id for <code>item</code>. Should called on remove data item from adapter * * @param item */ protected void removeStableID(Object item) { mIdMap.remove(item); } }
nKey/DynamicGrid
dynamicgrid/src/org/askerov/dynamicgrid/AbstractDynamicGridAdapter.java
Java
apache-2.0
1,813
/* Copyright 2012 Selenium committers Copyright 2012 Software Freedom Conservancy Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.selenium.server; import static org.junit.Assert.assertTrue; import static org.openqa.selenium.net.PortProber.pollPort; import com.google.common.base.Throwables; import com.google.common.io.ByteStreams; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.openqa.selenium.Build; import org.openqa.selenium.net.PortProber; import org.openqa.selenium.os.CommandLine; import org.openqa.selenium.testing.InProject; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; import java.net.Proxy; import java.net.URL; import java.net.URLConnection; import javax.net.ssl.HttpsURLConnection; public class SeleniumServerProxyTest { private static CommandLine command; private static int port; @BeforeClass public static void startServer() { new Build().of("selenium-server-standalone").go(); File serverJar = InProject.locate( "build/java/server/src/org/openqa/grid/selenium/selenium-standalone.jar"); port = PortProber.findFreePort(); command = new CommandLine("java", "-jar", serverJar.getAbsolutePath(), "-port", "" + port); command.executeAsync(); pollPort(port); } @AfterClass public static void killServer() { command.destroy(); } @Test @Ignore public void testProxiesSeleniumStaticResourcesWithUpstreamProxy() throws Exception { URL target = new 
URL("http://www.google.com/selenium-server/core/Blank.html"); URLConnection client = target.openConnection(new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", port))); assertTrue(getResponseAsString(client).contains("<body>")); target = new URL("https://www.google.com/selenium-server/core/Blank.html"); client = target.openConnection(new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", port))); TrustEverythingSSLTrustManager.trustAllSSLCertificates((HttpsURLConnection) client); assertTrue(getResponseAsString(client).contains("<body>")); } private String getResponseAsString(URLConnection client) { try { byte[] bytes = ByteStreams.toByteArray(client.getInputStream()); return new String(bytes); } catch (IOException e) { throw Throwables.propagate(e); } } }
freynaud/selenium
java/server/test/org/openqa/selenium/server/SeleniumServerProxyTest.java
Java
apache-2.0
2,914
/* Copyright 2011 Selenium committers Copyright 2011 Software Freedom Conservancy Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.grid.e2e.utils; import static java.util.concurrent.TimeUnit.SECONDS; import com.google.common.base.Function; import org.openqa.grid.internal.Registry; import org.openqa.selenium.support.ui.FluentWait; import org.openqa.selenium.support.ui.Wait; public class RegistryTestHelper { private RegistryTestHelper() { // Utility class } /** * Wait for the registry to have exactly nodeNumber nodes registered. 
*/ public static void waitForNode(final Registry r, final int nodeNumber) { newWait().until(new Function<Object, Integer>() { @Override public Integer apply(Object input) { Integer i = r.getAllProxies().size(); if (i != nodeNumber) { return null; } else { return i; } } }); } public static void waitForActiveTestSessionCount(final Registry r, final int activeTestSesssions) { newWait().until(new Function<Object, Integer>() { @Override public Integer apply(Object input) { Integer i = r.getActiveSessions().size(); if (i != activeTestSesssions) { return null; } else { return i; } } }); } public static void waitForNewSessionRequestCount(final Registry r, final int newSessionRequestCount) { newWait().until(new Function<Object, Integer>() { @Override public Integer apply(Object input) { Integer i = r.getNewSessionRequestCount(); if (i != newSessionRequestCount) { return null; } else { return i; } } }); } private static Wait<Object> newWait() { return new FluentWait<Object>("").withTimeout(30, SECONDS); } }
xlzdew/seleniumpr
java/server/test/org/openqa/grid/e2e/utils/RegistryTestHelper.java
Java
apache-2.0
2,326
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for metric_ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import math import numpy as np from six.moves import xrange # pylint: disable=redefined-builtin from tensorflow.contrib import metrics as metrics_lib from tensorflow.contrib.metrics.python.ops import metric_ops from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes as dtypes_lib from tensorflow.python.framework import errors_impl from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor from tensorflow.python.ops import array_ops from tensorflow.python.ops import data_flow_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test NAN = float('nan') metrics = metrics_lib def _enqueue_vector(sess, queue, values, shape=None): if not shape: shape = (1, len(values)) dtype = queue.dtypes[0] sess.run( queue.enqueue(constant_op.constant( values, dtype=dtype, shape=shape))) def _binary_2d_label_to_sparse_value(labels): """Convert dense 2D binary indicator tensor to sparse tensor. Only 1 values in `labels` are included in result. 
Args: labels: Dense 2D binary indicator tensor. Returns: `SparseTensorValue` whose values are indices along the last dimension of `labels`. """ indices = [] values = [] batch = 0 for row in labels: label = 0 xi = 0 for x in row: if x == 1: indices.append([batch, xi]) values.append(label) xi += 1 else: assert x == 0 label += 1 batch += 1 shape = [len(labels), len(labels[0])] return sparse_tensor.SparseTensorValue( np.array(indices, np.int64), np.array(values, np.int64), np.array(shape, np.int64)) def _binary_2d_label_to_sparse(labels): """Convert dense 2D binary indicator tensor to sparse tensor. Only 1 values in `labels` are included in result. Args: labels: Dense 2D binary indicator tensor. Returns: `SparseTensor` whose values are indices along the last dimension of `labels`. """ return sparse_tensor.SparseTensor.from_value( _binary_2d_label_to_sparse_value(labels)) def _binary_3d_label_to_sparse_value(labels): """Convert dense 3D binary indicator tensor to sparse tensor. Only 1 values in `labels` are included in result. Args: labels: Dense 2D binary indicator tensor. Returns: `SparseTensorValue` whose values are indices along the last dimension of `labels`. """ indices = [] values = [] for d0, labels_d0 in enumerate(labels): for d1, labels_d1 in enumerate(labels_d0): d2 = 0 for class_id, label in enumerate(labels_d1): if label == 1: values.append(class_id) indices.append([d0, d1, d2]) d2 += 1 else: assert label == 0 shape = [len(labels), len(labels[0]), len(labels[0][0])] return sparse_tensor.SparseTensorValue( np.array(indices, np.int64), np.array(values, np.int64), np.array(shape, np.int64)) def _binary_3d_label_to_sparse(labels): """Convert dense 3D binary indicator tensor to sparse tensor. Only 1 values in `labels` are included in result. Args: labels: Dense 2D binary indicator tensor. Returns: `SparseTensor` whose values are indices along the last dimension of `labels`. 
""" return sparse_tensor.SparseTensor.from_value( _binary_3d_label_to_sparse_value(labels)) def _assert_nan(test_case, actual): test_case.assertTrue(math.isnan(actual), 'Expected NAN, got %s.' % actual) def _assert_local_variables(test_case, expected): test_case.assertEquals( set(expected), set(v.name for v in variables.local_variables())) class StreamingMeanTest(test.TestCase): def setUp(self): ops.reset_default_graph() def testVars(self): metrics.streaming_mean(array_ops.ones([4, 3])) _assert_local_variables(self, ('mean/count:0', 'mean/total:0')) def testMetricsCollection(self): my_collection_name = '__metrics__' mean, _ = metrics.streaming_mean( array_ops.ones([4, 3]), metrics_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [mean]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_mean( array_ops.ones([4, 3]), updates_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [update_op]) def testBasic(self): with self.test_session() as sess: values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() mean, update_op = metrics.streaming_mean(values) sess.run(variables.local_variables_initializer()) for _ in range(4): sess.run(update_op) self.assertAlmostEqual(1.65, sess.run(mean), 5) def testUpdateOpsReturnsCurrentValue(self): with self.test_session() as sess: values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() mean, update_op = 
metrics.streaming_mean(values) sess.run(variables.local_variables_initializer()) self.assertAlmostEqual(0.5, sess.run(update_op), 5) self.assertAlmostEqual(1.475, sess.run(update_op), 5) self.assertAlmostEqual(12.4 / 6.0, sess.run(update_op), 5) self.assertAlmostEqual(1.65, sess.run(update_op), 5) self.assertAlmostEqual(1.65, sess.run(mean), 5) def test1dWeightedValues(self): with self.test_session() as sess: # Create the queue that populates the values. values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() # Create the queue that populates the weighted labels. weights_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 1)) _enqueue_vector(sess, weights_queue, [1]) _enqueue_vector(sess, weights_queue, [0]) _enqueue_vector(sess, weights_queue, [0]) _enqueue_vector(sess, weights_queue, [1]) weights = weights_queue.dequeue() mean, update_op = metrics.streaming_mean(values, weights) variables.local_variables_initializer().run() for _ in range(4): update_op.eval() self.assertAlmostEqual((0 + 1 - 3.2 + 4.0) / 4.0, mean.eval(), 5) def test1dWeightedValues_placeholders(self): with self.test_session() as sess: # Create the queue that populates the values. feed_values = ((0, 1), (-4.2, 9.1), (6.5, 0), (-3.2, 4.0)) values = array_ops.placeholder(dtype=dtypes_lib.float32) # Create the queue that populates the weighted labels. 
weights_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1,)) _enqueue_vector(sess, weights_queue, 1, shape=(1,)) _enqueue_vector(sess, weights_queue, 0, shape=(1,)) _enqueue_vector(sess, weights_queue, 0, shape=(1,)) _enqueue_vector(sess, weights_queue, 1, shape=(1,)) weights = weights_queue.dequeue() mean, update_op = metrics.streaming_mean(values, weights) variables.local_variables_initializer().run() for i in range(4): update_op.eval(feed_dict={values: feed_values[i]}) self.assertAlmostEqual((0 + 1 - 3.2 + 4.0) / 4.0, mean.eval(), 5) def test2dWeightedValues(self): with self.test_session() as sess: # Create the queue that populates the values. values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() # Create the queue that populates the weighted labels. weights_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, weights_queue, [1, 1]) _enqueue_vector(sess, weights_queue, [1, 0]) _enqueue_vector(sess, weights_queue, [0, 1]) _enqueue_vector(sess, weights_queue, [0, 0]) weights = weights_queue.dequeue() mean, update_op = metrics.streaming_mean(values, weights) variables.local_variables_initializer().run() for _ in range(4): update_op.eval() self.assertAlmostEqual((0 + 1 - 4.2 + 0) / 4.0, mean.eval(), 5) def test2dWeightedValues_placeholders(self): with self.test_session() as sess: # Create the queue that populates the values. feed_values = ((0, 1), (-4.2, 9.1), (6.5, 0), (-3.2, 4.0)) values = array_ops.placeholder(dtype=dtypes_lib.float32) # Create the queue that populates the weighted labels. 
weights_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(2,)) _enqueue_vector(sess, weights_queue, [1, 1], shape=(2,)) _enqueue_vector(sess, weights_queue, [1, 0], shape=(2,)) _enqueue_vector(sess, weights_queue, [0, 1], shape=(2,)) _enqueue_vector(sess, weights_queue, [0, 0], shape=(2,)) weights = weights_queue.dequeue() mean, update_op = metrics.streaming_mean(values, weights) variables.local_variables_initializer().run() for i in range(4): update_op.eval(feed_dict={values: feed_values[i]}) self.assertAlmostEqual((0 + 1 - 4.2 + 0) / 4.0, mean.eval(), 5) class StreamingMeanTensorTest(test.TestCase): def setUp(self): ops.reset_default_graph() def testVars(self): metrics.streaming_mean_tensor(array_ops.ones([4, 3])) _assert_local_variables(self, ('mean/total_tensor:0', 'mean/count_tensor:0')) def testMetricsCollection(self): my_collection_name = '__metrics__' mean, _ = metrics.streaming_mean_tensor( array_ops.ones([4, 3]), metrics_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [mean]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_mean_tensor( array_ops.ones([4, 3]), updates_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [update_op]) def testBasic(self): with self.test_session() as sess: values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() mean, update_op = metrics.streaming_mean_tensor(values) sess.run(variables.local_variables_initializer()) for _ in range(4): sess.run(update_op) self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(mean)) def testMultiDimensional(self): with self.test_session() as sess: values_queue = data_flow_ops.FIFOQueue( 2, 
dtypes=dtypes_lib.float32, shapes=(2, 2, 2)) _enqueue_vector( sess, values_queue, [[[1, 2], [1, 2]], [[1, 2], [1, 2]]], shape=(2, 2, 2)) _enqueue_vector( sess, values_queue, [[[1, 2], [1, 2]], [[3, 4], [9, 10]]], shape=(2, 2, 2)) values = values_queue.dequeue() mean, update_op = metrics.streaming_mean_tensor(values) sess.run(variables.local_variables_initializer()) for _ in range(2): sess.run(update_op) self.assertAllClose([[[1, 2], [1, 2]], [[2, 3], [5, 6]]], sess.run(mean)) def testUpdateOpsReturnsCurrentValue(self): with self.test_session() as sess: values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() mean, update_op = metrics.streaming_mean_tensor(values) sess.run(variables.local_variables_initializer()) self.assertAllClose([[0, 1]], sess.run(update_op), 5) self.assertAllClose([[-2.1, 5.05]], sess.run(update_op), 5) self.assertAllClose([[2.3 / 3., 10.1 / 3.]], sess.run(update_op), 5) self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(update_op), 5) self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(mean), 5) def testWeighted1d(self): with self.test_session() as sess: # Create the queue that populates the values. values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() # Create the queue that populates the weights. 
weights_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 1)) _enqueue_vector(sess, weights_queue, [[1]]) _enqueue_vector(sess, weights_queue, [[0]]) _enqueue_vector(sess, weights_queue, [[1]]) _enqueue_vector(sess, weights_queue, [[0]]) weights = weights_queue.dequeue() mean, update_op = metrics.streaming_mean_tensor(values, weights) sess.run(variables.local_variables_initializer()) for _ in range(4): sess.run(update_op) self.assertAllClose([[3.25, 0.5]], sess.run(mean), 5) def testWeighted2d_1(self): with self.test_session() as sess: # Create the queue that populates the values. values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() # Create the queue that populates the weights. weights_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, weights_queue, [1, 1]) _enqueue_vector(sess, weights_queue, [1, 0]) _enqueue_vector(sess, weights_queue, [0, 1]) _enqueue_vector(sess, weights_queue, [0, 0]) weights = weights_queue.dequeue() mean, update_op = metrics.streaming_mean_tensor(values, weights) sess.run(variables.local_variables_initializer()) for _ in range(4): sess.run(update_op) self.assertAllClose([[-2.1, 0.5]], sess.run(mean), 5) def testWeighted2d_2(self): with self.test_session() as sess: # Create the queue that populates the values. values_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, values_queue, [0, 1]) _enqueue_vector(sess, values_queue, [-4.2, 9.1]) _enqueue_vector(sess, values_queue, [6.5, 0]) _enqueue_vector(sess, values_queue, [-3.2, 4.0]) values = values_queue.dequeue() # Create the queue that populates the weights. 
weights_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 2)) _enqueue_vector(sess, weights_queue, [0, 1]) _enqueue_vector(sess, weights_queue, [0, 0]) _enqueue_vector(sess, weights_queue, [0, 1]) _enqueue_vector(sess, weights_queue, [0, 0]) weights = weights_queue.dequeue() mean, update_op = metrics.streaming_mean_tensor(values, weights) sess.run(variables.local_variables_initializer()) for _ in range(4): sess.run(update_op) self.assertAllClose([[0, 0.5]], sess.run(mean), 5) class StreamingAccuracyTest(test.TestCase): def setUp(self): ops.reset_default_graph() def testVars(self): metrics.streaming_accuracy( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)), name='my_accuracy') _assert_local_variables(self, ('my_accuracy/count:0', 'my_accuracy/total:0')) def testMetricsCollection(self): my_collection_name = '__metrics__' mean, _ = metrics.streaming_accuracy( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)), metrics_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [mean]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_accuracy( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)), updates_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [update_op]) def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self): predictions = array_ops.ones((10, 3)) labels = array_ops.ones((10, 4)) with self.assertRaises(ValueError): metrics.streaming_accuracy(predictions, labels) def testPredictionsAndWeightsOfDifferentSizeRaisesValueError(self): predictions = array_ops.ones((10, 3)) labels = array_ops.ones((10, 3)) weights = array_ops.ones((9, 3)) with self.assertRaises(ValueError): metrics.streaming_accuracy(predictions, labels, weights) def testValueTensorIsIdempotent(self): predictions = random_ops.random_uniform( (10, 3), maxval=3, dtype=dtypes_lib.int64, 
seed=1) labels = random_ops.random_uniform( (10, 3), maxval=3, dtype=dtypes_lib.int64, seed=2) accuracy, update_op = metrics.streaming_accuracy(predictions, labels) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) # Run several updates. for _ in range(10): sess.run(update_op) # Then verify idempotency. initial_accuracy = accuracy.eval() for _ in range(10): self.assertEqual(initial_accuracy, accuracy.eval()) def testMultipleUpdates(self): with self.test_session() as sess: # Create the queue that populates the predictions. preds_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 1)) _enqueue_vector(sess, preds_queue, [0]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [2]) _enqueue_vector(sess, preds_queue, [1]) predictions = preds_queue.dequeue() # Create the queue that populates the labels. labels_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 1)) _enqueue_vector(sess, labels_queue, [0]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [2]) labels = labels_queue.dequeue() accuracy, update_op = metrics.streaming_accuracy(predictions, labels) sess.run(variables.local_variables_initializer()) for _ in xrange(3): sess.run(update_op) self.assertEqual(0.5, sess.run(update_op)) self.assertEqual(0.5, accuracy.eval()) def testEffectivelyEquivalentSizes(self): predictions = array_ops.ones((40, 1)) labels = array_ops.ones((40,)) with self.test_session() as sess: accuracy, update_op = metrics.streaming_accuracy(predictions, labels) sess.run(variables.local_variables_initializer()) self.assertEqual(1.0, update_op.eval()) self.assertEqual(1.0, accuracy.eval()) def testEffectivelyEquivalentSizesWithStaicShapedWeight(self): predictions = ops.convert_to_tensor([1, 1, 1]) # shape 3, labels = array_ops.expand_dims(ops.convert_to_tensor([1, 0, 0]), 1) # shape 3, 1 weights = 
array_ops.expand_dims(ops.convert_to_tensor([100, 1, 1]), 1) # shape 3, 1 with self.test_session() as sess: accuracy, update_op = metrics.streaming_accuracy(predictions, labels, weights) sess.run(variables.local_variables_initializer()) # if streaming_accuracy does not flatten the weight, accuracy would be # 0.33333334 due to an intended broadcast of weight. Due to flattening, # it will be higher than .95 self.assertGreater(update_op.eval(), .95) self.assertGreater(accuracy.eval(), .95) def testEffectivelyEquivalentSizesWithDynamicallyShapedWeight(self): predictions = ops.convert_to_tensor([1, 1, 1]) # shape 3, labels = array_ops.expand_dims(ops.convert_to_tensor([1, 0, 0]), 1) # shape 3, 1 weights = [[100], [1], [1]] # shape 3, 1 weights_placeholder = array_ops.placeholder( dtype=dtypes_lib.int32, name='weights') feed_dict = {weights_placeholder: weights} with self.test_session() as sess: accuracy, update_op = metrics.streaming_accuracy(predictions, labels, weights_placeholder) sess.run(variables.local_variables_initializer()) # if streaming_accuracy does not flatten the weight, accuracy would be # 0.33333334 due to an intended broadcast of weight. Due to flattening, # it will be higher than .95 self.assertGreater(update_op.eval(feed_dict=feed_dict), .95) self.assertGreater(accuracy.eval(feed_dict=feed_dict), .95) def testMultipleUpdatesWithWeightedValues(self): with self.test_session() as sess: # Create the queue that populates the predictions. preds_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 1)) _enqueue_vector(sess, preds_queue, [0]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [2]) _enqueue_vector(sess, preds_queue, [1]) predictions = preds_queue.dequeue() # Create the queue that populates the labels. 
labels_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.float32, shapes=(1, 1)) _enqueue_vector(sess, labels_queue, [0]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [2]) labels = labels_queue.dequeue() # Create the queue that populates the weights. weights_queue = data_flow_ops.FIFOQueue( 4, dtypes=dtypes_lib.int64, shapes=(1, 1)) _enqueue_vector(sess, weights_queue, [1]) _enqueue_vector(sess, weights_queue, [1]) _enqueue_vector(sess, weights_queue, [0]) _enqueue_vector(sess, weights_queue, [0]) weights = weights_queue.dequeue() accuracy, update_op = metrics.streaming_accuracy(predictions, labels, weights) sess.run(variables.local_variables_initializer()) for _ in xrange(3): sess.run(update_op) self.assertEqual(1.0, sess.run(update_op)) self.assertEqual(1.0, accuracy.eval()) class StreamingTruePositivesTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_true_positives((0, 1, 0), (0, 1, 1)) _assert_local_variables(self, ('true_positives/count:0',)) def testUnweighted(self): for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32): predictions = math_ops.cast(constant_op.constant( ((1, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 0))), dtype=dtype) labels = math_ops.cast(constant_op.constant( ((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))), dtype=dtype) tp, tp_update_op = metrics.streaming_true_positives(predictions, labels) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(0, tp.eval()) self.assertEqual(1, tp_update_op.eval()) self.assertEqual(1, tp.eval()) def testWeighted(self): for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32): predictions = math_ops.cast(constant_op.constant( ((1, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 0))), dtype=dtype) labels = math_ops.cast(constant_op.constant( ((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))), dtype=dtype) tp, tp_update_op = 
metrics.streaming_true_positives( predictions, labels, weights=37.0) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(0, tp.eval()) self.assertEqual(37.0, tp_update_op.eval()) self.assertEqual(37.0, tp.eval()) class StreamingFalseNegativesTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_false_negatives((0, 1, 0), (0, 1, 1)) _assert_local_variables(self, ('false_negatives/count:0',)) def testUnweighted(self): for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32): predictions = math_ops.cast(constant_op.constant( ((1, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 0))), dtype=dtype) labels = math_ops.cast(constant_op.constant( ((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))), dtype=dtype) fn, fn_update_op = metrics.streaming_false_negatives(predictions, labels) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(0, fn.eval()) self.assertEqual(2, fn_update_op.eval()) self.assertEqual(2, fn.eval()) def testWeighted(self): for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32): predictions = math_ops.cast(constant_op.constant( ((1, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 0))), dtype=dtype) labels = math_ops.cast(constant_op.constant( ((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))), dtype=dtype) fn, fn_update_op = metrics.streaming_false_negatives( predictions, labels, weights=((3.0,), (5.0,), (7.0,))) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(0, fn.eval()) self.assertEqual(8.0, fn_update_op.eval()) self.assertEqual(8.0, fn.eval()) class StreamingFalsePositivesTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_false_positives((0, 1, 0), (0, 1, 1)) _assert_local_variables(self, ('false_positives/count:0',)) def testUnweighted(self): for dtype in (dtypes_lib.bool, 
dtypes_lib.int32, dtypes_lib.float32): predictions = math_ops.cast(constant_op.constant( ((1, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 0))), dtype=dtype) labels = math_ops.cast(constant_op.constant( ((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))), dtype=dtype) fp, fp_update_op = metrics.streaming_false_positives(predictions, labels) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(0, fp.eval()) self.assertEqual(4, fp_update_op.eval()) self.assertEqual(4, fp.eval()) def testWeighted(self): for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32): predictions = math_ops.cast(constant_op.constant( ((1, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 0))), dtype=dtype) labels = math_ops.cast(constant_op.constant( ((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))), dtype=dtype) fp, fp_update_op = metrics.streaming_false_positives( predictions, labels, weights=((1.0, 2.0, 3.0, 5.0), (7.0, 11.0, 13.0, 17.0), (19.0, 23.0, 29.0, 31.0))) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(0, fp.eval()) self.assertEqual(42.0, fp_update_op.eval()) self.assertEqual(42.0, fp.eval()) class StreamingTrueNegativesTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_true_negatives((0, 1, 0), (0, 1, 1)) _assert_local_variables(self, ('true_negatives/count:0',)) def testUnweighted(self): for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32): predictions = math_ops.cast(constant_op.constant( ((1, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 0))), dtype=dtype) labels = math_ops.cast(constant_op.constant( ((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))), dtype=dtype) tn, tn_update_op = metrics.streaming_true_negatives(predictions, labels) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(0, tn.eval()) self.assertEqual(5, tn_update_op.eval()) self.assertEqual(5, tn.eval()) def 
testWeighted(self): for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32): predictions = math_ops.cast(constant_op.constant( ((1, 0, 1, 0), (0, 1, 1, 1), (0, 0, 0, 0))), dtype=dtype) labels = math_ops.cast(constant_op.constant( ((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))), dtype=dtype) tn, tn_update_op = metrics.streaming_true_negatives( predictions, labels, weights=((0.0, 2.0, 3.0, 5.0),)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(0, tn.eval()) self.assertEqual(15.0, tn_update_op.eval()) self.assertEqual(15.0, tn.eval()) class StreamingTruePositivesAtThresholdsTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_true_positives_at_thresholds( (0.0, 1.0, 0.0), (0, 1, 1), thresholds=(0.15, 0.5, 0.85)) _assert_local_variables(self, ('true_positives:0',)) def testUnweighted(self): predictions = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3))) labels = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))) tp, tp_update_op = metrics.streaming_true_positives_at_thresholds( predictions, labels, thresholds=(0.15, 0.5, 0.85)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAllEqual((0, 0, 0), tp.eval()) self.assertAllEqual((3, 1, 0), tp_update_op.eval()) self.assertAllEqual((3, 1, 0), tp.eval()) def testWeighted(self): predictions = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3))) labels = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))) tp, tp_update_op = metrics.streaming_true_positives_at_thresholds( predictions, labels, weights=37.0, thresholds=(0.15, 0.5, 0.85)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAllEqual((0.0, 0.0, 0.0), tp.eval()) self.assertAllEqual((111.0, 37.0, 0.0), tp_update_op.eval()) self.assertAllEqual((111.0, 
37.0, 0.0), tp.eval()) class StreamingFalseNegativesAtThresholdsTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_false_negatives_at_thresholds( (0.0, 1.0, 0.0), (0, 1, 1), thresholds=( 0.15, 0.5, 0.85,)) _assert_local_variables(self, ('false_negatives:0',)) def testUnweighted(self): predictions = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3))) labels = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))) fn, fn_update_op = metrics.streaming_false_negatives_at_thresholds( predictions, labels, thresholds=(0.15, 0.5, 0.85)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAllEqual((0, 0, 0), fn.eval()) self.assertAllEqual((0, 2, 3), fn_update_op.eval()) self.assertAllEqual((0, 2, 3), fn.eval()) def testWeighted(self): predictions = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3))) labels = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))) fn, fn_update_op = metrics.streaming_false_negatives_at_thresholds( predictions, labels, weights=((3.0,), (5.0,), (7.0,)), thresholds=(0.15, 0.5, 0.85)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAllEqual((0.0, 0.0, 0.0), fn.eval()) self.assertAllEqual((0.0, 8.0, 11.0), fn_update_op.eval()) self.assertAllEqual((0.0, 8.0, 11.0), fn.eval()) class StreamingFalsePositivesAtThresholdsTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_false_positives_at_thresholds( (0.0, 1.0, 0.0), (0, 1, 1), thresholds=(0.15, 0.5, 0.85)) _assert_local_variables(self, ('false_positives:0',)) def testUnweighted(self): predictions = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3))) labels = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))) fp, fp_update_op = 
metrics.streaming_false_positives_at_thresholds( predictions, labels, thresholds=(0.15, 0.5, 0.85)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAllEqual((0, 0, 0), fp.eval()) self.assertAllEqual((7, 4, 2), fp_update_op.eval()) self.assertAllEqual((7, 4, 2), fp.eval()) def testWeighted(self): predictions = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3))) labels = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))) fp, fp_update_op = metrics.streaming_false_positives_at_thresholds( predictions, labels, weights=((1.0, 2.0, 3.0, 5.0), (7.0, 11.0, 13.0, 17.0), (19.0, 23.0, 29.0, 31.0)), thresholds=(0.15, 0.5, 0.85)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAllEqual((0.0, 0.0, 0.0), fp.eval()) self.assertAllEqual((125.0, 42.0, 12.0), fp_update_op.eval()) self.assertAllEqual((125.0, 42.0, 12.0), fp.eval()) class StreamingTrueNegativesAtThresholdsTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_true_negatives_at_thresholds( (0.0, 1.0, 0.0), (0, 1, 1), thresholds=(0.15, 0.5, 0.85)) _assert_local_variables(self, ('true_negatives:0',)) def testUnweighted(self): predictions = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3))) labels = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0))) tn, tn_update_op = metrics.streaming_true_negatives_at_thresholds( predictions, labels, thresholds=(0.15, 0.5, 0.85)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAllEqual((0, 0, 0), tn.eval()) self.assertAllEqual((2, 5, 7), tn_update_op.eval()) self.assertAllEqual((2, 5, 7), tn.eval()) def testWeighted(self): predictions = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6), (0.1, 0.2, 0.4, 0.3))) labels = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), 
(0, 0, 0, 0))) tn, tn_update_op = metrics.streaming_true_negatives_at_thresholds( predictions, labels, weights=((0.0, 2.0, 3.0, 5.0),), thresholds=(0.15, 0.5, 0.85)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAllEqual((0.0, 0.0, 0.0), tn.eval()) self.assertAllEqual((5.0, 15.0, 23.0), tn_update_op.eval()) self.assertAllEqual((5.0, 15.0, 23.0), tn.eval()) class StreamingPrecisionTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() def testVars(self): metrics.streaming_precision( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1))) _assert_local_variables(self, ('precision/false_positives/count:0', 'precision/true_positives/count:0')) def testMetricsCollection(self): my_collection_name = '__metrics__' mean, _ = metrics.streaming_precision( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)), metrics_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [mean]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_precision( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)), updates_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [update_op]) def testValueTensorIsIdempotent(self): predictions = random_ops.random_uniform( (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1) labels = random_ops.random_uniform( (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=2) precision, update_op = metrics.streaming_precision(predictions, labels) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) # Run several updates. for _ in range(10): sess.run(update_op) # Then verify idempotency. 
initial_precision = precision.eval() for _ in range(10): self.assertEqual(initial_precision, precision.eval()) def testAllCorrect(self): inputs = np.random.randint(0, 2, size=(100, 1)) predictions = constant_op.constant(inputs) labels = constant_op.constant(inputs) precision, update_op = metrics.streaming_precision(predictions, labels) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAlmostEqual(1, sess.run(update_op)) self.assertAlmostEqual(1, precision.eval()) def testSomeCorrect(self): predictions = constant_op.constant([1, 0, 1, 0], shape=(1, 4)) labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4)) precision, update_op = metrics.streaming_precision(predictions, labels) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertAlmostEqual(0.5, update_op.eval()) self.assertAlmostEqual(0.5, precision.eval()) def testWeighted1d(self): predictions = constant_op.constant([[1, 0, 1, 0], [1, 0, 1, 0]]) labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]]) precision, update_op = metrics.streaming_precision( predictions, labels, weights=constant_op.constant([[2], [5]])) with self.test_session(): variables.local_variables_initializer().run() weighted_tp = 2.0 + 5.0 weighted_positives = (2.0 + 2.0) + (5.0 + 5.0) expected_precision = weighted_tp / weighted_positives self.assertAlmostEqual(expected_precision, update_op.eval()) self.assertAlmostEqual(expected_precision, precision.eval()) def testWeighted1d_placeholders(self): predictions = array_ops.placeholder(dtype=dtypes_lib.float32) labels = array_ops.placeholder(dtype=dtypes_lib.float32) feed_dict = { predictions: ((1, 0, 1, 0), (1, 0, 1, 0)), labels: ((0, 1, 1, 0), (1, 0, 0, 1)) } precision, update_op = metrics.streaming_precision( predictions, labels, weights=constant_op.constant([[2], [5]])) with self.test_session(): variables.local_variables_initializer().run() weighted_tp = 2.0 + 5.0 weighted_positives = (2.0 + 2.0) + (5.0 
+ 5.0)
      expected_precision = weighted_tp / weighted_positives
      self.assertAlmostEqual(
          expected_precision, update_op.eval(feed_dict=feed_dict))
      self.assertAlmostEqual(
          expected_precision, precision.eval(feed_dict=feed_dict))

  def testWeighted2d(self):
    # Per-element weights: weighted TP over weighted predicted positives.
    predictions = constant_op.constant([[1, 0, 1, 0], [1, 0, 1, 0]])
    labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    precision, update_op = metrics.streaming_precision(
        predictions,
        labels,
        weights=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))

    with self.test_session():
      variables.local_variables_initializer().run()
      weighted_tp = 3.0 + 4.0
      weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
      expected_precision = weighted_tp / weighted_positives
      self.assertAlmostEqual(expected_precision, update_op.eval())
      self.assertAlmostEqual(expected_precision, precision.eval())

  def testWeighted2d_placeholders(self):
    # Same expectation as testWeighted2d, but inputs arrive via feed_dict.
    predictions = array_ops.placeholder(dtype=dtypes_lib.float32)
    labels = array_ops.placeholder(dtype=dtypes_lib.float32)
    feed_dict = {
        predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
        labels: ((0, 1, 1, 0), (1, 0, 0, 1))
    }
    precision, update_op = metrics.streaming_precision(
        predictions,
        labels,
        weights=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))

    with self.test_session():
      variables.local_variables_initializer().run()
      weighted_tp = 3.0 + 4.0
      weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
      expected_precision = weighted_tp / weighted_positives
      self.assertAlmostEqual(
          expected_precision, update_op.eval(feed_dict=feed_dict))
      self.assertAlmostEqual(
          expected_precision, precision.eval(feed_dict=feed_dict))

  def testAllIncorrect(self):
    inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(inputs)
    labels = constant_op.constant(1 - inputs)
    precision, update_op = metrics.streaming_precision(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertAlmostEqual(0, precision.eval())

  def testZeroTrueAndFalsePositivesGivesZeroPrecision(self):
    # Degenerate case: no predicted positives at all; precision defaults to 0.
    predictions = constant_op.constant([0, 0, 0, 0])
    labels = constant_op.constant([0, 0, 0, 0])
    precision, update_op = metrics.streaming_precision(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertEqual(0.0, precision.eval())


class StreamingRecallTest(test.TestCase):
  """Tests for metrics.streaming_recall."""

  def setUp(self):
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_recall(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('recall/false_negatives/count:0',
                                   'recall/true_positives/count:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_recall(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_recall(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=2)
    recall, update_op = metrics.streaming_recall(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_recall = recall.eval()
      for _ in range(10):
        self.assertEqual(initial_recall, recall.eval())

  def testAllCorrect(self):
    np_inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(np_inputs)
    labels = constant_op.constant(np_inputs)
    recall, update_op = metrics.streaming_recall(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertEqual(1, recall.eval())

  def testSomeCorrect(self):
    predictions = constant_op.constant([1, 0, 1, 0], shape=(1, 4))
    labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
    recall, update_op = metrics.streaming_recall(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.5, update_op.eval())
      self.assertAlmostEqual(0.5, recall.eval())

  def testWeighted1d(self):
    # Per-row weights: weighted TP over weighted actual positives.
    predictions = constant_op.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
    labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    weights = constant_op.constant([[2], [5]])
    recall, update_op = metrics.streaming_recall(
        predictions, labels, weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      weighted_tp = 2.0 + 5.0
      weighted_t = (2.0 + 2.0) + (5.0 + 5.0)
      expected_precision = weighted_tp / weighted_t
      self.assertAlmostEqual(expected_precision, update_op.eval())
      self.assertAlmostEqual(expected_precision, recall.eval())

  def testWeighted2d(self):
    predictions = constant_op.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
    labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    weights = constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]])
    recall, update_op = metrics.streaming_recall(
        predictions, labels, weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      weighted_tp = 3.0 + 1.0
      weighted_t = (2.0 + 3.0) + (4.0 + 1.0)
      expected_precision = weighted_tp / weighted_t
      self.assertAlmostEqual(expected_precision, update_op.eval())
      self.assertAlmostEqual(expected_precision, recall.eval())

  def testAllIncorrect(self):
    np_inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(np_inputs)
    labels = constant_op.constant(1 - np_inputs)
    recall, update_op = metrics.streaming_recall(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertEqual(0, recall.eval())

  def testZeroTruePositivesAndFalseNegativesGivesZeroRecall(self):
    # Degenerate case: no actual positives at all; recall defaults to 0.
    predictions = array_ops.zeros((1, 4))
    labels = array_ops.zeros((1, 4))
    recall, update_op = metrics.streaming_recall(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertEqual(0, recall.eval())


class StreamingAUCTest(test.TestCase):
  """Tests for metrics.streaming_auc (ROC and PR curves)."""

  def setUp(self):
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_auc(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self,
                            ('auc/true_positives:0', 'auc/false_negatives:0',
                             'auc/false_positives:0', 'auc/true_negatives:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_auc(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_auc(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=2)
    auc, update_op = metrics.streaming_auc(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_auc = auc.eval()
      for _ in range(10):
        self.assertAlmostEqual(initial_auc, auc.eval(), 5)

  def testAllCorrect(self):
    self.allCorrectAsExpected('ROC')

  def allCorrectAsExpected(self, curve):
    # Perfect predictions yield AUC == 1 for the given curve type.
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as sess:
      predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
      labels = constant_op.constant(inputs)
      auc, update_op = metrics.streaming_auc(predictions, labels, curve=curve)

      sess.run(variables.local_variables_initializer())
      self.assertEqual(1, sess.run(update_op))

      self.assertEqual(1, auc.eval())

  def testSomeCorrect(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
      auc, update_op = metrics.streaming_auc(predictions, labels)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.5, sess.run(update_op))

      self.assertAlmostEqual(0.5, auc.eval())

  def testWeighted1d(self):
    # A single scalar weight scales all counts uniformly, so AUC is unchanged.
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
      weights = constant_op.constant([2], shape=(1, 1))
      auc, update_op = metrics.streaming_auc(
          predictions, labels, weights=weights)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.5, sess.run(update_op), 5)

      self.assertAlmostEqual(0.5, auc.eval(), 5)

  def testWeighted2d(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
      weights = constant_op.constant([1, 2, 3, 4], shape=(1, 4))
      auc, update_op = metrics.streaming_auc(
          predictions, labels, weights=weights)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.7, sess.run(update_op), 5)

      self.assertAlmostEqual(0.7, auc.eval(), 5)

  def testAUCPRSpecialCase(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [0.1, 0.4, 0.35, 0.8], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 0, 1, 1], shape=(1, 4))
      auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.79166, sess.run(update_op), delta=1e-3)

      self.assertAlmostEqual(0.79166, auc.eval(), delta=1e-3)

  def testAnotherAUCPRSpecialCase(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [0.1, 0.4, 0.35, 0.8, 0.1, 0.135, 0.81],
          shape=(1, 7),
          dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 0, 1, 0, 1, 0, 1], shape=(1, 7))
      auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.610317, sess.run(update_op), delta=1e-3)

      self.assertAlmostEqual(0.610317, auc.eval(), delta=1e-3)

  def testThirdAUCPRSpecialCase(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [0.0, 0.1, 0.2, 0.33, 0.3, 0.4, 0.5],
          shape=(1, 7),
          dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 0, 0, 0, 1, 1, 1], shape=(1, 7))
      auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.90277, sess.run(update_op), delta=1e-3)

      self.assertAlmostEqual(0.90277, auc.eval(), delta=1e-3)

  def testAllIncorrect(self):
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as sess:
      predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
      labels = constant_op.constant(1 - inputs, dtype=dtypes_lib.float32)
      auc, update_op = metrics.streaming_auc(predictions, labels)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0, sess.run(update_op))

      self.assertAlmostEqual(0, auc.eval())

  def testZeroTruePositivesAndFalseNegativesGivesOneAUC(self):
    with self.test_session() as sess:
      predictions = array_ops.zeros([4], dtype=dtypes_lib.float32)
      labels = array_ops.zeros([4])
      auc, update_op = metrics.streaming_auc(predictions, labels)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1, sess.run(update_op), 6)

      self.assertAlmostEqual(1, auc.eval(), 6)

  def testRecallOneAndPrecisionOneGivesOnePRAUC(self):
    with self.test_session() as sess:
      predictions = array_ops.ones([4], dtype=dtypes_lib.float32)
      labels = array_ops.ones([4])
      auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1, sess.run(update_op), 6)

      self.assertAlmostEqual(1, auc.eval(), 6)

  def np_auc(self, predictions, labels, weights):
    """Computes the AUC explicitly using Numpy.

    Args:
      predictions: an ndarray with shape [N].
      labels: an ndarray with shape [N].
      weights: an ndarray with shape [N].

    Returns:
      the area under the ROC curve.
    """
    if weights is None:
      weights = np.ones(np.size(predictions))
    is_positive = labels > 0
    num_positives = np.sum(weights[is_positive])
    num_negatives = np.sum(weights[~is_positive])

    # Sort descending:
    inds = np.argsort(-predictions)

    sorted_labels = labels[inds]
    sorted_weights = weights[inds]
    is_positive = sorted_labels > 0

    # Weighted TP rate at each negative gives the rectangle heights of the
    # ROC curve; average them over the negatives.
    tp = np.cumsum(sorted_weights * is_positive) / num_positives
    return np.sum((sorted_weights * tp)[~is_positive]) / num_negatives

  def testWithMultipleUpdates(self):
    num_samples = 1000
    batch_size = 10
    num_batches = int(num_samples / batch_size)

    # Create the labels and data.
    labels = np.random.randint(0, 2, size=num_samples)
    noise = np.random.normal(0.0, scale=0.2, size=num_samples)
    predictions = 0.4 + 0.2 * labels + noise
    # Clip predictions into [0, 1].
    predictions[predictions > 1] = 1
    predictions[predictions < 0] = 0

    def _enqueue_as_batches(x, enqueue_ops):
      # Splits x into num_batches rows, queues each row, and returns the
      # dequeue op that will deliver one batch per session run.
      x_batches = x.astype(np.float32).reshape((num_batches, batch_size))
      x_queue = data_flow_ops.FIFOQueue(
          num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))
      for i in range(num_batches):
        enqueue_ops[i].append(x_queue.enqueue(x_batches[i, :]))
      return x_queue.dequeue()

    for weights in (None, np.ones(num_samples), np.random.exponential(
        scale=1.0, size=num_samples)):
      expected_auc = self.np_auc(predictions, labels, weights)

      with self.test_session() as sess:
        enqueue_ops = [[] for i in range(num_batches)]
        tf_predictions = _enqueue_as_batches(predictions, enqueue_ops)
        tf_labels = _enqueue_as_batches(labels, enqueue_ops)
        tf_weights = (_enqueue_as_batches(weights, enqueue_ops)
                      if weights is not None else None)

        for i in range(num_batches):
          sess.run(enqueue_ops[i])

        auc, update_op = metrics.streaming_auc(
            tf_predictions,
            tf_labels,
            curve='ROC',
            num_thresholds=500,
            weights=tf_weights)

        sess.run(variables.local_variables_initializer())
        for i in range(num_batches):
          sess.run(update_op)

        # Since this is only approximate, we can't expect a 6 digits match.
        # Although with higher number of samples/thresholds we should see the
        # accuracy improving
        self.assertAlmostEqual(expected_auc, auc.eval(), 2)


class StreamingSpecificityAtSensitivityTest(test.TestCase):
  """Tests for metrics.streaming_specificity_at_sensitivity."""

  def setUp(self):
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_specificity_at_sensitivity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        sensitivity=0.7)
    _assert_local_variables(self,
                            ('specificity_at_sensitivity/true_positives:0',
                             'specificity_at_sensitivity/false_negatives:0',
                             'specificity_at_sensitivity/false_positives:0',
                             'specificity_at_sensitivity/true_negatives:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_specificity_at_sensitivity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        sensitivity=0.7,
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_specificity_at_sensitivity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        sensitivity=0.7,
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=2)
    specificity, update_op = metrics.streaming_specificity_at_sensitivity(
        predictions, labels, sensitivity=0.7)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_specificity = specificity.eval()
      for _ in range(10):
        self.assertAlmostEqual(initial_specificity, specificity.eval(), 5)

  def testAllCorrect(self):
    inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
    labels = constant_op.constant(inputs)
    specificity, update_op = metrics.streaming_specificity_at_sensitivity(
        predictions, labels, sensitivity=0.7)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(1, sess.run(update_op))
      self.assertEqual(1, specificity.eval())

  def testSomeCorrectHighSensitivity(self):
    predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.45, 0.5, 0.8, 0.9]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    specificity, update_op = metrics.streaming_specificity_at_sensitivity(
        predictions, labels, sensitivity=0.8)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1.0, sess.run(update_op))
      self.assertAlmostEqual(1.0, specificity.eval())

  def testSomeCorrectLowSensitivity(self):
    predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    specificity, update_op = metrics.streaming_specificity_at_sensitivity(
        predictions, labels, sensitivity=0.4)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      self.assertAlmostEqual(0.6, sess.run(update_op))
      self.assertAlmostEqual(0.6, specificity.eval())

  def testWeighted1d(self):
    # Uniform scalar weight: same result as the unweighted low-sensitivity
    # case above.
    predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
    weights_values = [3]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    weights = constant_op.constant(weights_values)
    specificity, update_op = metrics.streaming_specificity_at_sensitivity(
        predictions, labels, weights=weights, sensitivity=0.4)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      self.assertAlmostEqual(0.6, sess.run(update_op))
      self.assertAlmostEqual(0.6, specificity.eval())

  def testWeighted2d(self):
    predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
    weights_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    weights = constant_op.constant(weights_values)
    specificity, update_op = metrics.streaming_specificity_at_sensitivity(
        predictions, labels, weights=weights, sensitivity=0.4)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      self.assertAlmostEqual(8.0 / 15.0, sess.run(update_op))
      self.assertAlmostEqual(8.0 / 15.0, specificity.eval())


class StreamingSensitivityAtSpecificityTest(test.TestCase):
  """Tests for metrics.streaming_sensitivity_at_specificity."""

  def setUp(self):
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_sensitivity_at_specificity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        specificity=0.7)
    _assert_local_variables(self,
                            ('sensitivity_at_specificity/true_positives:0',
                             'sensitivity_at_specificity/false_negatives:0',
                             'sensitivity_at_specificity/false_positives:0',
                             'sensitivity_at_specificity/true_negatives:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_sensitivity_at_specificity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        specificity=0.7,
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_sensitivity_at_specificity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        specificity=0.7,
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
    # NOTE: labels use maxval=2 here (0/1 labels), unlike the sibling tests.
    labels = random_ops.random_uniform(
        (10, 3), maxval=2, dtype=dtypes_lib.int64, seed=2)
    sensitivity, update_op = metrics.streaming_sensitivity_at_specificity(
        predictions, labels, specificity=0.7)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_sensitivity = sensitivity.eval()
      for _ in range(10):
        self.assertAlmostEqual(initial_sensitivity, sensitivity.eval(), 5)

  def testAllCorrect(self):
    inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
    labels = constant_op.constant(inputs)
    specificity, update_op = metrics.streaming_sensitivity_at_specificity(
        predictions, labels, specificity=0.7)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(1, sess.run(update_op))
      self.assertEqual(1, specificity.eval())

  def testSomeCorrectHighSpecificity(self):
    predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.1, 0.45, 0.5, 0.8, 0.9]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    specificity, update_op = metrics.streaming_sensitivity_at_specificity(
        predictions, labels, specificity=0.8)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.8, sess.run(update_op))
      self.assertAlmostEqual(0.8, specificity.eval())

  def testSomeCorrectLowSpecificity(self):
    predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    specificity, update_op = metrics.streaming_sensitivity_at_specificity(
        predictions, labels, specificity=0.4)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.6, sess.run(update_op))
      self.assertAlmostEqual(0.6, specificity.eval())

  def testWeighted(self):
    predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
    weights_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    weights = constant_op.constant(weights_values)
    specificity, update_op = metrics.streaming_sensitivity_at_specificity(
        predictions, labels, weights=weights, specificity=0.4)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.675, sess.run(update_op))
      self.assertAlmostEqual(0.675, specificity.eval())


# TODO(nsilberman): Break this up into two sets of tests.
class StreamingPrecisionRecallThresholdsTest(test.TestCase):
  """Tests streaming precision/recall evaluated at a list of thresholds."""

  def setUp(self):
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    # Creating the metric must register exactly its two accumulators.
    metrics.streaming_precision_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0])
    _assert_local_variables(self, (
        'precision_at_thresholds/true_positives:0',
        'precision_at_thresholds/false_positives:0',))

  def testMetricsCollection(self):
    collection = '__metrics__'
    precision, _ = metrics.streaming_precision_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0],
        metrics_collections=[collection])
    recall, _ = metrics.streaming_recall_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0],
        metrics_collections=[collection])
    self.assertListEqual(ops.get_collection(collection), [precision, recall])

  def testUpdatesCollection(self):
    collection = '__updates__'
    _, precision_op = metrics.streaming_precision_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0],
        updates_collections=[collection])
    _, recall_op = metrics.streaming_recall_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0],
        updates_collections=[collection])
    self.assertListEqual(
        ops.get_collection(collection), [precision_op, recall_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=2)
    thresholds = [0, 0.5, 1.0]
    precision, precision_op = metrics.streaming_precision_at_thresholds(
        predictions, labels, thresholds)
    recall, recall_op = metrics.streaming_recall_at_thresholds(
        predictions, labels, thresholds)

    with self.test_session() as session:
      session.run(variables.local_variables_initializer())

      # Apply one update, remember the values, then check further updates
      # do not change the value tensors.
      session.run([precision_op, recall_op])
      precision_before = precision.eval()
      recall_before = recall.eval()
      for _ in range(10):
        session.run([precision_op, recall_op])
        self.assertAllClose(precision_before, precision.eval())
        self.assertAllClose(recall_before, recall.eval())

  # TODO(nsilberman): fix tests (passing but incorrect).
  def testAllCorrect(self):
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as session:
      predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
      labels = constant_op.constant(inputs)
      thresholds = [0.5]
      precision, precision_op = metrics.streaming_precision_at_thresholds(
          predictions, labels, thresholds)
      recall, recall_op = metrics.streaming_recall_at_thresholds(
          predictions, labels, thresholds)

      session.run(variables.local_variables_initializer())
      session.run([precision_op, recall_op])

      self.assertEqual(1, precision.eval())
      self.assertEqual(1, recall.eval())

  def testSomeCorrect(self):
    with self.test_session() as session:
      predictions = constant_op.constant(
          [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
      thresholds = [0.5]
      precision, precision_op = metrics.streaming_precision_at_thresholds(
          predictions, labels, thresholds)
      recall, recall_op = metrics.streaming_recall_at_thresholds(
          predictions, labels, thresholds)

      session.run(variables.local_variables_initializer())
      session.run([precision_op, recall_op])

      self.assertAlmostEqual(0.5, precision.eval())
      self.assertAlmostEqual(0.5, recall.eval())

  def testAllIncorrect(self):
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as session:
      predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
      labels = constant_op.constant(1 - inputs, dtype=dtypes_lib.float32)
      thresholds = [0.5]
      precision, precision_op = metrics.streaming_precision_at_thresholds(
          predictions, labels, thresholds)
      recall, recall_op = metrics.streaming_recall_at_thresholds(
          predictions, labels, thresholds)

      session.run(variables.local_variables_initializer())
      session.run([precision_op, recall_op])

      self.assertAlmostEqual(0, precision.eval())
      self.assertAlmostEqual(0, recall.eval())

  def testWeights1d(self):
    with self.test_session() as session:
      predictions = constant_op.constant(
          [[1, 0], [1, 0]], shape=(2, 2), dtype=dtypes_lib.float32)
      labels = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
      weights = constant_op.constant(
          [[0], [1]], shape=(2, 1), dtype=dtypes_lib.float32)
      thresholds = [0.5, 1.1]
      precision, precision_op = metrics.streaming_precision_at_thresholds(
          predictions, labels, thresholds, weights=weights)
      recall, recall_op = metrics.streaming_recall_at_thresholds(
          predictions, labels, thresholds, weights=weights)

      # Extract a scalar per threshold.
      low_precision, high_precision = array_ops.split(
          value=precision, num_or_size_splits=2, axis=0)
      low_precision = array_ops.reshape(low_precision, shape=())
      high_precision = array_ops.reshape(high_precision, shape=())
      low_recall, high_recall = array_ops.split(
          value=recall, num_or_size_splits=2, axis=0)
      low_recall = array_ops.reshape(low_recall, shape=())
      high_recall = array_ops.reshape(high_recall, shape=())

      session.run(variables.local_variables_initializer())
      session.run([precision_op, recall_op])

      self.assertAlmostEqual(1.0, low_precision.eval(), places=5)
      self.assertAlmostEqual(0.0, high_precision.eval(), places=5)
      self.assertAlmostEqual(1.0, low_recall.eval(), places=5)
      self.assertAlmostEqual(0.0, high_recall.eval(), places=5)

  def testWeights2d(self):
    with self.test_session() as session:
      predictions = constant_op.constant(
          [[1, 0], [1, 0]], shape=(2, 2), dtype=dtypes_lib.float32)
      labels = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
      weights = constant_op.constant(
          [[0, 0], [1, 1]], shape=(2, 2), dtype=dtypes_lib.float32)
      thresholds = [0.5, 1.1]
      precision, precision_op = metrics.streaming_precision_at_thresholds(
          predictions, labels, thresholds, weights=weights)
      recall, recall_op = metrics.streaming_recall_at_thresholds(
          predictions, labels, thresholds, weights=weights)

      # Extract a scalar per threshold.
      low_precision, high_precision = array_ops.split(
          value=precision, num_or_size_splits=2, axis=0)
      low_precision = array_ops.reshape(low_precision, shape=())
      high_precision = array_ops.reshape(high_precision, shape=())
      low_recall, high_recall = array_ops.split(
          value=recall, num_or_size_splits=2, axis=0)
      low_recall = array_ops.reshape(low_recall, shape=())
      high_recall = array_ops.reshape(high_recall, shape=())

      session.run(variables.local_variables_initializer())
      session.run([precision_op, recall_op])

      self.assertAlmostEqual(1.0, low_precision.eval(), places=5)
      self.assertAlmostEqual(0.0, high_precision.eval(), places=5)
      self.assertAlmostEqual(1.0, low_recall.eval(), places=5)
      self.assertAlmostEqual(0.0, high_recall.eval(), places=5)

  def testExtremeThresholds(self):
    with self.test_session() as session:
      predictions = constant_op.constant(
          [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 1, 1, 1], shape=(1, 4))
      thresholds = [-1.0, 2.0]  # lower/higher than any values
      precision, precision_op = metrics.streaming_precision_at_thresholds(
          predictions, labels, thresholds)
      recall, recall_op = metrics.streaming_recall_at_thresholds(
          predictions, labels, thresholds)

      low_precision, high_precision = array_ops.split(
          value=precision, num_or_size_splits=2, axis=0)
      low_recall, high_recall = array_ops.split(
          value=recall, num_or_size_splits=2, axis=0)

      session.run(variables.local_variables_initializer())
      session.run([precision_op, recall_op])

      self.assertAlmostEqual(0.75, low_precision.eval())
      self.assertAlmostEqual(0.0, high_precision.eval())
      self.assertAlmostEqual(1.0, low_recall.eval())
      self.assertAlmostEqual(0.0, high_recall.eval())

  def testZeroLabelsPredictions(self):
    with self.test_session() as session:
      predictions = array_ops.zeros([4], dtype=dtypes_lib.float32)
      labels = array_ops.zeros([4])
      thresholds = [0.5]
      precision, precision_op = metrics.streaming_precision_at_thresholds(
          predictions, labels, thresholds)
      recall, recall_op = metrics.streaming_recall_at_thresholds(
          predictions, labels, thresholds)

      session.run(variables.local_variables_initializer())
      session.run([precision_op, recall_op])

      self.assertAlmostEqual(0, precision.eval(), 6)
      self.assertAlmostEqual(0, recall.eval(), 6)

  def testWithMultipleUpdates(self):
    num_samples = 1000
    batch_size = 10
    num_batches = int(num_samples / batch_size)

    # Synthesize noisy predictions correlated with the labels, clipped to
    # [0, 1].
    labels = np.random.randint(0, 2, size=(num_samples, 1))
    noise = np.random.normal(0.0, scale=0.2, size=(num_samples, 1))
    predictions = 0.4 + 0.2 * labels + noise
    predictions[predictions > 1] = 1
    predictions[predictions < 0] = 0
    thresholds = [0.3]

    # Confusion counts, computed with vectorized numpy rather than a
    # per-sample python loop.
    predicted_positive = predictions > thresholds[0]
    actually_positive = labels == 1
    tp = int(np.sum(predicted_positive & actually_positive))
    fp = int(np.sum(predicted_positive & ~actually_positive))
    fn = int(np.sum(~predicted_positive & actually_positive))
    tn = int(np.sum(~predicted_positive & ~actually_positive))

    epsilon = 1e-7
    expected_prec = tp / (epsilon + tp + fp)
    expected_rec = tp / (epsilon + tp + fn)

    labels = labels.astype(np.float32)
    predictions = predictions.astype(np.float32)

    with self.test_session() as session:
      # Reshape the data so it's easy to queue up:
      predictions_batches = predictions.reshape((batch_size, num_batches))
      labels_batches = labels.reshape((batch_size, num_batches))

      # Enqueue the data:
      predictions_queue = data_flow_ops.FIFOQueue(
          num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))
      labels_queue = data_flow_ops.FIFOQueue(
          num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))

      for i in range(int(num_batches)):
        tf_prediction = constant_op.constant(predictions_batches[:, i])
        tf_label = constant_op.constant(labels_batches[:, i])
        session.run([
            predictions_queue.enqueue(tf_prediction),
            labels_queue.enqueue(tf_label)
        ])

      tf_predictions = predictions_queue.dequeue()
      tf_labels = labels_queue.dequeue()

      precision, precision_op = metrics.streaming_precision_at_thresholds(
          tf_predictions, tf_labels, thresholds)
      recall, recall_op = metrics.streaming_recall_at_thresholds(
          tf_predictions, tf_labels, thresholds)

      session.run(variables.local_variables_initializer())
      for _ in range(int(num_samples / batch_size)):
        session.run([precision_op, recall_op])

      # The streaming estimate is only approximate, so compare just two
      # decimal places; accuracy improves with more samples/thresholds.
      self.assertAlmostEqual(expected_prec, precision.eval(), 2)
      self.assertAlmostEqual(expected_rec, recall.eval(), 2)


# TODO(ptucker): Remove when we remove `streaming_recall_at_k`.
# This op will be deprecated soon in favor of `streaming_sparse_recall_at_k`. # Until then, this test validates that both ops yield the same results. class StreamingRecallAtKTest(test.TestCase): def setUp(self): np.random.seed(1) ops.reset_default_graph() self._batch_size = 4 self._num_classes = 3 self._np_predictions = np.matrix(('0.1 0.2 0.7;' '0.6 0.2 0.2;' '0.0 0.9 0.1;' '0.2 0.0 0.8')) self._np_labels = [0, 0, 0, 0] def testVars(self): metrics.streaming_recall_at_k( predictions=array_ops.ones((self._batch_size, self._num_classes)), labels=array_ops.ones( (self._batch_size,), dtype=dtypes_lib.int32), k=1) _assert_local_variables(self, ('recall_at_1/count:0', 'recall_at_1/total:0')) def testMetricsCollection(self): my_collection_name = '__metrics__' mean, _ = metrics.streaming_recall_at_k( predictions=array_ops.ones((self._batch_size, self._num_classes)), labels=array_ops.ones( (self._batch_size,), dtype=dtypes_lib.int32), k=1, metrics_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [mean]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_recall_at_k( predictions=array_ops.ones((self._batch_size, self._num_classes)), labels=array_ops.ones( (self._batch_size,), dtype=dtypes_lib.int32), k=1, updates_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [update_op]) def testSingleUpdateKIs1(self): predictions = constant_op.constant( self._np_predictions, shape=(self._batch_size, self._num_classes), dtype=dtypes_lib.float32) labels = constant_op.constant( self._np_labels, shape=(self._batch_size,), dtype=dtypes_lib.int64) recall, update_op = metrics.streaming_recall_at_k(predictions, labels, k=1) sp_recall, sp_update_op = metrics.streaming_sparse_recall_at_k( predictions, array_ops.reshape(labels, (self._batch_size, 1)), k=1) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) 
      self.assertEqual(0.25, sess.run(update_op))
      self.assertEqual(0.25, recall.eval())
      self.assertEqual(0.25, sess.run(sp_update_op))
      self.assertEqual(0.25, sp_recall.eval())

  def testSingleUpdateKIs2(self):
    # k=2: rows 1 and 3 contain class 0 in their top-2 scores -> recall 0.5,
    # and the dense and sparse ops must agree.
    predictions = constant_op.constant(
        self._np_predictions,
        shape=(self._batch_size, self._num_classes),
        dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        self._np_labels, shape=(self._batch_size,), dtype=dtypes_lib.int64)
    recall, update_op = metrics.streaming_recall_at_k(predictions, labels, k=2)
    sp_recall, sp_update_op = metrics.streaming_sparse_recall_at_k(
        predictions, array_ops.reshape(labels, (self._batch_size, 1)), k=2)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(0.5, sess.run(update_op))
      self.assertEqual(0.5, recall.eval())
      self.assertEqual(0.5, sess.run(sp_update_op))
      self.assertEqual(0.5, sp_recall.eval())

  def testSingleUpdateKIs3(self):
    # k equals the number of classes, so every label is trivially in the
    # top-k -> recall 1.0 from both ops.
    predictions = constant_op.constant(
        self._np_predictions,
        shape=(self._batch_size, self._num_classes),
        dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        self._np_labels, shape=(self._batch_size,), dtype=dtypes_lib.int64)
    recall, update_op = metrics.streaming_recall_at_k(predictions, labels, k=3)
    sp_recall, sp_update_op = metrics.streaming_sparse_recall_at_k(
        predictions, array_ops.reshape(labels, (self._batch_size, 1)), k=3)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(1.0, sess.run(update_op))
      self.assertEqual(1.0, recall.eval())
      self.assertEqual(1.0, sess.run(sp_update_op))
      self.assertEqual(1.0, sp_recall.eval())

  def testSingleUpdateSomeMissingKIs2(self):
    # Zero weights drop rows 0 and 2; the remaining rows (1 and 3) are both
    # top-2 hits, so weighted recall is 1.0 from both ops.
    predictions = constant_op.constant(
        self._np_predictions,
        shape=(self._batch_size, self._num_classes),
        dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        self._np_labels, shape=(self._batch_size,), dtype=dtypes_lib.int64)
    weights = constant_op.constant(
        [0, 1, 0, 1], shape=(self._batch_size,), dtype=dtypes_lib.float32)
    recall, update_op = metrics.streaming_recall_at_k(
        predictions, labels, k=2, weights=weights)
    sp_recall, sp_update_op = metrics.streaming_sparse_recall_at_k(
        predictions,
        array_ops.reshape(labels, (self._batch_size, 1)),
        k=2,
        weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(1.0, sess.run(update_op))
      self.assertEqual(1.0, recall.eval())
      self.assertEqual(1.0, sess.run(sp_update_op))
      self.assertEqual(1.0, sp_recall.eval())


class StreamingSparsePrecisionTest(test.TestCase):
  """Tests for streaming sparse precision / average-precision metrics."""

  def _test_streaming_sparse_precision_at_k(self,
                                            predictions,
                                            labels,
                                            k,
                                            expected,
                                            class_id=None,
                                            weights=None):
    # Builds the metric in a fresh graph, checks that evaluating before
    # variable initialization fails, then asserts both the update op and the
    # value tensor equal `expected` (or are NaN when `expected` is NaN).
    with ops.Graph().as_default() as g, self.test_session(g):
      if weights is not None:
        weights = constant_op.constant(weights, dtypes_lib.float32)
      metric, update = metrics.streaming_sparse_precision_at_k(
          predictions=constant_op.constant(predictions, dtypes_lib.float32),
          labels=labels,
          k=k,
          class_id=class_id,
          weights=weights)

      # Fails without initialized vars.
      self.assertRaises(errors_impl.OpError, metric.eval)
      self.assertRaises(errors_impl.OpError, update.eval)
      variables.variables_initializer(variables.local_variables()).run()

      # Run per-step op and assert expected values.
      if math.isnan(expected):
        _assert_nan(self, update.eval())
        _assert_nan(self, metric.eval())
      else:
        self.assertEqual(expected, update.eval())
        self.assertEqual(expected, metric.eval())

  def _test_streaming_sparse_precision_at_top_k(self,
                                                top_k_predictions,
                                                labels,
                                                expected,
                                                class_id=None,
                                                weights=None):
    # Same contract as _test_streaming_sparse_precision_at_k, but takes
    # precomputed top-k class indices instead of raw scores.
    with ops.Graph().as_default() as g, self.test_session(g):
      if weights is not None:
        weights = constant_op.constant(weights, dtypes_lib.float32)
      metric, update = metrics.streaming_sparse_precision_at_top_k(
          top_k_predictions=constant_op.constant(top_k_predictions,
                                                 dtypes_lib.int32),
          labels=labels,
          class_id=class_id,
          weights=weights)

      # Fails without initialized vars.
      self.assertRaises(errors_impl.OpError, metric.eval)
      self.assertRaises(errors_impl.OpError, update.eval)
      variables.variables_initializer(variables.local_variables()).run()

      # Run per-step op and assert expected values.
      if math.isnan(expected):
        self.assertTrue(math.isnan(update.eval()))
        self.assertTrue(math.isnan(metric.eval()))
      else:
        self.assertEqual(expected, update.eval())
        self.assertEqual(expected, metric.eval())

  def _test_streaming_sparse_average_precision_at_k(self,
                                                    predictions,
                                                    labels,
                                                    k,
                                                    expected,
                                                    weights=None):
    # Asserts the average-precision@k metric and its update op both match
    # `expected` (approximately, since the value is a float average), after
    # verifying evaluation fails before local-variable initialization.
    with ops.Graph().as_default() as g, self.test_session(g):
      if weights is not None:
        weights = constant_op.constant(weights, dtypes_lib.float32)
      predictions = constant_op.constant(predictions, dtypes_lib.float32)
      metric, update = metrics.streaming_sparse_average_precision_at_k(
          predictions, labels, k, weights=weights)

      # Fails without initialized vars.
      self.assertRaises(errors_impl.OpError, metric.eval)
      self.assertRaises(errors_impl.OpError, update.eval)
      local_variables = variables.local_variables()
      variables.variables_initializer(local_variables).run()

      # Run per-step op and assert expected values.
      if math.isnan(expected):
        _assert_nan(self, update.eval())
        _assert_nan(self, metric.eval())
      else:
        self.assertAlmostEqual(expected, update.eval())
        self.assertAlmostEqual(expected, metric.eval())

  def _test_streaming_sparse_average_precision_at_top_k(self,
                                                        top_k_predictions,
                                                        labels,
                                                        expected,
                                                        weights=None):
    # Variant of the helper above that takes precomputed top-k class indices
    # instead of raw prediction scores.
    with ops.Graph().as_default() as g, self.test_session(g):
      if weights is not None:
        weights = constant_op.constant(weights, dtypes_lib.float32)
      metric, update = metrics.streaming_sparse_average_precision_at_top_k(
          top_k_predictions, labels, weights=weights)

      # Fails without initialized vars.
      self.assertRaises(errors_impl.OpError, metric.eval)
      self.assertRaises(errors_impl.OpError, update.eval)
      local_variables = variables.local_variables()
      variables.variables_initializer(local_variables).run()

      # Run per-step op and assert expected values.
      if math.isnan(expected):
        _assert_nan(self, update.eval())
        _assert_nan(self, metric.eval())
      else:
        self.assertAlmostEqual(expected, update.eval())
        self.assertAlmostEqual(expected, metric.eval())

  def test_top_k_rank_invalid(self):
    # The op requires top_k_predictions of rank >= 2 (batch x k); a rank-1
    # input must be rejected at graph-construction time.
    with self.test_session():
      # top_k_predictions has rank < 2.
      top_k_predictions = [9, 4, 6, 2, 0]
      sp_labels = sparse_tensor.SparseTensorValue(
          indices=np.array([[0,], [1,], [2,]], np.int64),
          values=np.array([2, 7, 8], np.int64),
          dense_shape=np.array([10,], np.int64))

      with self.assertRaises(ValueError):
        precision, _ = metrics.streaming_sparse_precision_at_top_k(
            top_k_predictions=constant_op.constant(top_k_predictions,
                                                   dtypes_lib.int64),
            labels=sp_labels)
        variables.variables_initializer(variables.local_variables()).run()
        precision.eval()

  def test_average_precision(self):
    # Example 1.
    # Matches example here:
    # fastml.com/what-you-wanted-to-know-about-mean-average-precision
    labels_ex1 = (0, 1, 2, 3, 4)
    labels = np.array([labels_ex1], dtype=np.int64)
    predictions_ex1 = (0.2, 0.1, 0.0, 0.4, 0.0, 0.5, 0.3)
    predictions = (predictions_ex1,)
    # Classes ranked by descending score; entries 3 (class 3) and 0 (class 0)
    # are the label hits among the top 4.
    predictions_top_k_ex1 = (5, 3, 6, 0, 1, 2)
    precision_ex1 = (0.0 / 1, 1.0 / 2, 1.0 / 3, 2.0 / 4)
    avg_precision_ex1 = (0.0 / 1, precision_ex1[1] / 2, precision_ex1[1] / 3,
                         (precision_ex1[1] + precision_ex1[3]) / 4)
    # Check precision@k and average-precision@k for k = 1..4, via both the
    # score-based and the precomputed-top-k entry points.
    for i in xrange(4):
      k = i + 1
      self._test_streaming_sparse_precision_at_k(
          predictions, labels, k, expected=precision_ex1[i])
      self._test_streaming_sparse_precision_at_top_k(
          (predictions_top_k_ex1[:k],), labels, expected=precision_ex1[i])
      self._test_streaming_sparse_average_precision_at_k(
          predictions, labels, k, expected=avg_precision_ex1[i])
      self._test_streaming_sparse_average_precision_at_top_k(
          (predictions_top_k_ex1[:k],), labels, expected=avg_precision_ex1[i])

    # Example 2.
labels_ex2 = (0, 2, 4, 5, 6) labels = np.array([labels_ex2], dtype=np.int64) predictions_ex2 = (0.3, 0.5, 0.0, 0.4, 0.0, 0.1, 0.2) predictions = (predictions_ex2,) predictions_top_k_ex2 = (1, 3, 0, 6, 5) precision_ex2 = (0.0 / 1, 0.0 / 2, 1.0 / 3, 2.0 / 4) avg_precision_ex2 = (0.0 / 1, 0.0 / 2, precision_ex2[2] / 3, (precision_ex2[2] + precision_ex2[3]) / 4) for i in xrange(4): k = i + 1 self._test_streaming_sparse_precision_at_k( predictions, labels, k, expected=precision_ex2[i]) self._test_streaming_sparse_precision_at_top_k( (predictions_top_k_ex2[:k],), labels, expected=precision_ex2[i]) self._test_streaming_sparse_average_precision_at_k( predictions, labels, k, expected=avg_precision_ex2[i]) self._test_streaming_sparse_average_precision_at_top_k( (predictions_top_k_ex2[:k],), labels, expected=avg_precision_ex2[i]) # Both examples, we expect both precision and average precision to be the # average of the 2 examples. labels = np.array([labels_ex1, labels_ex2], dtype=np.int64) predictions = (predictions_ex1, predictions_ex2) streaming_precision = [(ex1 + ex2) / 2 for ex1, ex2 in zip(precision_ex1, precision_ex2)] streaming_average_precision = [ (ex1 + ex2) / 2 for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2) ] for i in xrange(4): k = i + 1 self._test_streaming_sparse_precision_at_k( predictions, labels, k, expected=streaming_precision[i]) predictions_top_k = (predictions_top_k_ex1[:k], predictions_top_k_ex2[:k]) self._test_streaming_sparse_precision_at_top_k( predictions_top_k, labels, expected=streaming_precision[i]) self._test_streaming_sparse_average_precision_at_k( predictions, labels, k, expected=streaming_average_precision[i]) self._test_streaming_sparse_average_precision_at_top_k( predictions_top_k, labels, expected=streaming_average_precision[i]) # Weighted examples, we expect streaming average precision to be the # weighted average of the 2 examples. 
weights = (0.3, 0.6) streaming_average_precision = [ (weights[0] * ex1 + weights[1] * ex2) / (weights[0] + weights[1]) for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2) ] for i in xrange(4): k = i + 1 self._test_streaming_sparse_average_precision_at_k( predictions, labels, k, expected=streaming_average_precision[i], weights=weights) self._test_streaming_sparse_average_precision_at_top_k( (predictions_top_k_ex1[:k], predictions_top_k_ex2[:k]), labels, expected=streaming_average_precision[i], weights=weights) def test_average_precision_some_labels_out_of_range(self): """Tests that labels outside the [0, n_classes) range are ignored.""" labels_ex1 = (-1, 0, 1, 2, 3, 4, 7) labels = np.array([labels_ex1], dtype=np.int64) predictions_ex1 = (0.2, 0.1, 0.0, 0.4, 0.0, 0.5, 0.3) predictions = (predictions_ex1,) predictions_top_k_ex1 = (5, 3, 6, 0, 1, 2) precision_ex1 = (0.0 / 1, 1.0 / 2, 1.0 / 3, 2.0 / 4) avg_precision_ex1 = (0.0 / 1, precision_ex1[1] / 2, precision_ex1[1] / 3, (precision_ex1[1] + precision_ex1[3]) / 4) for i in xrange(4): k = i + 1 self._test_streaming_sparse_precision_at_k( predictions, labels, k, expected=precision_ex1[i]) self._test_streaming_sparse_precision_at_top_k( (predictions_top_k_ex1[:k],), labels, expected=precision_ex1[i]) self._test_streaming_sparse_average_precision_at_k( predictions, labels, k, expected=avg_precision_ex1[i]) self._test_streaming_sparse_average_precision_at_top_k( (predictions_top_k_ex1[:k],), labels, expected=avg_precision_ex1[i]) def test_average_precision_at_top_k_static_shape_check(self): predictions_top_k = array_ops.placeholder(shape=(2, None), dtype=dtypes_lib.int64) labels = np.array(((1,), (2,)), dtype=np.int64) # Fails due to non-static predictions_idx shape. with self.assertRaises(ValueError): metric_ops.streaming_sparse_average_precision_at_top_k(predictions_top_k, labels) predictions_top_k = (2, 1) # Fails since rank of predictions_idx is less than one. 
with self.assertRaises(ValueError): metric_ops.streaming_sparse_average_precision_at_top_k(predictions_top_k, labels) predictions_top_k = ((2,), (1,)) # Valid static shape. metric_ops.streaming_sparse_average_precision_at_top_k(predictions_top_k, labels) def test_one_label_at_k1_nan(self): predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]] top_k_predictions = [[3], [3]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 0, 1], [0, 0, 1, 0]]) dense_labels = np.array([[3], [2]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Classes 0,1,2 have 0 predictions, classes -1 and 4 are out of range. for class_id in (-1, 0, 1, 2, 4): self._test_streaming_sparse_precision_at_k( predictions, labels, k=1, expected=NAN, class_id=class_id) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=NAN, class_id=class_id) def test_one_label_at_k1(self): predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]] top_k_predictions = [[3], [3]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 0, 1], [0, 0, 1, 0]]) dense_labels = np.array([[3], [2]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Class 3: 1 label, 2 predictions, 1 correct. self._test_streaming_sparse_precision_at_k( predictions, labels, k=1, expected=1.0 / 2, class_id=3) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=1.0 / 2, class_id=3) # All classes: 2 labels, 2 predictions, 1 correct. 
self._test_streaming_sparse_precision_at_k( predictions, labels, k=1, expected=1.0 / 2) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=1.0 / 2) def test_three_labels_at_k5_no_predictions(self): predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]] top_k_predictions = [ [9, 4, 6, 2, 0], [5, 7, 2, 9, 6], ] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]]) dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Classes 1,3,8 have 0 predictions, classes -1 and 10 are out of range. for class_id in (-1, 1, 3, 8, 10): self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=NAN, class_id=class_id) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=NAN, class_id=class_id) def test_three_labels_at_k5_no_labels(self): predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]] top_k_predictions = [ [9, 4, 6, 2, 0], [5, 7, 2, 9, 6], ] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]]) dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Classes 0,4,6,9: 0 labels, >=1 prediction. 
for class_id in (0, 4, 6, 9): self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=0.0, class_id=class_id) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=0.0, class_id=class_id) def test_three_labels_at_k5(self): predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]] top_k_predictions = [ [9, 4, 6, 2, 0], [5, 7, 2, 9, 6], ] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]]) dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Class 2: 2 labels, 2 correct predictions. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=2.0 / 2, class_id=2) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=2.0 / 2, class_id=2) # Class 5: 1 label, 1 correct prediction. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=1.0 / 1, class_id=5) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=1.0 / 1, class_id=5) # Class 7: 1 label, 1 incorrect prediction. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=0.0 / 1, class_id=7) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=0.0 / 1, class_id=7) # All classes: 10 predictions, 3 correct. 
self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=3.0 / 10) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=3.0 / 10) def test_three_labels_at_k5_some_out_of_range(self): """Tests that labels outside the [0, n_classes) range are ignored.""" predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]] top_k_predictions = [ [9, 4, 6, 2, 0], [5, 7, 2, 9, 6], ] sp_labels = sparse_tensor.SparseTensorValue( indices=[[0, 0], [0, 1], [0, 2], [0, 3], [1, 0], [1, 1], [1, 2], [1, 3]], # values -1 and 10 are outside the [0, n_classes) range and are ignored. values=np.array([2, 7, -1, 8, 1, 2, 5, 10], np.int64), dense_shape=[2, 4]) # Class 2: 2 labels, 2 correct predictions. self._test_streaming_sparse_precision_at_k( predictions, sp_labels, k=5, expected=2.0 / 2, class_id=2) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, sp_labels, expected=2.0 / 2, class_id=2) # Class 5: 1 label, 1 correct prediction. self._test_streaming_sparse_precision_at_k( predictions, sp_labels, k=5, expected=1.0 / 1, class_id=5) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, sp_labels, expected=1.0 / 1, class_id=5) # Class 7: 1 label, 1 incorrect prediction. self._test_streaming_sparse_precision_at_k( predictions, sp_labels, k=5, expected=0.0 / 1, class_id=7) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, sp_labels, expected=0.0 / 1, class_id=7) # All classes: 10 predictions, 3 correct. 
self._test_streaming_sparse_precision_at_k( predictions, sp_labels, k=5, expected=3.0 / 10) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, sp_labels, expected=3.0 / 10) def test_3d_nan(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] top_k_predictions = [[ [9, 4, 6, 2, 0], [5, 7, 2, 9, 6], ], [ [5, 7, 2, 9, 6], [9, 4, 6, 2, 0], ]] labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]]) # Classes 1,3,8 have 0 predictions, classes -1 and 10 are out of range. for class_id in (-1, 1, 3, 8, 10): self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=NAN, class_id=class_id) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=NAN, class_id=class_id) def test_3d_no_labels(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] top_k_predictions = [[ [9, 4, 6, 2, 0], [5, 7, 2, 9, 6], ], [ [5, 7, 2, 9, 6], [9, 4, 6, 2, 0], ]] labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]]) # Classes 0,4,6,9: 0 labels, >=1 prediction. 
for class_id in (0, 4, 6, 9): self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=0.0, class_id=class_id) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=0.0, class_id=class_id) def test_3d(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] top_k_predictions = [[ [9, 4, 6, 2, 0], [5, 7, 2, 9, 6], ], [ [5, 7, 2, 9, 6], [9, 4, 6, 2, 0], ]] labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]]) # Class 2: 4 predictions, all correct. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=4.0 / 4, class_id=2) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=4.0 / 4, class_id=2) # Class 5: 2 predictions, both correct. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=2.0 / 2, class_id=5) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=2.0 / 2, class_id=5) # Class 7: 2 predictions, 1 correct. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=1.0 / 2, class_id=7) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=1.0 / 2, class_id=7) # All classes: 20 predictions, 7 correct. 
self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=7.0 / 20) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=7.0 / 20) def test_3d_ignore_all(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] top_k_predictions = [[ [9, 4, 6, 2, 0], [5, 7, 2, 9, 6], ], [ [5, 7, 2, 9, 6], [9, 4, 6, 2, 0], ]] labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]]) for class_id in xrange(10): self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=NAN, class_id=class_id, weights=[[0], [0]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=NAN, class_id=class_id, weights=[[0], [0]]) self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=NAN, class_id=class_id, weights=[[0, 0], [0, 0]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=NAN, class_id=class_id, weights=[[0, 0], [0, 0]]) self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=NAN, weights=[[0], [0]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=NAN, weights=[[0], [0]]) self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=NAN, weights=[[0, 0], [0, 0]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=NAN, weights=[[0, 0], [0, 0]]) def test_3d_ignore_some(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] top_k_predictions = [[ [9, 4, 6, 2, 0], [5, 
7, 2, 9, 6], ], [ [5, 7, 2, 9, 6], [9, 4, 6, 2, 0], ]] labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]]) # Class 2: 2 predictions, both correct. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=2.0 / 2.0, class_id=2, weights=[[1], [0]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=2.0 / 2.0, class_id=2, weights=[[1], [0]]) # Class 2: 2 predictions, both correct. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=2.0 / 2.0, class_id=2, weights=[[0], [1]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=2.0 / 2.0, class_id=2, weights=[[0], [1]]) # Class 7: 1 incorrect prediction. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=0.0 / 1.0, class_id=7, weights=[[1], [0]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=0.0 / 1.0, class_id=7, weights=[[1], [0]]) # Class 7: 1 correct prediction. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=1.0 / 1.0, class_id=7, weights=[[0], [1]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=1.0 / 1.0, class_id=7, weights=[[0], [1]]) # Class 7: no predictions. self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=NAN, class_id=7, weights=[[1, 0], [0, 1]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=NAN, class_id=7, weights=[[1, 0], [0, 1]]) # Class 7: 2 predictions, 1 correct. 
self._test_streaming_sparse_precision_at_k( predictions, labels, k=5, expected=1.0 / 2.0, class_id=7, weights=[[0, 1], [1, 0]]) self._test_streaming_sparse_precision_at_top_k( top_k_predictions, labels, expected=1.0 / 2.0, class_id=7, weights=[[0, 1], [1, 0]]) def test_sparse_tensor_value(self): predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]] labels = [[0, 0, 0, 1], [0, 0, 1, 0]] expected_precision = 0.5 with self.test_session(): _, precision = metrics.streaming_sparse_precision_at_k( predictions=constant_op.constant(predictions, dtypes_lib.float32), labels=_binary_2d_label_to_sparse_value(labels), k=1) variables.variables_initializer(variables.local_variables()).run() self.assertEqual(expected_precision, precision.eval()) class StreamingSparseRecallTest(test.TestCase): def _test_streaming_sparse_recall_at_k(self, predictions, labels, k, expected, class_id=None, weights=None): with ops.Graph().as_default() as g, self.test_session(g): if weights is not None: weights = constant_op.constant(weights, dtypes_lib.float32) metric, update = metrics.streaming_sparse_recall_at_k( predictions=constant_op.constant(predictions, dtypes_lib.float32), labels=labels, k=k, class_id=class_id, weights=weights) # Fails without initialized vars. self.assertRaises(errors_impl.OpError, metric.eval) self.assertRaises(errors_impl.OpError, update.eval) variables.variables_initializer(variables.local_variables()).run() # Run per-step op and assert expected values. if math.isnan(expected): _assert_nan(self, update.eval()) _assert_nan(self, metric.eval()) else: self.assertEqual(expected, update.eval()) self.assertEqual(expected, metric.eval()) def test_one_label_at_k1_nan(self): predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 0, 1], [0, 0, 1, 0]]) dense_labels = np.array([[3], [2]], dtype=np.int64) # Classes 0,1 have 0 labels, 0 predictions, classes -1 and 4 are out of # range. 
for labels in (sparse_labels, dense_labels): for class_id in (-1, 0, 1, 4): self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=NAN, class_id=class_id) def test_one_label_at_k1_no_predictions(self): predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 0, 1], [0, 0, 1, 0]]) dense_labels = np.array([[3], [2]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Class 2: 0 predictions. self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=0.0, class_id=2) def test_one_label_at_k1(self): predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 0, 1], [0, 0, 1, 0]]) dense_labels = np.array([[3], [2]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Class 3: 1 label, 2 predictions, 1 correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 1, class_id=3) # All classes: 2 labels, 2 predictions, 1 correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 2) def test_one_label_at_k1_weighted(self): predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 0, 1], [0, 0, 1, 0]]) dense_labels = np.array([[3], [2]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Class 3: 1 label, 2 predictions, 1 correct. 
self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=NAN, class_id=3, weights=(0.0,)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 1, class_id=3, weights=(1.0,)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 1, class_id=3, weights=(2.0,)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=NAN, class_id=3, weights=(0.0, 0.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=NAN, class_id=3, weights=(0.0, 1.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 1, class_id=3, weights=(1.0, 0.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 1, class_id=3, weights=(1.0, 1.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=2.0 / 2, class_id=3, weights=(2.0, 3.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=3.0 / 3, class_id=3, weights=(3.0, 2.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=0.3 / 0.3, class_id=3, weights=(0.3, 0.6)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=0.6 / 0.6, class_id=3, weights=(0.6, 0.3)) # All classes: 2 labels, 2 predictions, 1 correct. 
self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=NAN, weights=(0.0,)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 2, weights=(1.0,)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 2, weights=(2.0,)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 1, weights=(1.0, 0.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=0.0 / 1, weights=(0.0, 1.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=1.0 / 2, weights=(1.0, 1.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=2.0 / 5, weights=(2.0, 3.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=3.0 / 5, weights=(3.0, 2.0)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=0.3 / 0.9, weights=(0.3, 0.6)) self._test_streaming_sparse_recall_at_k( predictions, labels, k=1, expected=0.6 / 0.9, weights=(0.6, 0.3)) def test_three_labels_at_k5_nan(self): predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]]) dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Classes 0,3,4,6,9 have 0 labels, class 10 is out of range. 
for class_id in (0, 3, 4, 6, 9, 10): self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=NAN, class_id=class_id) def test_three_labels_at_k5_no_predictions(self): predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]]) dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Class 8: 1 label, no predictions. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=0.0 / 1, class_id=8) def test_three_labels_at_k5(self): predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]] sparse_labels = _binary_2d_label_to_sparse_value( [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]]) dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Class 2: 2 labels, both correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=2.0 / 2, class_id=2) # Class 5: 1 label, incorrect. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=1.0 / 1, class_id=5) # Class 7: 1 label, incorrect. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=0.0 / 1, class_id=7) # All classes: 6 labels, 3 correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=3.0 / 6) def test_three_labels_at_k5_some_out_of_range(self): """Tests that labels outside the [0, n_classes) count in denominator.""" predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]] sp_labels = sparse_tensor.SparseTensorValue( indices=[[0, 0], [0, 1], [0, 2], [0, 3], [1, 0], [1, 1], [1, 2], [1, 3]], # values -1 and 10 are outside the [0, n_classes) range. 
values=np.array([2, 7, -1, 8, 1, 2, 5, 10], np.int64), dense_shape=[2, 4]) # Class 2: 2 labels, both correct. self._test_streaming_sparse_recall_at_k( predictions=predictions, labels=sp_labels, k=5, expected=2.0 / 2, class_id=2) # Class 5: 1 label, incorrect. self._test_streaming_sparse_recall_at_k( predictions=predictions, labels=sp_labels, k=5, expected=1.0 / 1, class_id=5) # Class 7: 1 label, incorrect. self._test_streaming_sparse_recall_at_k( predictions=predictions, labels=sp_labels, k=5, expected=0.0 / 1, class_id=7) # All classes: 8 labels, 3 correct. self._test_streaming_sparse_recall_at_k( predictions=predictions, labels=sp_labels, k=5, expected=3.0 / 8) def test_3d_nan(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] sparse_labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0, 0, 1, 1, 0]]]) dense_labels = np.array( [[[2, 7, 8], [1, 2, 5]], [ [1, 2, 5], [2, 7, 8], ]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Classes 0,3,4,6,9 have 0 labels, class 10 is out of range. 
for class_id in (0, 3, 4, 6, 9, 10): self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=NAN, class_id=class_id) def test_3d_no_predictions(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] sparse_labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0, 0, 1, 1, 0]]]) dense_labels = np.array( [[[2, 7, 8], [1, 2, 5]], [ [1, 2, 5], [2, 7, 8], ]], dtype=np.int64) for labels in (sparse_labels, dense_labels): # Classes 1,8 have 0 predictions, >=1 label. for class_id in (1, 8): self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=0.0, class_id=class_id) def test_3d(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]]) # Class 2: 4 labels, all correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=4.0 / 4, class_id=2) # Class 5: 2 labels, both correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=2.0 / 2, class_id=5) # Class 7: 2 labels, 1 incorrect. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=1.0 / 2, class_id=7) # All classes: 12 labels, 7 correct. 
self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=7.0 / 12) def test_3d_ignore_all(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]]) for class_id in xrange(10): self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=NAN, class_id=class_id, weights=[[0], [0]]) self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=NAN, class_id=class_id, weights=[[0, 0], [0, 0]]) self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=NAN, weights=[[0], [0]]) self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=NAN, weights=[[0, 0], [0, 0]]) def test_3d_ignore_some(self): predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9], [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]], [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6], [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]] labels = _binary_3d_label_to_sparse_value( [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]], [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]]) # Class 2: 2 labels, both correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=2.0 / 2.0, class_id=2, weights=[[1], [0]]) # Class 2: 2 labels, both correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=2.0 / 2.0, class_id=2, weights=[[0], [1]]) # Class 7: 1 label, correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=1.0 / 1.0, class_id=7, weights=[[0], [1]]) # Class 7: 1 label, incorrect. 
self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=0.0 / 1.0, class_id=7, weights=[[1], [0]]) # Class 7: 2 labels, 1 correct. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=1.0 / 2.0, class_id=7, weights=[[1, 0], [1, 0]]) # Class 7: No labels. self._test_streaming_sparse_recall_at_k( predictions, labels, k=5, expected=NAN, class_id=7, weights=[[0, 1], [0, 1]]) def test_sparse_tensor_value(self): predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]] labels = [[0, 0, 1, 0], [0, 0, 0, 1]] expected_recall = 0.5 with self.test_session(): _, recall = metrics.streaming_sparse_recall_at_k( predictions=constant_op.constant(predictions, dtypes_lib.float32), labels=_binary_2d_label_to_sparse_value(labels), k=1) variables.variables_initializer(variables.local_variables()).run() self.assertEqual(expected_recall, recall.eval()) class StreamingMeanAbsoluteErrorTest(test.TestCase): def setUp(self): ops.reset_default_graph() def testVars(self): metrics.streaming_mean_absolute_error( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1))) _assert_local_variables(self, ('mean_absolute_error/count:0', 'mean_absolute_error/total:0')) def testMetricsCollection(self): my_collection_name = '__metrics__' mean, _ = metrics.streaming_mean_absolute_error( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)), metrics_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [mean]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_mean_absolute_error( predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)), updates_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [update_op]) def testValueTensorIsIdempotent(self): predictions = random_ops.random_normal((10, 3), seed=1) labels = random_ops.random_normal((10, 3), seed=2) error, update_op = 
metrics.streaming_mean_absolute_error(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateWithErrorAndWeights(self):
    # Weights [0,1,0,1] keep only |4-3|=1 and |8-3|=5 -> mean error 3.
    predictions = constant_op.constant(
        [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        [1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
    weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))

    error, update_op = metrics.streaming_mean_absolute_error(predictions,
                                                             labels, weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(3, sess.run(update_op))
      self.assertEqual(3, error.eval())


class StreamingMeanRelativeErrorTest(test.TestCase):
  """Tests for metrics.streaming_mean_relative_error."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_mean_relative_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        normalizer=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('mean_relative_error/count:0',
                                   'mean_relative_error/total:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_mean_relative_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        normalizer=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_mean_relative_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        normalizer=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    # Evaluating the value tensor must not change the accumulated state.
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    normalizer = random_ops.random_normal((10, 3), seed=3)
    error, update_op = metrics.streaming_mean_relative_error(predictions,
                                                             labels, normalizer)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateNormalizedByLabels(self):
    # Expected value computed independently with numpy.
    np_predictions = np.asarray([2, 4, 6, 8], dtype=np.float32)
    np_labels = np.asarray([1, 3, 2, 3], dtype=np.float32)
    expected_error = np.mean(
        np.divide(np.absolute(np_predictions - np_labels), np_labels))

    predictions = constant_op.constant(
        np_predictions, shape=(1, 4), dtype=dtypes_lib.float32)
    labels = constant_op.constant(np_labels, shape=(1, 4))

    error, update_op = metrics.streaming_mean_relative_error(
        predictions, labels, normalizer=labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(expected_error, sess.run(update_op))
      self.assertEqual(expected_error, error.eval())

  def testSingleUpdateNormalizedByZeros(self):
    # A zero normalizer is defined to yield zero error (no div-by-zero).
    np_predictions = np.asarray([2, 4, 6, 8], dtype=np.float32)

    predictions = constant_op.constant(
        np_predictions, shape=(1, 4), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        [1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)

    error, update_op = metrics.streaming_mean_relative_error(
        predictions, labels, normalizer=array_ops.zeros_like(labels))

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(0.0, sess.run(update_op))
      self.assertEqual(0.0, error.eval())


class StreamingMeanSquaredErrorTest(test.TestCase):
  """Tests for metrics.streaming_mean_squared_error."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_mean_squared_error(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('mean_squared_error/count:0',
'mean_squared_error/total:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_mean_squared_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_mean_squared_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    # Evaluating the value tensor must not change the accumulated state.
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    error, update_op = metrics.streaming_mean_squared_error(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateZeroError(self):
    predictions = array_ops.zeros((1, 3), dtype=dtypes_lib.float32)
    labels = array_ops.zeros((1, 3), dtype=dtypes_lib.float32)

    error, update_op = metrics.streaming_mean_squared_error(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(0, sess.run(update_op))
      self.assertEqual(0, error.eval())

  def testSingleUpdateWithError(self):
    # Squared errors 1, 1, 16 -> mean 6.
    predictions = constant_op.constant(
        [2, 4, 6], shape=(1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        [1, 3, 2], shape=(1, 3), dtype=dtypes_lib.float32)

    error, update_op = metrics.streaming_mean_squared_error(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(6, sess.run(update_op))
      self.assertEqual(6, error.eval())

  def testSingleUpdateWithErrorAndWeights(self):
    # Weights [0,1,0,1] keep squared errors 1 and 25 -> mean 13.
    predictions = constant_op.constant(
        [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        [1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
    weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))

    error, update_op = metrics.streaming_mean_squared_error(predictions,
                                                            labels, weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(13, sess.run(update_op))
      self.assertEqual(13, error.eval())

  def testMultipleBatchesOfSizeOne(self):
    with self.test_session() as sess:
      # Create the queue that populates the predictions.
      preds_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, preds_queue, [10, 8, 6])
      _enqueue_vector(sess, preds_queue, [-4, 3, -1])
      predictions = preds_queue.dequeue()

      # Create the queue that populates the labels.
      labels_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, labels_queue, [1, 3, 2])
      _enqueue_vector(sess, labels_queue, [2, 4, 6])
      labels = labels_queue.dequeue()

      error, update_op = metrics.streaming_mean_squared_error(predictions,
                                                              labels)

      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertAlmostEqual(208.0 / 6, sess.run(update_op), 5)

      self.assertAlmostEqual(208.0 / 6, error.eval(), 5)

  def testMetricsComputedConcurrently(self):
    # Two independent (named) metric instances must not share state.
    with self.test_session() as sess:
      # Create the queue that populates one set of predictions.
      preds_queue0 = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, preds_queue0, [10, 8, 6])
      _enqueue_vector(sess, preds_queue0, [-4, 3, -1])
      predictions0 = preds_queue0.dequeue()

      # Create the queue that populates one set of predictions.
      preds_queue1 = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, preds_queue1, [0, 1, 1])
      _enqueue_vector(sess, preds_queue1, [1, 1, 0])
      predictions1 = preds_queue1.dequeue()

      # Create the queue that populates one set of labels.
      labels_queue0 = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, labels_queue0, [1, 3, 2])
      _enqueue_vector(sess, labels_queue0, [2, 4, 6])
      labels0 = labels_queue0.dequeue()

      # Create the queue that populates another set of labels.
      labels_queue1 = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, labels_queue1, [-5, -3, -1])
      _enqueue_vector(sess, labels_queue1, [5, 4, 3])
      labels1 = labels_queue1.dequeue()

      mse0, update_op0 = metrics.streaming_mean_squared_error(
          predictions0, labels0, name='msd0')
      mse1, update_op1 = metrics.streaming_mean_squared_error(
          predictions1, labels1, name='msd1')

      sess.run(variables.local_variables_initializer())
      sess.run([update_op0, update_op1])
      sess.run([update_op0, update_op1])

      mse0, mse1 = sess.run([mse0, mse1])
      self.assertAlmostEqual(208.0 / 6, mse0, 5)
      self.assertAlmostEqual(79.0 / 6, mse1, 5)

  def testMultipleMetricsOnMultipleBatchesOfSizeOne(self):
    # MAE and MSE computed side by side over the same queued inputs.
    with self.test_session() as sess:
      # Create the queue that populates the predictions.
      preds_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, preds_queue, [10, 8, 6])
      _enqueue_vector(sess, preds_queue, [-4, 3, -1])
      predictions = preds_queue.dequeue()

      # Create the queue that populates the labels.
      labels_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, labels_queue, [1, 3, 2])
      _enqueue_vector(sess, labels_queue, [2, 4, 6])
      labels = labels_queue.dequeue()

      mae, ma_update_op = metrics.streaming_mean_absolute_error(predictions,
                                                                labels)
      mse, ms_update_op = metrics.streaming_mean_squared_error(predictions,
                                                               labels)

      sess.run(variables.local_variables_initializer())
      sess.run([ma_update_op, ms_update_op])
      sess.run([ma_update_op, ms_update_op])

      self.assertAlmostEqual(32.0 / 6, mae.eval(), 5)
      self.assertAlmostEqual(208.0 / 6, mse.eval(), 5)


class StreamingRootMeanSquaredErrorTest(test.TestCase):
  """Tests for metrics.streaming_root_mean_squared_error."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_root_mean_squared_error(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('root_mean_squared_error/count:0',
                                   'root_mean_squared_error/total:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_root_mean_squared_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_root_mean_squared_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    # Evaluating the value tensor must not change the accumulated state.
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    error, update_op = metrics.streaming_root_mean_squared_error(predictions,
                                                                 labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateZeroError(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          0.0, shape=(1, 3), dtype=dtypes_lib.float32)
      labels = constant_op.constant(0.0, shape=(1, 3), dtype=dtypes_lib.float32)

      rmse, update_op = metrics.streaming_root_mean_squared_error(predictions,
                                                                  labels)

      sess.run(variables.local_variables_initializer())
      self.assertEqual(0, sess.run(update_op))

      self.assertEqual(0, rmse.eval())

  def testSingleUpdateWithError(self):
    # Squared errors 1, 1, 16 -> MSE 6 -> RMSE sqrt(6).
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [2, 4, 6], shape=(1, 3), dtype=dtypes_lib.float32)
      labels = constant_op.constant(
          [1, 3, 2], shape=(1, 3), dtype=dtypes_lib.float32)

      rmse, update_op = metrics.streaming_root_mean_squared_error(predictions,
                                                                  labels)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(math.sqrt(6), update_op.eval(), 5)
      self.assertAlmostEqual(math.sqrt(6), rmse.eval(), 5)

  def testSingleUpdateWithErrorAndWeights(self):
    # Weights [0,1,0,1] keep squared errors 1 and 25 -> RMSE sqrt(13).
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant(
          [1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
      weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))

      rmse, update_op = metrics.streaming_root_mean_squared_error(predictions,
                                                                  labels,
                                                                  weights)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(math.sqrt(13), sess.run(update_op))

      self.assertAlmostEqual(math.sqrt(13), rmse.eval(), 5)


def _reweight(predictions, labels, weights):
  # Expands each (prediction, label) pair by its integer weight so that
  # np.cov/np.corrcoef on the result match the weighted streaming metrics.
  return (np.concatenate([[p] * int(w) for p, w in zip(predictions, weights)]),
          np.concatenate([[l] * int(w) for l, w in zip(labels, weights)]))


class StreamingCovarianceTest(test.TestCase):
  """Tests for metrics.streaming_covariance."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_covariance(
        predictions=math_ops.to_float(math_ops.range(10)) + array_ops.ones(
            [10, 10]),
        labels=math_ops.to_float(math_ops.range(10)) + array_ops.ones([10, 10]))
    _assert_local_variables(self, (
        'covariance/comoment:0',
        'covariance/count:0',
        'covariance/mean_label:0',
        'covariance/mean_prediction:0',))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    cov, _ = metrics.streaming_covariance(
        predictions=math_ops.to_float(math_ops.range(10)) + array_ops.ones(
            [10, 10]),
        labels=math_ops.to_float(math_ops.range(10)) + array_ops.ones([10, 10]),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [cov])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_covariance(
        predictions=math_ops.to_float(math_ops.range(10)) + array_ops.ones(
            [10, 10]),
        labels=math_ops.to_float(math_ops.range(10)) + array_ops.ones([10, 10]),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    # Evaluating the value tensor must not change the accumulated state.
    labels = random_ops.random_normal((10, 3), seed=2)
    predictions = labels * 0.5 + random_ops.random_normal((10, 3), seed=1) * 0.5
    cov, update_op = metrics.streaming_covariance(predictions, labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_cov = cov.eval()
      for _ in range(10):
        self.assertEqual(initial_cov, cov.eval())

  def testSingleUpdateIdentical(self):
    # Covariance of a series with itself equals its variance (np.cov).
    with self.test_session() as sess:
      predictions = math_ops.to_float(math_ops.range(10))
      labels = math_ops.to_float(math_ops.range(10))

      cov, update_op = metrics.streaming_covariance(predictions, labels)

      expected_cov = np.cov(np.arange(10), np.arange(10))[0, 1]
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(expected_cov, sess.run(update_op), 5)
      self.assertAlmostEqual(expected_cov, cov.eval(), 5)

  def testSingleUpdateNonIdentical(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [2, 4, 6], shape=(1, 3), dtype=dtypes_lib.float32)
      labels = constant_op.constant(
          [1, 3, 2], shape=(1, 3), dtype=dtypes_lib.float32)

      cov, update_op = metrics.streaming_covariance(predictions, labels)

      expected_cov = np.cov([2, 4, 6], [1, 3, 2])[0, 1]
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(expected_cov, update_op.eval())
      self.assertAlmostEqual(expected_cov, cov.eval())

  def testSingleUpdateWithErrorAndWeights(self):
    # Weighted covariance checked against np.cov over _reweight-expanded data.
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant(
          [1, 3, 2, 7], shape=(1, 4), dtype=dtypes_lib.float32)
      weights = constant_op.constant(
          [0, 1, 3, 1], shape=(1, 4), dtype=dtypes_lib.float32)

      cov, update_op = metrics.streaming_covariance(
          predictions, labels, weights=weights)

      p, l = _reweight([2, 4, 6, 8], [1, 3, 2, 7], [0, 1, 3, 1])
      expected_cov = np.cov(p, l)[0, 1]
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(expected_cov, sess.run(update_op))
      self.assertAlmostEqual(expected_cov, cov.eval())

  def testMultiUpdateWithErrorNoWeights(self):
    # Streams 10 batches; after each batch the running covariance must match
    # np.cov over all data seen so far.
    with self.test_session() as sess:
      np.random.seed(123)
      n = 100
      predictions = np.random.randn(n)
      labels = 0.5 * predictions + np.random.randn(n)

      stride = 10
      predictions_t = array_ops.placeholder(dtypes_lib.float32, [stride])
      labels_t = array_ops.placeholder(dtypes_lib.float32, [stride])

      cov, update_op = metrics.streaming_covariance(predictions_t, labels_t)

      sess.run(variables.local_variables_initializer())
      prev_expected_cov = 0.
      for i in range(n // stride):
        feed_dict = {
            predictions_t: predictions[stride * i:stride * (i + 1)],
            labels_t: labels[stride * i:stride * (i + 1)]
        }
        self.assertAlmostEqual(
            prev_expected_cov, sess.run(cov, feed_dict=feed_dict), 5)
        expected_cov = np.cov(predictions[:stride * (i + 1)],
                              labels[:stride * (i + 1)])[0, 1]
        self.assertAlmostEqual(
            expected_cov, sess.run(update_op, feed_dict=feed_dict), 5)
        self.assertAlmostEqual(
            expected_cov, sess.run(cov, feed_dict=feed_dict), 5)
        prev_expected_cov = expected_cov

  def testMultiUpdateWithErrorAndWeights(self):
    # Same as above but with integer weights, compared via _reweight.
    with self.test_session() as sess:
      np.random.seed(123)
      n = 100
      predictions = np.random.randn(n)
      labels = 0.5 * predictions + np.random.randn(n)
      weights = np.tile(np.arange(n // 10), n // 10)
      np.random.shuffle(weights)

      stride = 10
      predictions_t = array_ops.placeholder(dtypes_lib.float32, [stride])
      labels_t = array_ops.placeholder(dtypes_lib.float32, [stride])
      weights_t = array_ops.placeholder(dtypes_lib.float32, [stride])

      cov, update_op = metrics.streaming_covariance(
          predictions_t, labels_t, weights=weights_t)

      sess.run(variables.local_variables_initializer())
      prev_expected_cov = 0.
      for i in range(n // stride):
        feed_dict = {
            predictions_t: predictions[stride * i:stride * (i + 1)],
            labels_t: labels[stride * i:stride * (i + 1)],
            weights_t: weights[stride * i:stride * (i + 1)]
        }
        self.assertAlmostEqual(
            prev_expected_cov, sess.run(cov, feed_dict=feed_dict), 5)
        p, l = _reweight(predictions[:stride * (i + 1)],
                         labels[:stride * (i + 1)], weights[:stride * (i + 1)])
        expected_cov = np.cov(p, l)[0, 1]
        self.assertAlmostEqual(
            expected_cov, sess.run(update_op, feed_dict=feed_dict), 5)
        self.assertAlmostEqual(
            expected_cov, sess.run(cov, feed_dict=feed_dict), 5)
        prev_expected_cov = expected_cov


class StreamingPearsonRTest(test.TestCase):
  """Tests for metrics.streaming_pearson_correlation."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_pearson_correlation(
        predictions=math_ops.to_float(math_ops.range(10)) + array_ops.ones(
            [10, 10]),
        labels=math_ops.to_float(math_ops.range(10)) + array_ops.ones([10, 10]))
    _assert_local_variables(self, (
        'pearson_r/covariance/comoment:0',
        'pearson_r/covariance/count:0',
        'pearson_r/covariance/mean_label:0',
        'pearson_r/covariance/mean_prediction:0',
        'pearson_r/variance_labels/count:0',
        'pearson_r/variance_labels/comoment:0',
        'pearson_r/variance_labels/mean_label:0',
        'pearson_r/variance_labels/mean_prediction:0',
        'pearson_r/variance_predictions/comoment:0',
        'pearson_r/variance_predictions/count:0',
        'pearson_r/variance_predictions/mean_label:0',
        'pearson_r/variance_predictions/mean_prediction:0',))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    pearson_r, _ = metrics.streaming_pearson_correlation(
        predictions=math_ops.to_float(math_ops.range(10)) + array_ops.ones(
            [10, 10]),
        labels=math_ops.to_float(math_ops.range(10)) + array_ops.ones([10, 10]),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [pearson_r])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_pearson_correlation(
predictions=math_ops.to_float(math_ops.range(10)) + array_ops.ones(
            [10, 10]),
        labels=math_ops.to_float(math_ops.range(10)) + array_ops.ones([10, 10]),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    # Evaluating the value tensor must not change the accumulated state.
    labels = random_ops.random_normal((10, 3), seed=2)
    predictions = labels * 0.5 + random_ops.random_normal((10, 3), seed=1) * 0.5
    pearson_r, update_op = metrics.streaming_pearson_correlation(predictions,
                                                                 labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_r = pearson_r.eval()
      for _ in range(10):
        self.assertEqual(initial_r, pearson_r.eval())

  def testSingleUpdateIdentical(self):
    # Identical series: correlation should match np.corrcoef (i.e. 1).
    with self.test_session() as sess:
      predictions = math_ops.to_float(math_ops.range(10))
      labels = math_ops.to_float(math_ops.range(10))

      pearson_r, update_op = metrics.streaming_pearson_correlation(predictions,
                                                                   labels)

      expected_r = np.corrcoef(np.arange(10), np.arange(10))[0, 1]
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(expected_r, sess.run(update_op), 5)
      self.assertAlmostEqual(expected_r, pearson_r.eval(), 5)

  def testSingleUpdateNonIdentical(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [2, 4, 6], shape=(1, 3), dtype=dtypes_lib.float32)
      labels = constant_op.constant(
          [1, 3, 2], shape=(1, 3), dtype=dtypes_lib.float32)

      pearson_r, update_op = metrics.streaming_pearson_correlation(predictions,
                                                                   labels)

      expected_r = np.corrcoef([2, 4, 6], [1, 3, 2])[0, 1]
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(expected_r, update_op.eval())
      self.assertAlmostEqual(expected_r, pearson_r.eval())

  def testSingleUpdateWithErrorAndWeights(self):
    # Weighted correlation checked against np.cov on _reweight-expanded data.
    with self.test_session() as sess:
      predictions = np.array([2, 4, 6, 8])
      labels = np.array([1, 3, 2, 7])
      weights = np.array([0, 1, 3, 1])
      predictions_t = constant_op.constant(
          predictions, shape=(1, 4), dtype=dtypes_lib.float32)
      labels_t = constant_op.constant(
          labels, shape=(1, 4), dtype=dtypes_lib.float32)
      weights_t = constant_op.constant(
          weights, shape=(1, 4), dtype=dtypes_lib.float32)

      pearson_r, update_op = metrics.streaming_pearson_correlation(
          predictions_t, labels_t, weights=weights_t)

      p, l = _reweight(predictions, labels, weights)
      cmat = np.cov(p, l)
      expected_r = cmat[0, 1] / np.sqrt(cmat[0, 0] * cmat[1, 1])
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(expected_r, sess.run(update_op))
      self.assertAlmostEqual(expected_r, pearson_r.eval())

  def testMultiUpdateWithErrorNoWeights(self):
    # Streams 10 batches; running r must match np.corrcoef over data so far.
    with self.test_session() as sess:
      np.random.seed(123)
      n = 100
      predictions = np.random.randn(n)
      labels = 0.5 * predictions + np.random.randn(n)

      stride = 10
      predictions_t = array_ops.placeholder(dtypes_lib.float32, [stride])
      labels_t = array_ops.placeholder(dtypes_lib.float32, [stride])

      pearson_r, update_op = metrics.streaming_pearson_correlation(
          predictions_t, labels_t)

      sess.run(variables.local_variables_initializer())
      prev_expected_r = 0.
      for i in range(n // stride):
        feed_dict = {
            predictions_t: predictions[stride * i:stride * (i + 1)],
            labels_t: labels[stride * i:stride * (i + 1)]
        }
        self.assertAlmostEqual(
            prev_expected_r, sess.run(pearson_r, feed_dict=feed_dict), 5)
        expected_r = np.corrcoef(predictions[:stride * (i + 1)],
                                 labels[:stride * (i + 1)])[0, 1]
        self.assertAlmostEqual(
            expected_r, sess.run(update_op, feed_dict=feed_dict), 5)
        self.assertAlmostEqual(
            expected_r, sess.run(pearson_r, feed_dict=feed_dict), 5)
        prev_expected_r = expected_r

  def testMultiUpdateWithErrorAndWeights(self):
    # Same as above but with integer weights, compared via _reweight.
    with self.test_session() as sess:
      np.random.seed(123)
      n = 100
      predictions = np.random.randn(n)
      labels = 0.5 * predictions + np.random.randn(n)
      weights = np.tile(np.arange(n // 10), n // 10)
      np.random.shuffle(weights)

      stride = 10
      predictions_t = array_ops.placeholder(dtypes_lib.float32, [stride])
      labels_t = array_ops.placeholder(dtypes_lib.float32, [stride])
      weights_t = array_ops.placeholder(dtypes_lib.float32, [stride])

      pearson_r, update_op = metrics.streaming_pearson_correlation(
          predictions_t, labels_t, weights=weights_t)

      sess.run(variables.local_variables_initializer())
      prev_expected_r = 0.
      for i in range(n // stride):
        feed_dict = {
            predictions_t: predictions[stride * i:stride * (i + 1)],
            labels_t: labels[stride * i:stride * (i + 1)],
            weights_t: weights[stride * i:stride * (i + 1)]
        }
        self.assertAlmostEqual(
            prev_expected_r, sess.run(pearson_r, feed_dict=feed_dict), 5)
        p, l = _reweight(predictions[:stride * (i + 1)],
                         labels[:stride * (i + 1)], weights[:stride * (i + 1)])
        cmat = np.cov(p, l)
        expected_r = cmat[0, 1] / np.sqrt(cmat[0, 0] * cmat[1, 1])
        self.assertAlmostEqual(
            expected_r, sess.run(update_op, feed_dict=feed_dict), 5)
        self.assertAlmostEqual(
            expected_r, sess.run(pearson_r, feed_dict=feed_dict), 5)
        prev_expected_r = expected_r


class StreamingMeanCosineDistanceTest(test.TestCase):
  """Tests for metrics.streaming_mean_cosine_distance."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_mean_cosine_distance(
        predictions=array_ops.ones((10, 3)), labels=array_ops.ones((10, 3)),
        dim=1)
    _assert_local_variables(self, (
        'mean_cosine_distance/count:0',
        'mean_cosine_distance/total:0',))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_mean_cosine_distance(
        predictions=array_ops.ones((10, 3)),
        labels=array_ops.ones((10, 3)),
        dim=1,
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_mean_cosine_distance(
        predictions=array_ops.ones((10, 3)),
        labels=array_ops.ones((10, 3)),
        dim=1,
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    # Evaluating the value tensor must not change the accumulated state.
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    error, update_op = metrics.streaming_mean_cosine_distance(
        predictions, labels, dim=1)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateZeroError(self):
    # Identical unit vectors -> cosine distance 0.
    np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))

    predictions = constant_op.constant(
        np_labels, shape=(1, 3, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(1, 3, 3), dtype=dtypes_lib.float32)

    error, update_op = metrics.streaming_mean_cosine_distance(
        predictions, labels, dim=2)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(0, sess.run(update_op))
      self.assertEqual(0, error.eval())

  def testSingleUpdateWithError1(self):
    np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))
    np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))

    predictions = constant_op.constant(
        np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)

    error, update_op = metrics.streaming_mean_cosine_distance(
        predictions, labels, dim=2)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1, sess.run(update_op), 5)
      self.assertAlmostEqual(1, error.eval(), 5)

  def testSingleUpdateWithError2(self):
    np_predictions = np.matrix(
        ('0.819031913261206 0.567041924552012 0.087465312324590;'
         '-0.665139432070255 -0.739487441769973 -0.103671883216994;'
         '0.707106781186548 -0.707106781186548 0'))
    np_labels = np.matrix(
        ('0.819031913261206 0.567041924552012 0.087465312324590;'
         '0.665139432070255 0.739487441769973 0.103671883216994;'
         '0.707106781186548 0.707106781186548 0'))

    predictions = constant_op.constant(
        np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    error, update_op = metrics.streaming_mean_cosine_distance(
        predictions, labels, dim=2)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1.0, sess.run(update_op), 5)
      self.assertAlmostEqual(1.0, error.eval(), 5)

  def testSingleUpdateWithErrorAndWeights1(self):
    # Only the first (matching) row is weighted in -> distance 0.
    np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))
    np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))

    predictions = constant_op.constant(
        np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    weights = constant_op.constant(
        [1, 0, 0], shape=(3, 1, 1), dtype=dtypes_lib.float32)

    error, update_op = metrics.streaming_mean_cosine_distance(
        predictions, labels, dim=2, weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(0, sess.run(update_op))
      self.assertEqual(0, error.eval())

  def testSingleUpdateWithErrorAndWeights2(self):
    # The two mismatching rows are weighted in -> mean distance 1.5.
    np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))
    np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))

    predictions = constant_op.constant(
        np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    weights = constant_op.constant(
        [0, 1, 1], shape=(3, 1, 1), dtype=dtypes_lib.float32)

    error, update_op = metrics.streaming_mean_cosine_distance(
        predictions, labels, dim=2, weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(1.5, update_op.eval())
      self.assertEqual(1.5, error.eval())


class PcntBelowThreshTest(test.TestCase):
  """Tests for metrics.streaming_percentage_less."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    metrics.streaming_percentage_less(values=array_ops.ones((10,)), threshold=2)
    _assert_local_variables(self, (
        'percentage_below_threshold/count:0',
        'percentage_below_threshold/total:0',))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.streaming_percentage_less(
        values=array_ops.ones((10,)),
        threshold=2,
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.streaming_percentage_less(
        values=array_ops.ones((10,)),
        threshold=2,
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testOneUpdate(self):
    # Fractions of [2,4,6,8] below thresholds 100 / 7 / 1.
    with self.test_session() as sess:
      values = constant_op.constant(
          [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)

      pcnt0, update_op0 = metrics.streaming_percentage_less(
          values, 100, name='high')
      pcnt1, update_op1 = metrics.streaming_percentage_less(
          values, 7, name='medium')
      pcnt2, update_op2 = metrics.streaming_percentage_less(
          values, 1, name='low')

      sess.run(variables.local_variables_initializer())
      sess.run([update_op0, update_op1, update_op2])

      pcnt0, pcnt1, pcnt2 = sess.run([pcnt0, pcnt1, pcnt2])
      self.assertAlmostEqual(1.0, pcnt0, 5)
      self.assertAlmostEqual(0.75, pcnt1, 5)
      self.assertAlmostEqual(0.0, pcnt2, 5)

  def testSomePresentOneUpdate(self):
    # Weights [1,0,0,1] keep only values 2 and 8.
    with self.test_session() as sess:
      values = constant_op.constant(
          [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
      weights = constant_op.constant(
          [1, 0, 0, 1], shape=(1, 4), dtype=dtypes_lib.float32)

      pcnt0, update_op0 = metrics.streaming_percentage_less(
          values, 100, weights=weights, name='high')
      pcnt1, update_op1 = metrics.streaming_percentage_less(
          values, 7, weights=weights, name='medium')
      pcnt2, update_op2 = metrics.streaming_percentage_less(
          values, 1, weights=weights, name='low')

      sess.run(variables.local_variables_initializer())
      self.assertListEqual([1.0, 0.5, 0.0],
                           sess.run([update_op0, update_op1, update_op2]))

      pcnt0, pcnt1, pcnt2 = sess.run([pcnt0, pcnt1, pcnt2])
      self.assertAlmostEqual(1.0, pcnt0, 5)
      self.assertAlmostEqual(0.5, pcnt1, 5)
      self.assertAlmostEqual(0.0, pcnt2, 5)


class StreamingMeanIOUTest(test.TestCase):
  """Tests for metrics.streaming_mean_iou (continues past this chunk)."""

  def setUp(self):
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
metrics.streaming_mean_iou( predictions=array_ops.ones([10, 1]), labels=array_ops.ones([10, 1]), num_classes=2) _assert_local_variables(self, ('mean_iou/total_confusion_matrix:0',)) def testMetricsCollections(self): my_collection_name = '__metrics__' mean_iou, _ = metrics.streaming_mean_iou( predictions=array_ops.ones([10, 1]), labels=array_ops.ones([10, 1]), num_classes=2, metrics_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [mean_iou]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_mean_iou( predictions=array_ops.ones([10, 1]), labels=array_ops.ones([10, 1]), num_classes=2, updates_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [update_op]) def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self): predictions = array_ops.ones([10, 3]) labels = array_ops.ones([10, 4]) with self.assertRaises(ValueError): metrics.streaming_mean_iou(predictions, labels, num_classes=2) def testLabelsAndWeightsOfDifferentSizeRaisesValueError(self): predictions = array_ops.ones([10]) labels = array_ops.ones([10]) weights = array_ops.zeros([9]) with self.assertRaises(ValueError): metrics.streaming_mean_iou( predictions, labels, num_classes=2, weights=weights) def testValueTensorIsIdempotent(self): num_classes = 3 predictions = random_ops.random_uniform( [10], maxval=num_classes, dtype=dtypes_lib.int64, seed=1) labels = random_ops.random_uniform( [10], maxval=num_classes, dtype=dtypes_lib.int64, seed=2) miou, update_op = metrics.streaming_mean_iou( predictions, labels, num_classes=num_classes) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) # Run several updates. for _ in range(10): sess.run(update_op) # Then verify idempotency. 
initial_miou = miou.eval() for _ in range(10): self.assertEqual(initial_miou, miou.eval()) def testMultipleUpdates(self): num_classes = 3 with self.test_session() as sess: # Create the queue that populates the predictions. preds_queue = data_flow_ops.FIFOQueue( 5, dtypes=dtypes_lib.int32, shapes=(1, 1)) _enqueue_vector(sess, preds_queue, [0]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [2]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [0]) predictions = preds_queue.dequeue() # Create the queue that populates the labels. labels_queue = data_flow_ops.FIFOQueue( 5, dtypes=dtypes_lib.int32, shapes=(1, 1)) _enqueue_vector(sess, labels_queue, [0]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [2]) _enqueue_vector(sess, labels_queue, [1]) labels = labels_queue.dequeue() miou, update_op = metrics.streaming_mean_iou(predictions, labels, num_classes) sess.run(variables.local_variables_initializer()) for _ in range(5): sess.run(update_op) desired_output = np.mean([1.0 / 2.0, 1.0 / 4.0, 0.]) self.assertEqual(desired_output, miou.eval()) def testMultipleUpdatesWithWeights(self): num_classes = 2 with self.test_session() as sess: # Create the queue that populates the predictions. preds_queue = data_flow_ops.FIFOQueue( 6, dtypes=dtypes_lib.int32, shapes=(1, 1)) _enqueue_vector(sess, preds_queue, [0]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [0]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [0]) _enqueue_vector(sess, preds_queue, [1]) predictions = preds_queue.dequeue() # Create the queue that populates the labels. 
labels_queue = data_flow_ops.FIFOQueue( 6, dtypes=dtypes_lib.int32, shapes=(1, 1)) _enqueue_vector(sess, labels_queue, [0]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [0]) _enqueue_vector(sess, labels_queue, [0]) _enqueue_vector(sess, labels_queue, [1]) labels = labels_queue.dequeue() # Create the queue that populates the weights. weights_queue = data_flow_ops.FIFOQueue( 6, dtypes=dtypes_lib.float32, shapes=(1, 1)) _enqueue_vector(sess, weights_queue, [1.0]) _enqueue_vector(sess, weights_queue, [1.0]) _enqueue_vector(sess, weights_queue, [1.0]) _enqueue_vector(sess, weights_queue, [0.0]) _enqueue_vector(sess, weights_queue, [1.0]) _enqueue_vector(sess, weights_queue, [0.0]) weights = weights_queue.dequeue() miou, update_op = metrics.streaming_mean_iou( predictions, labels, num_classes, weights=weights) sess.run(variables.local_variables_initializer()) for _ in range(6): sess.run(update_op) desired_output = np.mean([2.0 / 3.0, 1.0 / 2.0]) self.assertAlmostEqual(desired_output, miou.eval()) def testMultipleUpdatesWithMissingClass(self): # Test the case where there are no predicions and labels for # one class, and thus there is one row and one column with # zero entries in the confusion matrix. num_classes = 3 with self.test_session() as sess: # Create the queue that populates the predictions. # There is no prediction for class 2. preds_queue = data_flow_ops.FIFOQueue( 5, dtypes=dtypes_lib.int32, shapes=(1, 1)) _enqueue_vector(sess, preds_queue, [0]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [1]) _enqueue_vector(sess, preds_queue, [0]) predictions = preds_queue.dequeue() # Create the queue that populates the labels. # There is label for class 2. 
labels_queue = data_flow_ops.FIFOQueue( 5, dtypes=dtypes_lib.int32, shapes=(1, 1)) _enqueue_vector(sess, labels_queue, [0]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [1]) _enqueue_vector(sess, labels_queue, [0]) _enqueue_vector(sess, labels_queue, [1]) labels = labels_queue.dequeue() miou, update_op = metrics.streaming_mean_iou(predictions, labels, num_classes) sess.run(variables.local_variables_initializer()) for _ in range(5): sess.run(update_op) desired_output = np.mean([1.0 / 3.0, 2.0 / 4.0, 0.]) self.assertAlmostEqual(desired_output, miou.eval()) def testUpdateOpEvalIsAccumulatedConfusionMatrix(self): predictions = array_ops.concat( [ constant_op.constant( 0, shape=[5]), constant_op.constant( 1, shape=[5]) ], 0) labels = array_ops.concat( [ constant_op.constant( 0, shape=[3]), constant_op.constant( 1, shape=[7]) ], 0) num_classes = 2 with self.test_session() as sess: miou, update_op = metrics.streaming_mean_iou(predictions, labels, num_classes) sess.run(variables.local_variables_initializer()) confusion_matrix = update_op.eval() self.assertAllEqual([[3, 0], [2, 5]], confusion_matrix) desired_miou = np.mean([3. / 5., 5. 
/ 7.]) self.assertAlmostEqual(desired_miou, miou.eval()) def testAllCorrect(self): predictions = array_ops.zeros([40]) labels = array_ops.zeros([40]) num_classes = 1 with self.test_session() as sess: miou, update_op = metrics.streaming_mean_iou(predictions, labels, num_classes) sess.run(variables.local_variables_initializer()) self.assertEqual(40, update_op.eval()[0]) self.assertEqual(1.0, miou.eval()) def testAllWrong(self): predictions = array_ops.zeros([40]) labels = array_ops.ones([40]) num_classes = 2 with self.test_session() as sess: miou, update_op = metrics.streaming_mean_iou(predictions, labels, num_classes) sess.run(variables.local_variables_initializer()) self.assertAllEqual([[0, 0], [40, 0]], update_op.eval()) self.assertEqual(0., miou.eval()) def testResultsWithSomeMissing(self): predictions = array_ops.concat( [ constant_op.constant( 0, shape=[5]), constant_op.constant( 1, shape=[5]) ], 0) labels = array_ops.concat( [ constant_op.constant( 0, shape=[3]), constant_op.constant( 1, shape=[7]) ], 0) num_classes = 2 weights = array_ops.concat( [ constant_op.constant( 0, shape=[1]), constant_op.constant( 1, shape=[8]), constant_op.constant( 0, shape=[1]) ], 0) with self.test_session() as sess: miou, update_op = metrics.streaming_mean_iou( predictions, labels, num_classes, weights=weights) sess.run(variables.local_variables_initializer()) self.assertAllEqual([[2, 0], [2, 4]], update_op.eval()) desired_miou = np.mean([2. / 4., 4. 
/ 6.]) self.assertAlmostEqual(desired_miou, miou.eval()) class StreamingConcatTest(test.TestCase): def setUp(self): ops.reset_default_graph() def testVars(self): metrics.streaming_concat(values=array_ops.ones((10,))) _assert_local_variables(self, ( 'streaming_concat/array:0', 'streaming_concat/size:0',)) def testMetricsCollection(self): my_collection_name = '__metrics__' value, _ = metrics.streaming_concat( values=array_ops.ones((10,)), metrics_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [value]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_concat( values=array_ops.ones((10,)), updates_collections=[my_collection_name]) self.assertListEqual(ops.get_collection(my_collection_name), [update_op]) def testNextArraySize(self): next_array_size = metric_ops._next_array_size # pylint: disable=protected-access with self.test_session(): self.assertEqual(next_array_size(2, growth_factor=2).eval(), 2) self.assertEqual(next_array_size(3, growth_factor=2).eval(), 4) self.assertEqual(next_array_size(4, growth_factor=2).eval(), 4) self.assertEqual(next_array_size(5, growth_factor=2).eval(), 8) self.assertEqual(next_array_size(6, growth_factor=2).eval(), 8) def testStreamingConcat(self): with self.test_session() as sess: values = array_ops.placeholder(dtypes_lib.int32, [None]) concatenated, update_op = metrics.streaming_concat(values) sess.run(variables.local_variables_initializer()) self.assertAllEqual([], concatenated.eval()) sess.run([update_op], feed_dict={values: [0, 1, 2]}) self.assertAllEqual([0, 1, 2], concatenated.eval()) sess.run([update_op], feed_dict={values: [3, 4]}) self.assertAllEqual([0, 1, 2, 3, 4], concatenated.eval()) sess.run([update_op], feed_dict={values: [5, 6, 7, 8, 9]}) self.assertAllEqual(np.arange(10), concatenated.eval()) def testStreamingConcatStringValues(self): with self.test_session() as sess: values = array_ops.placeholder(dtypes_lib.string, 
[None]) concatenated, update_op = metrics.streaming_concat(values) sess.run(variables.local_variables_initializer()) self.assertItemsEqual([], concatenated.eval()) sess.run([update_op], feed_dict={values: ['a', 'b', 'c']}) self.assertItemsEqual([b'a', b'b', b'c'], concatenated.eval()) sess.run([update_op], feed_dict={values: ['d', 'e']}) self.assertItemsEqual([b'a', b'b', b'c', b'd', b'e'], concatenated.eval()) sess.run([update_op], feed_dict={values: ['f', 'g', 'h', 'i', 'j']}) self.assertItemsEqual( [b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h', b'i', b'j'], concatenated.eval()) def testStreamingConcatMaxSize(self): with self.test_session() as sess: values = math_ops.range(3) concatenated, update_op = metrics.streaming_concat(values, max_size=5) sess.run(variables.local_variables_initializer()) self.assertAllEqual([], concatenated.eval()) sess.run([update_op]) self.assertAllEqual([0, 1, 2], concatenated.eval()) sess.run([update_op]) self.assertAllEqual([0, 1, 2, 0, 1], concatenated.eval()) sess.run([update_op]) self.assertAllEqual([0, 1, 2, 0, 1], concatenated.eval()) def testStreamingConcat2D(self): with self.test_session() as sess: values = array_ops.reshape(math_ops.range(3), (3, 1)) concatenated, update_op = metrics.streaming_concat(values, axis=-1) sess.run(variables.local_variables_initializer()) for _ in range(10): sess.run([update_op]) self.assertAllEqual([[0] * 10, [1] * 10, [2] * 10], concatenated.eval()) def testStreamingConcatErrors(self): with self.assertRaises(ValueError): metrics.streaming_concat(array_ops.placeholder(dtypes_lib.float32)) values = array_ops.zeros((2, 3)) with self.assertRaises(ValueError): metrics.streaming_concat(values, axis=-3, max_size=3) with self.assertRaises(ValueError): metrics.streaming_concat(values, axis=2, max_size=3) with self.assertRaises(ValueError): metrics.streaming_concat( array_ops.placeholder(dtypes_lib.float32, [None, None])) def testStreamingConcatReset(self): with self.test_session() as sess: values = 
array_ops.placeholder(dtypes_lib.int32, [None]) concatenated, update_op = metrics.streaming_concat(values) sess.run(variables.local_variables_initializer()) self.assertAllEqual([], concatenated.eval()) sess.run([update_op], feed_dict={values: [0, 1, 2]}) self.assertAllEqual([0, 1, 2], concatenated.eval()) sess.run(variables.local_variables_initializer()) sess.run([update_op], feed_dict={values: [3, 4]}) self.assertAllEqual([3, 4], concatenated.eval()) class AggregateMetricsTest(test.TestCase): def testAggregateNoMetricsRaisesValueError(self): with self.assertRaises(ValueError): metrics.aggregate_metrics() def testAggregateSingleMetricReturnsOneItemLists(self): values = array_ops.ones((10, 4)) value_tensors, update_ops = metrics.aggregate_metrics( metrics.streaming_mean(values)) self.assertEqual(len(value_tensors), 1) self.assertEqual(len(update_ops), 1) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(1, update_ops[0].eval()) self.assertEqual(1, value_tensors[0].eval()) def testAggregateMultipleMetricsReturnsListsInOrder(self): predictions = array_ops.ones((10, 4)) labels = array_ops.ones((10, 4)) * 3 value_tensors, update_ops = metrics.aggregate_metrics( metrics.streaming_mean_absolute_error(predictions, labels), metrics.streaming_mean_squared_error(predictions, labels)) self.assertEqual(len(value_tensors), 2) self.assertEqual(len(update_ops), 2) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(2, update_ops[0].eval()) self.assertEqual(4, update_ops[1].eval()) self.assertEqual(2, value_tensors[0].eval()) self.assertEqual(4, value_tensors[1].eval()) class AggregateMetricMapTest(test.TestCase): def testAggregateMultipleMetricsReturnsListsInOrder(self): predictions = array_ops.ones((10, 4)) labels = array_ops.ones((10, 4)) * 3 names_to_values, names_to_updates = metrics.aggregate_metric_map({ 'm1': metrics.streaming_mean_absolute_error(predictions, labels), 'm2': 
metrics.streaming_mean_squared_error(predictions, labels), }) self.assertEqual(2, len(names_to_values)) self.assertEqual(2, len(names_to_updates)) with self.test_session() as sess: sess.run(variables.local_variables_initializer()) self.assertEqual(2, names_to_updates['m1'].eval()) self.assertEqual(4, names_to_updates['m2'].eval()) self.assertEqual(2, names_to_values['m1'].eval()) self.assertEqual(4, names_to_values['m2'].eval()) if __name__ == '__main__': test.main()
whn09/tensorflow
tensorflow/contrib/metrics/python/ops/metric_ops_test.py
Python
apache-2.0
188,973
//===--- CFGPrinter.cpp - CFG printer pass --------------------------------===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See https://swift.org/LICENSE.txt for license information // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// // // This file defines external functions that can be called to explicitly // instantiate the CFG printer. // //===----------------------------------------------------------------------===// #include "swift/SILOptimizer/PassManager/Passes.h" #include "swift/SILOptimizer/PassManager/Transforms.h" #include "swift/SIL/CFG.h" #include "swift/SIL/SILBasicBlock.h" #include "swift/SIL/SILInstruction.h" #include "swift/SIL/SILFunction.h" #include "llvm/Support/CommandLine.h" using namespace swift; //===----------------------------------------------------------------------===// // Options //===----------------------------------------------------------------------===// llvm::cl::opt<std::string> SILViewCFGOnlyFun( "sil-view-cfg-only-function", llvm::cl::init(""), llvm::cl::desc("Only produce a graphviz file for this function")); llvm::cl::opt<std::string> SILViewCFGOnlyFuns( "sil-view-cfg-only-functions", llvm::cl::init(""), llvm::cl::desc("Only produce a graphviz file for the sil for the functions " "whose name contains this substring")); //===----------------------------------------------------------------------===// // Top Level Driver //===----------------------------------------------------------------------===// namespace { class SILCFGPrinter : public SILFunctionTransform { /// The entry point to the transformation. void run() override { SILFunction *F = getFunction(); // If we are not supposed to dump view this cfg, return. 
if (!SILViewCFGOnlyFun.empty() && F && F->getName() != SILViewCFGOnlyFun) return; if (!SILViewCFGOnlyFuns.empty() && F && F->getName().find(SILViewCFGOnlyFuns, 0) == StringRef::npos) return; F->viewCFG(); } }; } // end anonymous namespace SILTransform *swift::createCFGPrinter() { return new SILCFGPrinter(); }
allevato/swift
lib/SILOptimizer/UtilityPasses/CFGPrinter.cpp
C++
apache-2.0
2,434
/* * Copyright (c) 2014. Marshal Chen. */ package com.marshalchen.common.uimodule.triangle; import android.content.Context; import android.graphics.*; import android.os.Handler; import android.os.Message; import android.view.View; import android.view.ViewGroup.LayoutParams; import android.view.ViewTreeObserver.OnPreDrawListener; import com.marshalchen.common.uimodule.R; /** * 转盘 * * @author Administrator * */ public class RotaryView extends View { private float min; private float[] humidity; private float[] Sweep; private float[] SWEEP_INC = { 0, 0, 0, 0, 0 }; public Bitmap bitmapScale;// 刻度 private float dp; private final int[] ARC_COLORS = new int[] { 0xa03cbeff, 0xa0ff5e7d, 0xa04cd964, 0xa0ffc71e, 0xa0ff6f2f }; private RectF rect, rectf; private Paint paint; private Paint paintWhite; private Paint bitmapRefreshPaint; private int angleWhite; private boolean arcBool = false; private int discStart = 0;// 角度起点 private float brWidth, brHeight, scale; private float WidthCenter, HeightCenter; private RotatingEndListener rotatingEndListener = null; public RotaryView(Context context, float[] humidity) { super(context); setAngle(humidity); init(); } public void init() { paint = new Paint(); paint.setAntiAlias(true); paint.setStyle(Paint.Style.FILL_AND_STROKE); paintWhite = new Paint(); paintWhite.setAntiAlias(true); paintWhite.setColor(0xffffffff); paintWhite.setStyle(Paint.Style.FILL_AND_STROKE); bitmapRefreshPaint = new Paint(); bitmapRefreshPaint.setAntiAlias(true); setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)); this.getViewTreeObserver().addOnPreDrawListener(// 绘制完毕 new OnPreDrawListener() { public boolean onPreDraw() { Init(); getViewTreeObserver().removeOnPreDrawListener(this); return false; } }); } public void Init() { dp = getResources().getDimension(R.dimen.triangle_dp); bitmapScale = BitmapFactory.decodeResource(getResources(), R.drawable.triangle_icon_round_calibration); brWidth = bitmapScale.getWidth(); brHeight = 
bitmapScale.getHeight(); WidthCenter = getWidth() / 2; HeightCenter = getHeight() / 2; zoom(0f); rectf = new RectF(); rectf.set(dp * 0.1f, dp * 0.1f, getWidth() - dp * 0.1f, getHeight() - dp * 0.1f); } public void setAngle(float[] humidity) { this.humidity = humidity; float[] temp = new float[humidity.length-1]; for (int i = 0; i < temp.length; i++) { temp[i] = humidity[i + 1]; } Sweep = temp; min = getMin(temp); for (int i = 0; i < SWEEP_INC.length; i++) { SWEEP_INC[i] = humidity[i + 1] / min; } } protected void onDraw(Canvas c) { drawArc(c); drawPointer(c); drawArcWhe(c); } public void drawArc(Canvas canvas) { float start = humidity[0]; for (int i = 1; i < humidity.length; i++) { paint.setColor(ARC_COLORS[i - 1]); canvas.drawArc(rect, start - discStart, Sweep[i - 1], true, paint); start += humidity[i]; if (Sweep[i - 1] < humidity[i]) { Sweep[i - 1] += SWEEP_INC[i - 1]; } } } public void drawPointer(Canvas c) { Matrix matrix = new Matrix(); // 设置缩放 matrix.postScale(scale, scale); // 开始转 matrix.preRotate((float) -discStart); // 转轴还原 matrix.preTranslate(-(float) brWidth / 2, -(float) brHeight / 2); // 将位置送到view的中心 matrix.postTranslate(WidthCenter, HeightCenter); // 绘制图片 c.drawBitmap(bitmapScale, matrix, bitmapRefreshPaint); } public void drawArcWhe(Canvas c) { if (arcBool) { c.drawArc(rectf, -90 + angleWhite, 360 - angleWhite, true, paintWhite); } } public float getMin(float[] humidity) { float min = humidity[0]; for (int i = 1; i < humidity.length; i++) { if (humidity[i] < min) { min = humidity[i]; } } return min; } protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); } public boolean isArcBool() { return arcBool; } public void setArcBool(boolean arcBool) { this.arcBool = arcBool; } public void zoom(float ratio) { float than = ratio + dp * 0.3f; rect = new RectF(); rect.set(than, than, getWidth() - than, getHeight() - than); scale = (getWidth() - than * 2) / brWidth; invalidate(); } public 
void rotatingStart(float angle) { discStart = 0; postInvalidate(); new RotationArc(angle); } public void setTargetAngle(float weight) { discStart = (int) (weight * 2.4f); invalidate(); } /** * 转盘转动动画 * * @author Administrator * */ class RotationArc implements Runnable { // 手指离开屏幕返回动画 private Thread thread; private float targetAngle; public RotationArc(float angle) { targetAngle = (int) (angle * 2.4f); thread = new Thread(this); thread.start(); } public void run() { while (true) { try { Thread.sleep(4); discStart++; postInvalidate(); if (discStart >= targetAngle) { // 旋转结束发送消息并回调 Message message = new Message(); rotatingEnd.sendMessage(message); break; } } catch (InterruptedException e) { e.printStackTrace(); } } } } Handler rotatingEnd = new Handler() { public void handleMessage(Message msg) { if (rotatingEndListener != null) { rotatingEndListener.onRotatingEnd(); } super.handleMessage(msg); } }; public void RotatingShowStart() { new RotatingShow(); } /** * 旋转显示动画 * * @author Administrator * */ class RotatingShow implements Runnable { private Thread thread; public RotatingShow() { angleWhite = 0; thread = new Thread(this); thread.start(); } public void run() { while (true) { try { Thread.sleep(6); if (angleWhite == 360) { break; } angleWhite += 5; postInvalidate(); } catch (InterruptedException e) { e.printStackTrace(); } } } } public void setRotatingEndListener(RotatingEndListener rotatingEndListener) { this.rotatingEndListener = rotatingEndListener; } public interface RotatingEndListener { // 旋转动态监听 public void onRotatingEnd(); } }
Godchin1990/UltimateAndroid
UltimateAndroidNormal/UltimateAndroidUi/src/com/marshalchen/common/uimodule/triangle/RotaryView.java
Java
apache-2.0
6,256
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.pagemem.wal.record.delta; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.pagemem.PageMemory; import org.apache.ignite.internal.processors.cache.persistence.tree.io.AbstractDataPageIO; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO; import org.apache.ignite.internal.util.typedef.internal.S; import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.DATA_PAGE_SET_FREE_LIST_PAGE; /** * */ public class DataPageSetFreeListPageRecord extends PageDeltaRecord { /** */ private long freeListPage; /** * @param grpId Cache group ID. * @param pageId Page ID. * @param freeListPage Free list page ID. */ public DataPageSetFreeListPageRecord(int grpId, long pageId, long freeListPage) { super(grpId, pageId); this.freeListPage = freeListPage; } /** * @return Free list page ID. 
*/ public long freeListPage() { return freeListPage; } /** {@inheritDoc} */ @Override public void applyDelta(PageMemory pageMem, long pageAddr) throws IgniteCheckedException { AbstractDataPageIO io = PageIO.getPageIO(pageAddr); io.setFreeListPageId(pageAddr, freeListPage); } /** {@inheritDoc} */ @Override public RecordType type() { return DATA_PAGE_SET_FREE_LIST_PAGE; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(DataPageSetFreeListPageRecord.class, this, "super", super.toString()); } }
BiryukovVA/ignite
modules/core/src/main/java/org/apache/ignite/internal/pagemem/wal/record/delta/DataPageSetFreeListPageRecord.java
Java
apache-2.0
2,387
/* * Kendo UI v2015.3.1111 (http://www.telerik.com/kendo-ui) * Copyright 2015 Telerik AD. All rights reserved. * * Kendo UI commercial licenses may be obtained at * http://www.telerik.com/purchase/license-agreement/kendo-ui-complete * If you do not own a commercial license, this file shall be governed by the trial license terms. */ !function(e,define){define([],e)}(function(){return function(e){var t=e.kendo||(e.kendo={cultures:{}});t.cultures["lt-LT"]={name:"lt-LT",numberFormat:{pattern:["-n"],decimals:2,",":" ",".":",",groupSize:[3],percent:{pattern:["-n %","n %"],decimals:2,",":" ",".":",",groupSize:[3],symbol:"%"},currency:{name:"Euro",abbr:"EUR",pattern:["-n $","n $"],decimals:2,",":" ",".":",",groupSize:[3],symbol:"€"}},calendars:{standard:{days:{names:["sekmadienis","pirmadienis","antradienis","trečiadienis","ketvirtadienis","penktadienis","šeštadienis"],namesAbbr:["Sk","Pr","An","Tr","Kt","Pn","Št"],namesShort:["S","P","A","T","K","Pn","Š"]},months:{names:["sausis","vasaris","kovas","balandis","gegužė","birželis","liepa","rugpjūtis","rugsėjis","spalis","lapkritis","gruodis"],namesAbbr:["Sau","Vas","Kov","Bal","Geg","Bir","Lie","Rgp","Rgs","Spl","Lap","Grd"]},AM:[""],PM:[""],patterns:{d:"yyyy-MM-dd",D:"yyyy 'm.' MMMM d 'd.'",F:"yyyy 'm.' MMMM d 'd.' HH:mm:ss",g:"yyyy-MM-dd HH:mm",G:"yyyy-MM-dd HH:mm:ss",m:"MMMM d 'd.'",M:"MMMM d 'd.'",s:"yyyy'-'MM'-'dd'T'HH':'mm':'ss",t:"HH:mm",T:"HH:mm:ss",u:"yyyy'-'MM'-'dd HH':'mm':'ss'Z'",y:"yyyy 'm.' MMMM",Y:"yyyy 'm.' MMMM"},"/":"-",":":":",firstDay:1}}}}(this),window.kendo},"function"==typeof define&&define.amd?define:function(e,t){t()});
arfian/gdrj
web2/assets/kendo-ui/js/cultures/kendo.culture.lt-LT.min.js
JavaScript
apache-2.0
1,623
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.api.jms.management; import javax.jms.JMSException; import javax.jms.Message; import org.apache.activemq.artemis.api.core.client.ClientMessage; import org.apache.activemq.artemis.api.core.management.ManagementHelper; import org.apache.activemq.artemis.jms.client.ActiveMQMessage; /** * Helper class to use JMS messages to manage ActiveMQ Artemis server resources. */ public class JMSManagementHelper { private static ClientMessage getCoreMessage(final Message jmsMessage) { if (jmsMessage instanceof ActiveMQMessage == false) { throw new IllegalArgumentException("Cannot send a foreign message as a management message " + jmsMessage.getClass().getName()); } return ((ActiveMQMessage) jmsMessage).getCoreMessage(); } /** * Stores a resource attribute in a JMS message to retrieve the value from the server resource. 
* * @param message JMS message * @param resourceName the name of the resource * @param attribute the name of the attribute * @throws JMSException if an exception occurs while putting the information in the message * @see org.apache.activemq.artemis.api.core.management.ResourceNames */ public static void putAttribute(final Message message, final String resourceName, final String attribute) throws JMSException { ManagementHelper.putAttribute(JMSManagementHelper.getCoreMessage(message), resourceName, attribute); } /** * Stores an operation invocation in a JMS message to invoke the corresponding operation the value from the server resource. * * @param message JMS message * @param resourceName the name of the resource * @param operationName the name of the operation to invoke on the resource * @throws JMSException if an exception occurs while putting the information in the message * @see org.apache.activemq.artemis.api.core.management.ResourceNames */ public static void putOperationInvocation(final Message message, final String resourceName, final String operationName) throws JMSException { try { ManagementHelper.putOperationInvocation(JMSManagementHelper.getCoreMessage(message), resourceName, operationName); } catch (Exception e) { throw JMSManagementHelper.convertFromException(e); } } private static JMSException convertFromException(final Exception e) { JMSException jmse = new JMSException(e.getMessage()); jmse.initCause(e); return jmse; } /** * Stores an operation invocation in a JMS message to invoke the corresponding operation the value from the server resource. 
* * @param message JMS message * @param resourceName the name of the server resource * @param operationName the name of the operation to invoke on the server resource * @param parameters the parameters to use to invoke the server resource * @throws JMSException if an exception occurs while putting the information in the message * @see org.apache.activemq.artemis.api.core.management.ResourceNames */ public static void putOperationInvocation(final Message message, final String resourceName, final String operationName, final Object... parameters) throws JMSException { try { ManagementHelper.putOperationInvocation(JMSManagementHelper.getCoreMessage(message), resourceName, operationName, parameters); } catch (Exception e) { throw JMSManagementHelper.convertFromException(e); } } /** * Returns whether the JMS message corresponds to the result of a management operation invocation. */ public static boolean isOperationResult(final Message message) throws JMSException { return ManagementHelper.isOperationResult(JMSManagementHelper.getCoreMessage(message)); } /** * Returns whether the JMS message corresponds to the result of a management attribute value. */ public static boolean isAttributesResult(final Message message) throws JMSException { return ManagementHelper.isAttributesResult(JMSManagementHelper.getCoreMessage(message)); } /** * Returns whether the invocation of the management operation on the server resource succeeded. */ public static boolean hasOperationSucceeded(final Message message) throws JMSException { return ManagementHelper.hasOperationSucceeded(JMSManagementHelper.getCoreMessage(message)); } /** * Returns the result of an operation invocation or an attribute value. * <br> * If an error occurred on the server, {@link #hasOperationSucceeded(Message)} will return {@code false}. * and the result will be a String corresponding to the server exception. 
*/ public static Object[] getResults(final Message message) throws Exception { return ManagementHelper.getResults(JMSManagementHelper.getCoreMessage(message)); } /** * Returns the result of an operation invocation or an attribute value. * <br> * If an error occurred on the server, {@link #hasOperationSucceeded(Message)} will return {@code false}. * and the result will be a String corresponding to the server exception. */ public static Object getResult(final Message message) throws Exception { return getResult(message, null); } /** * Returns the result of an operation invocation or an attribute value. * <br> * If an error occurred on the server, {@link #hasOperationSucceeded(Message)} will return {@code false}. * and the result will be a String corresponding to the server exception. */ public static Object getResult(final Message message, Class desiredType) throws Exception { return ManagementHelper.getResult(JMSManagementHelper.getCoreMessage(message), desiredType); } private JMSManagementHelper() { // Utility class } }
gaohoward/activemq-artemis
artemis-jms-client/src/main/java/org/apache/activemq/artemis/api/jms/management/JMSManagementHelper.java
Java
apache-2.0
6,949
/* Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.andes.test.unit.ct; import javax.jms.Connection; import javax.jms.Message; import javax.jms.MessageProducer; import javax.jms.Session; import javax.jms.TextMessage; import javax.jms.Topic; import javax.jms.TopicConnection; import javax.jms.TopicConnectionFactory; import javax.jms.TopicPublisher; import javax.jms.TopicSession; import javax.jms.TopicSubscriber; import org.wso2.andes.client.AMQConnection; import org.wso2.andes.client.AMQQueue; import org.wso2.andes.client.AMQSession; import org.wso2.andes.client.AMQTopic; import org.wso2.andes.test.utils.QpidBrokerTestCase; /** * Crash Recovery tests for durable subscription * */ public class DurableSubscriberTest extends QpidBrokerTestCase { private final String _topicName = "durableSubscriberTopic"; /** * test strategy: * create and register a durable subscriber then close it * create a publisher and send a persistant message followed by a non persistant message * crash and restart the broker * recreate the durable subscriber and check that only the first message is received */ public void testDurSubRestoredAfterNonPersistentMessageSent() throws Exception { if (isBrokerStorePersistent()) { TopicConnectionFactory factory = 
getConnectionFactory(); Topic topic = (Topic) getInitialContext().lookup(_topicName); //create and register a durable subscriber then close it TopicConnection durConnection = factory.createTopicConnection("guest", "guest"); TopicSession durSession = durConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub1 = durSession.createDurableSubscriber(topic, "dursub"); durConnection.start(); durSub1.close(); durSession.close(); durConnection.stop(); //create a publisher and send a persistant message followed by a non persistant message TopicConnection pubConnection = factory.createTopicConnection("guest", "guest"); TopicSession pubSession = pubConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicPublisher publisher = pubSession.createPublisher(topic); Message message = pubSession.createMessage(); message.setIntProperty("count", 1); publisher.publish(message, javax.jms.DeliveryMode.PERSISTENT, javax.jms.Message.DEFAULT_PRIORITY, javax.jms.Message.DEFAULT_TIME_TO_LIVE); message.setIntProperty("count", 2); publisher.publish(message, javax.jms.DeliveryMode.NON_PERSISTENT, javax.jms.Message.DEFAULT_PRIORITY, javax.jms.Message.DEFAULT_TIME_TO_LIVE); publisher.close(); pubSession.close(); //now stop the server try { restartBroker(); } catch (Exception e) { _logger.error("problems restarting broker: " + e); throw e; } //now recreate the durable subscriber and check the received messages factory = getConnectionFactory(); topic = (Topic) getInitialContext().lookup(_topicName); TopicConnection durConnection2 = factory.createTopicConnection("guest", "guest"); TopicSession durSession2 = durConnection2.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub2 = durSession2.createDurableSubscriber(topic, "dursub"); durConnection2.start(); Message m1 = durSub2.receive(1000); if (m1 == null) { assertTrue("testDurSubRestoredAfterNonPersistentMessageSent test failed. 
no message was returned", false); } assertTrue("testDurSubRestoredAfterNonPersistentMessageSent test failed. Wrong message was returned.", m1.getIntProperty("count") == 1); durSession2.unsubscribe("dursub"); durConnection2.close(); } } /** * create and register a durable subscriber with a message selector and then close it * crash the broker * create a publisher and send 5 right messages and 5 wrong messages * recreate the durable subscriber and check we receive the 5 expected messages */ public void testDurSubRestoresMessageSelector() throws Exception { if (isBrokerStorePersistent()) { TopicConnectionFactory factory = getConnectionFactory(); Topic topic = (Topic) getInitialContext().lookup(_topicName); //create and register a durable subscriber with a message selector and then close it TopicConnection durConnection = factory.createTopicConnection("guest", "guest"); TopicSession durSession = durConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub1 = durSession.createDurableSubscriber(topic, "dursub", "testprop='true'", false); durConnection.start(); durSub1.close(); durSession.close(); durConnection.stop(); //now stop the server try { restartBroker(); } catch (Exception e) { _logger.error("problems restarting broker: " + e); throw e; } topic = (Topic) getInitialContext().lookup(_topicName); factory = getConnectionFactory(); TopicConnection pubConnection = factory.createTopicConnection("guest", "guest"); TopicSession pubSession = pubConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicPublisher publisher = pubSession.createPublisher(topic); for (int i = 0; i < 5; i++) { Message message = pubSession.createMessage(); message.setStringProperty("testprop", "true"); publisher.publish(message); message = pubSession.createMessage(); message.setStringProperty("testprop", "false"); publisher.publish(message); } publisher.close(); pubSession.close(); //now recreate the durable subscriber and check the received messages 
TopicConnection durConnection2 = factory.createTopicConnection("guest", "guest"); TopicSession durSession2 = durConnection2.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub2 = durSession2.createDurableSubscriber(topic, "dursub", "testprop='true'", false); durConnection2.start(); for (int i = 0; i < 5; i++) { Message message = durSub2.receive(1000); if (message == null) { assertTrue("testDurSubRestoresMessageSelector test failed. no message was returned", false); } else { assertTrue("testDurSubRestoresMessageSelector test failed. message selector not reset", message.getStringProperty("testprop").equals("true")); } } durSession2.unsubscribe("dursub"); durConnection2.close(); } } /** * create and register a durable subscriber without a message selector and then unsubscribe it * create and register a durable subscriber with a message selector and then close it * restart the broker * send matching and non matching messages * recreate and register the durable subscriber with a message selector * verify only the matching messages are received */ public void testDurSubChangedToHaveSelectorThenRestart() throws Exception { if (! 
isBrokerStorePersistent()) { _logger.warn("Test skipped due to requirement of a persistent store"); return; } final String SUB_NAME=getTestQueueName(); TopicConnectionFactory factory = getConnectionFactory(); Topic topic = (Topic) getInitialContext().lookup(_topicName); //create and register a durable subscriber then unsubscribe it TopicConnection durConnection = factory.createTopicConnection("guest", "guest"); TopicSession durSession = durConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub1 = durSession.createDurableSubscriber(topic, SUB_NAME); durConnection.start(); durSub1.close(); durSession.unsubscribe(SUB_NAME); durSession.close(); durConnection.close(); //create and register a durable subscriber with a message selector and then close it TopicConnection durConnection2 = factory.createTopicConnection("guest", "guest"); TopicSession durSession2 = durConnection2.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub2 = durSession2.createDurableSubscriber(topic, SUB_NAME, "testprop='true'", false); durConnection2.start(); durSub2.close(); durSession2.close(); durConnection2.close(); //now restart the server try { restartBroker(); } catch (Exception e) { _logger.error("problems restarting broker: " + e); throw e; } //send messages matching and not matching the selector TopicConnection pubConnection = factory.createTopicConnection("guest", "guest"); TopicSession pubSession = pubConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicPublisher publisher = pubSession.createPublisher(topic); for (int i = 0; i < 5; i++) { Message message = pubSession.createMessage(); message.setStringProperty("testprop", "true"); publisher.publish(message); message = pubSession.createMessage(); message.setStringProperty("testprop", "false"); publisher.publish(message); } publisher.close(); pubSession.close(); //now recreate the durable subscriber with selector to check there are no exceptions generated //and then 
verify the messages are received correctly TopicConnection durConnection3 = (TopicConnection) factory.createConnection("guest", "guest"); TopicSession durSession3 = (TopicSession) durConnection3.createSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub3 = durSession3.createDurableSubscriber(topic, SUB_NAME, "testprop='true'", false); durConnection3.start(); for (int i = 0; i < 5; i++) { Message message = durSub3.receive(2000); if (message == null) { fail("testDurSubChangedToHaveSelectorThenRestart test failed. Expected message " + i + " was not returned"); } else { assertTrue("testDurSubChangedToHaveSelectorThenRestart test failed. Got message not matching selector", message.getStringProperty("testprop").equals("true")); } } durSub3.close(); durSession3.unsubscribe(SUB_NAME); durSession3.close(); durConnection3.close(); } /** * create and register a durable subscriber with a message selector and then unsubscribe it * create and register a durable subscriber without a message selector and then close it * restart the broker * send matching and non matching messages * recreate and register the durable subscriber without a message selector * verify ALL the sent messages are received */ public void testDurSubChangedToNotHaveSelectorThenRestart() throws Exception { if (! 
isBrokerStorePersistent()) { _logger.warn("Test skipped due to requirement of a persistent store"); return; } final String SUB_NAME=getTestQueueName(); TopicConnectionFactory factory = getConnectionFactory(); Topic topic = (Topic) getInitialContext().lookup(_topicName); //create and register a durable subscriber with selector then unsubscribe it TopicConnection durConnection = factory.createTopicConnection("guest", "guest"); TopicSession durSession = durConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub1 = durSession.createDurableSubscriber(topic, SUB_NAME, "testprop='true'", false); durConnection.start(); durSub1.close(); durSession.unsubscribe(SUB_NAME); durSession.close(); durConnection.close(); //create and register a durable subscriber without the message selector and then close it TopicConnection durConnection2 = factory.createTopicConnection("guest", "guest"); TopicSession durSession2 = durConnection2.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub2 = durSession2.createDurableSubscriber(topic, SUB_NAME); durConnection2.start(); durSub2.close(); durSession2.close(); durConnection2.close(); //now restart the server try { restartBroker(); } catch (Exception e) { _logger.error("problems restarting broker: " + e); throw e; } //send messages matching and not matching the original used selector TopicConnection pubConnection = factory.createTopicConnection("guest", "guest"); TopicSession pubSession = pubConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE); TopicPublisher publisher = pubSession.createPublisher(topic); for (int i = 1; i <= 5; i++) { Message message = pubSession.createMessage(); message.setStringProperty("testprop", "true"); publisher.publish(message); message = pubSession.createMessage(); message.setStringProperty("testprop", "false"); publisher.publish(message); } publisher.close(); pubSession.close(); //now recreate the durable subscriber without selector to check there are no 
exceptions generated //then verify ALL messages sent are received TopicConnection durConnection3 = (TopicConnection) factory.createConnection("guest", "guest"); TopicSession durSession3 = (TopicSession) durConnection3.createSession(false, Session.AUTO_ACKNOWLEDGE); TopicSubscriber durSub3 = durSession3.createDurableSubscriber(topic, SUB_NAME); durConnection3.start(); for (int i = 1; i <= 10; i++) { Message message = durSub3.receive(2000); if (message == null) { fail("testDurSubChangedToNotHaveSelectorThenRestart test failed. Expected message " + i + " was not received"); } } durSub3.close(); durSession3.unsubscribe(SUB_NAME); durSession3.close(); durConnection3.close(); } public void testResubscribeWithChangedSelectorAndRestart() throws Exception { if (! isBrokerStorePersistent()) { _logger.warn("Test skipped due to requirement of a persistent store"); return; } Connection conn = getConnection(); conn.start(); Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE); AMQTopic topic = new AMQTopic((AMQConnection) conn, "testResubscribeWithChangedSelectorAndRestart"); MessageProducer producer = session.createProducer(topic); // Create durable subscriber that matches A TopicSubscriber subA = session.createDurableSubscriber(topic, "testResubscribeWithChangedSelector", "Match = True", false); // Send 1 matching message and 1 non-matching message TextMessage msg = session.createTextMessage("testResubscribeWithChangedSelectorAndRestart1"); msg.setBooleanProperty("Match", true); producer.send(msg); msg = session.createTextMessage("testResubscribeWithChangedSelectorAndRestart2"); msg.setBooleanProperty("Match", false); producer.send(msg); Message rMsg = subA.receive(1000); assertNotNull(rMsg); assertEquals("Content was wrong", "testResubscribeWithChangedSelectorAndRestart1", ((TextMessage) rMsg).getText()); // Queue has no messages left AMQQueue subQueueTmp = new AMQQueue("amq.topic", "clientid" + ":" + "testResubscribeWithChangedSelectorAndRestart"); 
assertEquals("Msg count should be 0", 0, ((AMQSession<?, ?>) session).getQueueDepth(subQueueTmp)); rMsg = subA.receive(1000); assertNull(rMsg); // Send another 1 matching message and 1 non-matching message msg = session.createTextMessage("testResubscribeWithChangedSelectorAndRestart1"); msg.setBooleanProperty("Match", true); producer.send(msg); msg = session.createTextMessage("testResubscribeWithChangedSelectorAndRestart2"); msg.setBooleanProperty("Match", false); producer.send(msg); // Disconnect subscriber without receiving the message to //leave it on the underlying queue subA.close(); // Reconnect with new selector that matches B TopicSubscriber subB = session.createDurableSubscriber(topic, "testResubscribeWithChangedSelectorAndRestart", "Match = false", false); //verify no messages are now present on the queue as changing selector should have issued //an unsubscribe and thus deleted the previous durable backing queue for the subscription. //check the dur sub's underlying queue now has msg count 0 AMQQueue subQueue = new AMQQueue("amq.topic", "clientid" + ":" + "testResubscribeWithChangedSelectorAndRestart"); assertEquals("Msg count should be 0", 0, ((AMQSession<?, ?>) session).getQueueDepth(subQueue)); // Check that new messages are received properly msg = session.createTextMessage("testResubscribeWithChangedSelectorAndRestart1"); msg.setBooleanProperty("Match", true); producer.send(msg); msg = session.createTextMessage("testResubscribeWithChangedSelectorAndRestart2"); msg.setBooleanProperty("Match", false); producer.send(msg); rMsg = subB.receive(1000); assertNotNull(rMsg); assertEquals("Content was wrong", "testResubscribeWithChangedSelectorAndRestart2", ((TextMessage) rMsg).getText()); rMsg = subB.receive(1000); assertNull(rMsg); //check the dur sub's underlying queue now has msg count 0 subQueue = new AMQQueue("amq.topic", "clientid" + ":" + "testResubscribeWithChangedSelectorAndRestart"); assertEquals("Msg count should be 0", 0, ((AMQSession<?, ?>) 
session).getQueueDepth(subQueue)); //now restart the server try { restartBroker(); } catch (Exception e) { _logger.error("problems restarting broker: " + e); throw e; } // Reconnect to broker Connection connection = getConnectionFactory().createConnection("guest", "guest"); connection.start(); session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); topic = new AMQTopic((AMQConnection) connection, "testResubscribeWithChangedSelectorAndRestart"); producer = session.createProducer(topic); //verify no messages now present on the queue after we restart the broker //check the dur sub's underlying queue now has msg count 0 subQueue = new AMQQueue("amq.topic", "clientid" + ":" + "testResubscribeWithChangedSelectorAndRestart"); assertEquals("Msg count should be 0", 0, ((AMQSession<?, ?>) session).getQueueDepth(subQueue)); // Reconnect with new selector that matches B TopicSubscriber subC = session.createDurableSubscriber(topic, "testResubscribeWithChangedSelectorAndRestart", "Match = False", false); // Check that new messages are still sent and recieved properly msg = session.createTextMessage("testResubscribeWithChangedSelectorAndRestart1"); msg.setBooleanProperty("Match", true); producer.send(msg); msg = session.createTextMessage("testResubscribeWithChangedSelectorAndRestart2"); msg.setBooleanProperty("Match", false); producer.send(msg); //check the dur sub's underlying queue now has msg count 1 subQueue = new AMQQueue("amq.topic", "clientid" + ":" + "testResubscribeWithChangedSelectorAndRestart"); assertEquals("Msg count should be 1", 1, ((AMQSession<?, ?>) session).getQueueDepth(subQueue)); rMsg = subC.receive(1000); assertNotNull(rMsg); assertEquals("Content was wrong", "testResubscribeWithChangedSelectorAndRestart2", ((TextMessage) rMsg).getText()); rMsg = subC.receive(1000); assertNull(rMsg); session.unsubscribe("testResubscribeWithChangedSelectorAndRestart"); subC.close(); session.close(); connection.close(); } }
ThilankaBowala/andes
modules/andes-core/systests/src/main/java/org/wso2/andes/test/unit/ct/DurableSubscriberTest.java
Java
apache-2.0
22,707
//go:build linux // +build linux /* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package ipvs import ( "fmt" "reflect" "syscall" "testing" netutils "k8s.io/utils/net" libipvs "github.com/moby/ipvs" ) func Test_toVirtualServer(t *testing.T) { Tests := []struct { ipvsService libipvs.Service virtualServer VirtualServer expectError bool reason string }{ { libipvs.Service{ Flags: 0x0, }, VirtualServer{}, true, fmt.Sprintf("IPVS Service Flags should be >= %d, got 0x0", FlagHashed), }, { libipvs.Service{ Flags: 0x1, }, VirtualServer{}, true, fmt.Sprintf("IPVS Service Flags should be >= %d, got 0x1", FlagHashed), }, { libipvs.Service{ Protocol: syscall.IPPROTO_TCP, Port: 80, FWMark: 0, SchedName: "", Flags: uint32(FlagPersistent + FlagHashed), Timeout: 0, Netmask: 0xffffffff, AddressFamily: syscall.AF_INET, Address: nil, PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("0.0.0.0"), Protocol: "TCP", Port: 80, Scheduler: "", Flags: ServiceFlags(FlagPersistent), Timeout: 0, }, false, "", }, { libipvs.Service{ Protocol: syscall.IPPROTO_UDP, Port: 33434, FWMark: 0, SchedName: "wlc", Flags: uint32(0 + FlagHashed), Timeout: 100, Netmask: 128, AddressFamily: syscall.AF_INET6, Address: netutils.ParseIPSloppy("2012::beef"), PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("2012::beef"), Protocol: "UDP", Port: 33434, Scheduler: "wlc", Flags: ServiceFlags(0), Timeout: 100, }, false, "", }, { libipvs.Service{ Protocol: 0, Port: 0, FWMark: 0, SchedName: 
"lc", Flags: uint32(0 + FlagHashed), Timeout: 0, Netmask: 0xffffffff, AddressFamily: syscall.AF_INET, Address: netutils.ParseIPSloppy("1.2.3.4"), PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("1.2.3.4"), Protocol: "", Port: 0, Scheduler: "lc", Flags: ServiceFlags(0), Timeout: 0, }, false, "", }, { libipvs.Service{ Protocol: 0, Port: 0, FWMark: 0, SchedName: "wrr", Flags: uint32(FlagPersistent + FlagHashed), Timeout: 0, Netmask: 128, AddressFamily: syscall.AF_INET6, Address: nil, PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("::0"), Protocol: "", Port: 0, Scheduler: "wrr", Flags: ServiceFlags(FlagPersistent), Timeout: 0, }, false, "", }, { libipvs.Service{ Protocol: syscall.IPPROTO_SCTP, Port: 80, FWMark: 0, SchedName: "", Flags: uint32(FlagPersistent + FlagHashed), Timeout: 0, Netmask: 0xffffffff, AddressFamily: syscall.AF_INET, Address: nil, PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("0.0.0.0"), Protocol: "SCTP", Port: 80, Scheduler: "", Flags: ServiceFlags(FlagPersistent), Timeout: 0, }, false, "", }, } for i := range Tests { got, err := toVirtualServer(&Tests[i].ipvsService) if Tests[i].expectError && err == nil { t.Errorf("case: %d, expected error: %s, got nil", i, Tests[i].reason) } if !Tests[i].expectError && err != nil { t.Errorf("case: %d, unexpected error: %v", i, err) } if got != nil { if !reflect.DeepEqual(*got, Tests[i].virtualServer) { t.Errorf("case: %d, got %#v, want %#v", i, *got, Tests[i].virtualServer) } } } } func Test_toIPVSService(t *testing.T) { Tests := []struct { ipvsService libipvs.Service virtualServer VirtualServer }{ { libipvs.Service{ Protocol: syscall.IPPROTO_TCP, Port: 80, FWMark: 0, SchedName: "", Flags: 0, Timeout: 0, Netmask: 0xffffffff, AddressFamily: syscall.AF_INET, Address: netutils.ParseIPSloppy("0.0.0.0"), PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("0.0.0.0"), Protocol: "TCP", Port: 80, Scheduler: "", Flags: 0, Timeout: 0, }, }, { libipvs.Service{ Protocol: 
syscall.IPPROTO_UDP, Port: 33434, FWMark: 0, SchedName: "wlc", Flags: 1234, Timeout: 100, Netmask: 128, AddressFamily: syscall.AF_INET6, Address: netutils.ParseIPSloppy("2012::beef"), PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("2012::beef"), Protocol: "UDP", Port: 33434, Scheduler: "wlc", Flags: 1234, Timeout: 100, }, }, { libipvs.Service{ Protocol: 0, Port: 0, FWMark: 0, SchedName: "lc", Flags: 0, Timeout: 0, Netmask: 0xffffffff, AddressFamily: syscall.AF_INET, Address: netutils.ParseIPSloppy("1.2.3.4"), PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("1.2.3.4"), Protocol: "", Port: 0, Scheduler: "lc", Flags: 0, Timeout: 0, }, }, { libipvs.Service{ Protocol: 0, Port: 0, FWMark: 0, SchedName: "wrr", Flags: 0, Timeout: 0, Netmask: 128, AddressFamily: syscall.AF_INET6, Address: netutils.ParseIPSloppy("::0"), PEName: "", }, VirtualServer{ Address: netutils.ParseIPSloppy("::0"), Protocol: "", Port: 0, Scheduler: "wrr", Flags: 0, Timeout: 0, }, }, } for i := range Tests { got, err := toIPVSService(&Tests[i].virtualServer) if err != nil { t.Errorf("case: %d, unexpected error: %v", i, err) } if !reflect.DeepEqual(*got, Tests[i].ipvsService) { t.Errorf("case: %d - got %#v, want %#v", i, *got, Tests[i].ipvsService) } } } func Test_toRealServer(t *testing.T) { Tests := []struct { ipvsDestination libipvs.Destination realServer RealServer }{ { libipvs.Destination{ Port: 54321, ConnectionFlags: 0, Weight: 1, Address: netutils.ParseIPSloppy("1.2.3.4"), }, RealServer{ Address: netutils.ParseIPSloppy("1.2.3.4"), Port: 54321, Weight: 1, }, }, { libipvs.Destination{ Port: 53, ConnectionFlags: 0, Weight: 1, Address: netutils.ParseIPSloppy("2002::cafe"), }, RealServer{ Address: netutils.ParseIPSloppy("2002::cafe"), Port: 53, Weight: 1, }, }, } for i := range Tests { got, err := toRealServer(&Tests[i].ipvsDestination) if err != nil { t.Errorf("case %d unexpected error: %v", i, err) } if !reflect.DeepEqual(*got, Tests[i].realServer) { t.Errorf("case %d 
Failed to translate Destination - got %#v, want %#v", i, *got, Tests[i].realServer) } } } func Test_toIPVSDestination(t *testing.T) { Tests := []struct { realServer RealServer ipvsDestination libipvs.Destination }{ { RealServer{ Address: netutils.ParseIPSloppy("1.2.3.4"), Port: 54321, Weight: 1, }, libipvs.Destination{ Port: 54321, ConnectionFlags: 0, Weight: 1, Address: netutils.ParseIPSloppy("1.2.3.4"), }, }, { RealServer{ Address: netutils.ParseIPSloppy("2002::cafe"), Port: 53, Weight: 1, }, libipvs.Destination{ Port: 53, ConnectionFlags: 0, Weight: 1, Address: netutils.ParseIPSloppy("2002::cafe"), }, }, } for i := range Tests { got, err := toIPVSDestination(&Tests[i].realServer) if err != nil { t.Errorf("case %d unexpected error: %v", i, err) } if !reflect.DeepEqual(*got, Tests[i].ipvsDestination) { t.Errorf("case %d failed to translate Destination - got %#v, want %#v", i, *got, Tests[i].ipvsDestination) } } } func Test_stringToProtocol(t *testing.T) { tests := []string{ "TCP", "UDP", "ICMP", "SCTP", } expected := []uint16{ uint16(syscall.IPPROTO_TCP), uint16(syscall.IPPROTO_UDP), uint16(0), uint16(syscall.IPPROTO_SCTP), } for i := range tests { got := stringToProtocol(tests[i]) if got != expected[i] { t.Errorf("stringToProtocol() failed - got %#v, want %#v", got, expected[i]) } } } func Test_protocolToString(t *testing.T) { tests := []Protocol{ syscall.IPPROTO_TCP, syscall.IPPROTO_UDP, Protocol(0), syscall.IPPROTO_SCTP, } expected := []string{ "TCP", "UDP", "", "SCTP", } for i := range tests { got := protocolToString(tests[i]) if got != expected[i] { t.Errorf("protocolToString() failed - got %#v, want %#v", got, expected[i]) } } }
thockin/kubernetes
pkg/util/ipvs/ipvs_linux_test.go
GO
apache-2.0
9,619
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ecs.model.transform;

import java.util.Map;
import java.util.Map.Entry;

import com.amazonaws.services.ecs.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;

import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * JSON unmarshaller for {@code DescribeTaskDefinitionResult}.
 *
 * <p>Walks the token stream one level below the depth at which unmarshalling
 * started, extracting the {@code taskDefinition} member when it appears, and
 * stops once the stream closes back to (or above) the starting depth.
 */
public class DescribeTaskDefinitionResultJsonUnmarshaller implements Unmarshaller<DescribeTaskDefinitionResult, JsonUnmarshallerContext> {

    /**
     * Unmarshalls a {@code DescribeTaskDefinitionResult} from the given JSON
     * context.
     *
     * @param context the parsing context positioned at (or just before) the
     *     result object
     * @return the populated result, or {@code null} if the value is JSON null
     */
    public DescribeTaskDefinitionResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        DescribeTaskDefinitionResult result = new DescribeTaskDefinitionResult();

        // Remember where we started so we know when the enclosing object ends.
        int depthAtEntry = context.getCurrentDepth();
        String parentAtEntry = context.getCurrentParentElement();
        int memberDepth = depthAtEntry + 1;

        JsonToken current = context.getCurrentToken();
        if (current == null) {
            current = context.nextToken();
        }
        if (current == VALUE_NULL) {
            return null;
        }

        for (; current != null; current = context.nextToken()) {
            if (current == FIELD_NAME || current == START_OBJECT) {
                // Members of this result live exactly one level below entry depth.
                if (context.testExpression("taskDefinition", memberDepth)) {
                    context.nextToken();
                    result.setTaskDefinition(TaskDefinitionJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (current == END_ARRAY || current == END_OBJECT) {
                String lastParent = context.getLastParsedParentElement();
                boolean sameScope = (lastParent == null) || lastParent.equals(parentAtEntry);
                // Stop once the stream has closed back to the depth we entered at.
                if (sameScope && context.getCurrentDepth() <= depthAtEntry) {
                    break;
                }
            }
        }

        return result;
    }

    // Lazily created shared instance; unmarshallers are stateless.
    private static DescribeTaskDefinitionResultJsonUnmarshaller instance;

    /** Returns the shared unmarshaller instance, creating it on first use. */
    public static DescribeTaskDefinitionResultJsonUnmarshaller getInstance() {
        if (instance == null) {
            instance = new DescribeTaskDefinitionResultJsonUnmarshaller();
        }
        return instance;
    }
}
akiradeveloper/aws-sdk-java
aws-java-sdk-ecs/src/main/java/com/amazonaws/services/ecs/model/transform/DescribeTaskDefinitionResultJsonUnmarshaller.java
Java
apache-2.0
2,776
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.dx.io.instructions;

/**
 * A decoded Dalvik instruction holding the payload for a
 * {@code fill-array-data} instruction: the raw element data together
 * with the element count and per-element width.
 */
public final class FillArrayDataPayloadDecodedInstruction extends DecodedInstruction {
    /** raw element data; one of {@code byte[]}, {@code short[]}, {@code int[]} or {@code long[]} */
    private final Object payload;

    /** number of elements in {@link #payload} */
    private final int count;

    /** width of one element, in bytes */
    private final int widthBytes;

    /**
     * Internal constructor. Callers are trusted to pass a data array whose
     * component type matches {@code widthBytes}; no check is performed here.
     */
    private FillArrayDataPayloadDecodedInstruction(InstructionCodec format,
            int opcode, Object data, int size, int elementWidth) {
        super(format, opcode, 0, null, 0, 0L);
        this.payload = data;
        this.count = size;
        this.widthBytes = elementWidth;
    }

    /** Constructs an instance over one-byte elements. */
    public FillArrayDataPayloadDecodedInstruction(InstructionCodec format,
            int opcode, byte[] data) {
        this(format, opcode, data, data.length, 1);
    }

    /** Constructs an instance over two-byte elements. */
    public FillArrayDataPayloadDecodedInstruction(InstructionCodec format,
            int opcode, short[] data) {
        this(format, opcode, data, data.length, 2);
    }

    /** Constructs an instance over four-byte elements. */
    public FillArrayDataPayloadDecodedInstruction(InstructionCodec format,
            int opcode, int[] data) {
        this(format, opcode, data, data.length, 4);
    }

    /** Constructs an instance over eight-byte elements. */
    public FillArrayDataPayloadDecodedInstruction(InstructionCodec format,
            int opcode, long[] data) {
        this(format, opcode, data, data.length, 8);
    }

    /** {@inheritDoc} */
    public int getRegisterCount() {
        // Payload pseudo-instructions reference no registers.
        return 0;
    }

    /** Returns the element width, in bytes, as a short. */
    public short getElementWidthUnit() {
        return (short) widthBytes;
    }

    /** Returns the number of elements in the payload. */
    public int getSize() {
        return count;
    }

    /** Returns the raw data array (type depends on the element width). */
    public Object getData() {
        return payload;
    }

    /** {@inheritDoc} */
    public DecodedInstruction withIndex(int newIndex) {
        throw new UnsupportedOperationException("no index in instruction");
    }
}
SeleniumHQ/buck
third-party/java/dx/src/com/android/dx/io/instructions/FillArrayDataPayloadDecodedInstruction.java
Java
apache-2.0
2,782
////////////////////////////////////////////////////////////////// // // lazy_thunk_tests.cpp // // Tests for thunk functions. // // /*============================================================================= Copyright (c) 2000-2003 Brian McNamara and Yannis Smaragdakis Copyright (c) 2001-2007 Joel de Guzman Copyright (c) 2015 John Fletcher Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) ==============================================================================*/ #include <iostream> #include <boost/phoenix/core.hpp> #include <boost/phoenix/function.hpp> #include <boost/shared_ptr.hpp> #include <boost/phoenix/function/lazy_prelude.hpp> #include <boost/detail/lightweight_test.hpp> using namespace boost::phoenix; using std::cout; using std::endl; int main() { using boost::phoenix::arg_names::arg1; using boost::phoenix::arg_names::arg2; BOOST_TEST( thunk1(inc,1)()() == 2); BOOST_TEST( thunk1(inc,arg1)(1)() == 2); BOOST_TEST( thunk2(plus,1,2)()() == 3); BOOST_TEST( thunk2(plus,arg1,arg2)(1,2)() == 3); list<int> l = enum_from_to(1,5); list<int> l4 = take(4,l)(); BOOST_TEST( foldl(plus,0,l4)() == 10); BOOST_TEST( thunk3(foldl,plus,0,l4)()() == 10); BOOST_TEST( thunk3(foldl,plus,arg1,l4)(0)() == 10); return boost::report_errors(); }
fceller/arangodb
3rdParty/boost/1.69.0/libs/phoenix/test/function/lazy_thunk_tests.cpp
C++
apache-2.0
1,466
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.kms.model;

import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Container for the parameters to the {@link com.amazonaws.services.kms.AWSKMS#listKeyPolicies(ListKeyPoliciesRequest) ListKeyPolicies operation}.
 * <p>
 * Retrieves a list of policies attached to a key.
 * </p>
 *
 * @see com.amazonaws.services.kms.AWSKMS#listKeyPolicies(ListKeyPoliciesRequest)
 */
public class ListKeyPoliciesRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * A unique identifier for the customer master key. This value can be a
     * globally unique identifier, a fully specified ARN to either an alias
     * or a key, or an alias name prefixed by "alias/". <ul>
     * <li>Key ARN Example -
     * arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012</li>
     * <li>Alias ARN Example -
     * arn:aws:kms:us-east-1:123456789012:alias/MyAliasName</li>
     * <li>Globally Unique Key ID Example - 12345678-1234-1234-1234-123456789012</li>
     * <li>Alias Name Example - alias/MyAliasName</li> </ul>
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Length: </b>1 - 256<br/>
     */
    private String keyId;

    /**
     * Specify this parameter only when paginating results to indicate the
     * maximum number of policies you want listed in the response. If there
     * are additional policies beyond the maximum you specify, the
     * <code>Truncated</code> response element will be set to <code>true.</code>
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Range: </b>1 - 1000<br/>
     */
    private Integer limit;

    /**
     * Use this parameter only when paginating results, and only in a
     * subsequent request after you've received a response where the results
     * are truncated. Set it to the value of the <code>NextMarker</code> in
     * the response you just received.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Length: </b>1 - 320<br/>
     * <b>Pattern: </b>[&#92;u0020-&#92;u00FF]*<br/>
     */
    private String marker;

    /**
     * Returns the unique identifier for the customer master key (key ID,
     * key ARN, alias ARN, or "alias/"-prefixed alias name).
     *
     * @return the key identifier, or {@code null} if not set
     */
    public String getKeyId() {
        return keyId;
    }

    /**
     * Sets the unique identifier for the customer master key (key ID,
     * key ARN, alias ARN, or "alias/"-prefixed alias name).
     *
     * @param keyId the key identifier; length 1 - 256
     */
    public void setKeyId(String keyId) {
        this.keyId = keyId;
    }

    /**
     * Sets the unique identifier for the customer master key and returns
     * this request so that method calls can be chained together.
     *
     * @param keyId the key identifier; length 1 - 256
     * @return this updated request, for call chaining
     */
    public ListKeyPoliciesRequest withKeyId(String keyId) {
        this.keyId = keyId;
        return this;
    }

    /**
     * Returns the maximum number of policies to list per page of results.
     *
     * @return the page-size limit (range 1 - 1000), or {@code null} if not set
     */
    public Integer getLimit() {
        return limit;
    }

    /**
     * Sets the maximum number of policies to list per page of results. If
     * more policies exist beyond this limit, the <code>Truncated</code>
     * response element will be <code>true</code>.
     *
     * @param limit the page-size limit; range 1 - 1000
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /**
     * Sets the maximum number of policies to list per page of results and
     * returns this request so that method calls can be chained together.
     *
     * @param limit the page-size limit; range 1 - 1000
     * @return this updated request, for call chaining
     */
    public ListKeyPoliciesRequest withLimit(Integer limit) {
        this.limit = limit;
        return this;
    }

    /**
     * Returns the pagination marker (the <code>NextMarker</code> value from
     * a previous truncated response).
     *
     * @return the pagination marker, or {@code null} if not set
     */
    public String getMarker() {
        return marker;
    }

    /**
     * Sets the pagination marker. Use only in a subsequent request after a
     * truncated response, passing that response's <code>NextMarker</code>.
     *
     * @param marker the pagination marker; length 1 - 320,
     *     pattern [&#92;u0020-&#92;u00FF]*
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Sets the pagination marker and returns this request so that method
     * calls can be chained together.
     *
     * @param marker the pagination marker; length 1 - 320
     * @return this updated request, for call chaining
     */
    public ListKeyPoliciesRequest withMarker(String marker) {
        this.marker = marker;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getKeyId() != null) sb.append("KeyId: " + getKeyId() + ",");
        if (getLimit() != null) sb.append("Limit: " + getLimit() + ",");
        if (getMarker() != null) sb.append("Marker: " + getMarker() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the same value as the previous hand-rolled
        // prime-31 accumulation over (keyId, limit, marker).
        return Objects.hash(getKeyId(), getLimit(), getMarker());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        // instanceof is false for null, covering the former explicit null check.
        if (!(obj instanceof ListKeyPoliciesRequest)) return false;

        ListKeyPoliciesRequest other = (ListKeyPoliciesRequest) obj;
        return Objects.equals(getKeyId(), other.getKeyId())
            && Objects.equals(getLimit(), other.getLimit())
            && Objects.equals(getMarker(), other.getMarker());
    }

    @Override
    public ListKeyPoliciesRequest clone() {
        return (ListKeyPoliciesRequest) super.clone();
    }
}
omnifone/aws-sdk-java
aws-java-sdk-kms/src/main/java/com/amazonaws/services/kms/model/ListKeyPoliciesRequest.java
Java
apache-2.0
14,551
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.network.util;

import java.util.LinkedList;

import com.google.common.base.Preconditions;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;

/**
 * A customized frame decoder that allows intercepting raw data.
 * <p>
 * This behaves like Netty's frame decoder (with hard coded parameters that match this library's
 * needs), except it allows an interceptor to be installed to read data directly before it's
 * framed.
 * <p>
 * Unlike Netty's frame decoder, each frame is dispatched to child handlers as soon as it's
 * decoded, instead of building as many frames as the current buffer allows and dispatching
 * all of them. This allows a child handler to install an interceptor if needed.
 * <p>
 * If an interceptor is installed, framing stops, and data is instead fed directly to the
 * interceptor. When the interceptor indicates that it doesn't need to read any more data,
 * framing resumes. Interceptors should not hold references to the data buffers provided
 * to their handle() method.
 */
public class TransportFrameDecoder extends ChannelInboundHandlerAdapter {

  public static final String HANDLER_NAME = "frameDecoder";
  // Each frame is preceded by an 8-byte length (a long) that includes the length field itself.
  private static final int LENGTH_SIZE = 8;
  private static final int MAX_FRAME_SIZE = Integer.MAX_VALUE;
  // Sentinel meaning "the next frame's length has not been read yet".
  private static final int UNKNOWN_FRAME_SIZE = -1;

  // Incoming ByteBufs, queued in arrival order until consumed by framing or the interceptor.
  private final LinkedList<ByteBuf> buffers = new LinkedList<>();
  // Scratch buffer used only when the 8-byte length field straddles buffer boundaries.
  private final ByteBuf frameLenBuf = Unpooled.buffer(LENGTH_SIZE, LENGTH_SIZE);

  // Total readable bytes currently held across all queued buffers.
  private long totalSize = 0;
  // Body size of the frame being assembled, or UNKNOWN_FRAME_SIZE.
  private long nextFrameSize = UNKNOWN_FRAME_SIZE;
  // volatile: may be installed from a handler thread while reads observe it here.
  private volatile Interceptor interceptor;

  /**
   * Queues the incoming buffer and drains as much of it as possible: data is fed to the
   * interceptor while one is installed, otherwise complete frames are decoded and fired
   * downstream one at a time.
   */
  @Override
  public void channelRead(ChannelHandlerContext ctx, Object data) throws Exception {
    ByteBuf in = (ByteBuf) data;
    buffers.add(in);
    totalSize += in.readableBytes();

    while (!buffers.isEmpty()) {
      // First, feed the interceptor, and if it's still active, try again.
      if (interceptor != null) {
        ByteBuf first = buffers.getFirst();
        int available = first.readableBytes();
        if (feedInterceptor(first)) {
          assert !first.isReadable() : "Interceptor still active but buffer has data.";
        }

        // Account for whatever the interceptor consumed; drop the buffer once fully read.
        int read = available - first.readableBytes();
        if (read == available) {
          buffers.removeFirst().release();
        }
        totalSize -= read;
      } else {
        // Interceptor is not active, so try to decode one frame.
        ByteBuf frame = decodeNext();
        if (frame == null) {
          break;
        }
        ctx.fireChannelRead(frame);
      }
    }
  }

  /**
   * Reads the next frame's body size from the queued buffers, caching it in
   * {@link #nextFrameSize}. Returns {@code UNKNOWN_FRAME_SIZE} until 8 bytes are available.
   * The stored value is the wire length minus the length field itself.
   */
  private long decodeFrameSize() {
    if (nextFrameSize != UNKNOWN_FRAME_SIZE || totalSize < LENGTH_SIZE) {
      return nextFrameSize;
    }

    // We know there's enough data. If the first buffer contains all the data, great. Otherwise,
    // hold the bytes for the frame length in a composite buffer until we have enough data to read
    // the frame size. Normally, it should be rare to need more than one buffer to read the frame
    // size.
    ByteBuf first = buffers.getFirst();
    if (first.readableBytes() >= LENGTH_SIZE) {
      nextFrameSize = first.readLong() - LENGTH_SIZE;
      totalSize -= LENGTH_SIZE;
      if (!first.isReadable()) {
        buffers.removeFirst().release();
      }
      return nextFrameSize;
    }

    // Length field spans multiple buffers: accumulate it byte-by-byte into frameLenBuf.
    while (frameLenBuf.readableBytes() < LENGTH_SIZE) {
      ByteBuf next = buffers.getFirst();
      int toRead = Math.min(next.readableBytes(), LENGTH_SIZE - frameLenBuf.readableBytes());
      frameLenBuf.writeBytes(next, toRead);
      if (!next.isReadable()) {
        buffers.removeFirst().release();
      }
    }

    nextFrameSize = frameLenBuf.readLong() - LENGTH_SIZE;
    totalSize -= LENGTH_SIZE;
    frameLenBuf.clear();
    return nextFrameSize;
  }

  /**
   * Decodes one complete frame from the queued buffers, or returns {@code null} if not
   * enough data has arrived yet. Single-buffer frames are returned as a (retained) slice;
   * multi-buffer frames are assembled into a composite buffer.
   */
  private ByteBuf decodeNext() {
    long frameSize = decodeFrameSize();
    if (frameSize == UNKNOWN_FRAME_SIZE || totalSize < frameSize) {
      return null;
    }

    // Reset size for next frame.
    nextFrameSize = UNKNOWN_FRAME_SIZE;

    Preconditions.checkArgument(frameSize < MAX_FRAME_SIZE, "Too large frame: %s", frameSize);
    Preconditions.checkArgument(frameSize > 0, "Frame length should be positive: %s", frameSize);

    // If the first buffer holds the entire frame, return it.
    int remaining = (int) frameSize;
    if (buffers.getFirst().readableBytes() >= remaining) {
      return nextBufferForFrame(remaining);
    }

    // Otherwise, create a composite buffer.
    CompositeByteBuf frame = buffers.getFirst().alloc().compositeBuffer(Integer.MAX_VALUE);
    while (remaining > 0) {
      ByteBuf next = nextBufferForFrame(remaining);
      remaining -= next.readableBytes();
      // addComponent does not advance the writer index, so bump it manually.
      frame.addComponent(next).writerIndex(frame.writerIndex() + next.readableBytes());
    }
    assert remaining == 0;
    return frame;
  }

  /**
   * Takes the first buffer in the internal list, and either adjust it to fit in the frame
   * (by taking a slice out of it) or remove it from the internal list.
   */
  private ByteBuf nextBufferForFrame(int bytesToRead) {
    ByteBuf buf = buffers.getFirst();
    ByteBuf frame;

    if (buf.readableBytes() > bytesToRead) {
      // Buffer holds more than this frame needs: slice it and retain the original.
      frame = buf.retain().readSlice(bytesToRead);
      totalSize -= bytesToRead;
    } else {
      // Buffer is fully consumed by the frame: hand over ownership.
      frame = buf;
      buffers.removeFirst();
      totalSize -= frame.readableBytes();
    }

    return frame;
  }

  /**
   * Releases all queued buffers and the length scratch buffer, and notifies any installed
   * interceptor that the channel went away.
   */
  @Override
  public void channelInactive(ChannelHandlerContext ctx) throws Exception {
    for (ByteBuf b : buffers) {
      b.release();
    }
    if (interceptor != null) {
      interceptor.channelInactive();
    }
    frameLenBuf.release();
    super.channelInactive(ctx);
  }

  /** Forwards pipeline exceptions to the interceptor (if any) before propagating them. */
  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
    if (interceptor != null) {
      interceptor.exceptionCaught(cause);
    }
    super.exceptionCaught(ctx, cause);
  }

  /**
   * Installs an interceptor that receives raw bytes instead of decoded frames.
   * Only one interceptor may be active at a time.
   */
  public void setInterceptor(Interceptor interceptor) {
    Preconditions.checkState(this.interceptor == null, "Already have an interceptor.");
    this.interceptor = interceptor;
  }

  /**
   * @return Whether the interceptor is still active after processing the data.
   */
  private boolean feedInterceptor(ByteBuf buf) throws Exception {
    if (interceptor != null && !interceptor.handle(buf)) {
      // Interceptor is done; uninstall it so framing resumes.
      interceptor = null;
    }
    return interceptor != null;
  }

  public interface Interceptor {

    /**
     * Handles data received from the remote end.
     *
     * @param data Buffer containing data.
     * @return "true" if the interceptor expects more data, "false" to uninstall the interceptor.
     */
    boolean handle(ByteBuf data) throws Exception;

    /** Called if an exception is thrown in the channel pipeline. */
    void exceptionCaught(Throwable cause) throws Exception;

    /** Called if the channel is closed and the interceptor is still installed. */
    void channelInactive() throws Exception;
  }

}
brad-kaiser/spark
common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
Java
apache-2.0
7,863
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package testing import ( "fmt" "sync" "time" "github.com/golang/glog" "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/kubernetes/fake" core "k8s.io/client-go/testing" "k8s.io/kubernetes/pkg/volume" "k8s.io/kubernetes/pkg/volume/util" ) const TestPluginName = "kubernetes.io/testPlugin" // GetTestVolumeSpec returns a test volume spec func GetTestVolumeSpec(volumeName string, diskName v1.UniqueVolumeName) *volume.Spec { return &volume.Spec{ Volume: &v1.Volume{ Name: volumeName, VolumeSource: v1.VolumeSource{ GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{ PDName: string(diskName), FSType: "fake", ReadOnly: false, }, }, }, PersistentVolume: &v1.PersistentVolume{ Spec: v1.PersistentVolumeSpec{ AccessModes: []v1.PersistentVolumeAccessMode{ v1.ReadWriteOnce, }, }, }, } } var extraPods *v1.PodList func CreateTestClient() *fake.Clientset { fakeClient := &fake.Clientset{} extraPods = &v1.PodList{} fakeClient.AddReactor("list", "pods", func(action core.Action) (handled bool, ret runtime.Object, err error) { obj := &v1.PodList{} podNamePrefix := "mypod" namespace := "mynamespace" for i := 0; i < 5; i++ { podName := fmt.Sprintf("%s-%d", podNamePrefix, i) pod := v1.Pod{ Status: v1.PodStatus{ Phase: v1.PodRunning, }, ObjectMeta: metav1.ObjectMeta{ Name: podName, UID: types.UID(podName), 
Namespace: namespace, Labels: map[string]string{ "name": podName, }, }, Spec: v1.PodSpec{ Containers: []v1.Container{ { Name: "containerName", Image: "containerImage", VolumeMounts: []v1.VolumeMount{ { Name: "volumeMountName", ReadOnly: false, MountPath: "/mnt", }, }, }, }, Volumes: []v1.Volume{ { Name: "volumeName", VolumeSource: v1.VolumeSource{ GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{ PDName: "pdName", FSType: "ext4", ReadOnly: false, }, }, }, }, NodeName: "mynode", }, } obj.Items = append(obj.Items, pod) } for _, pod := range extraPods.Items { obj.Items = append(obj.Items, pod) } return true, obj, nil }) fakeClient.AddReactor("create", "pods", func(action core.Action) (handled bool, ret runtime.Object, err error) { createAction := action.(core.CreateAction) pod := createAction.GetObject().(*v1.Pod) extraPods.Items = append(extraPods.Items, *pod) return true, createAction.GetObject(), nil }) fakeClient.AddReactor("list", "nodes", func(action core.Action) (handled bool, ret runtime.Object, err error) { obj := &v1.NodeList{} nodeNamePrefix := "mynode" for i := 0; i < 5; i++ { var nodeName string if i != 0 { nodeName = fmt.Sprintf("%s-%d", nodeNamePrefix, i) } else { // We want also the "mynode" node since all the testing pods live there nodeName = nodeNamePrefix } node := v1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: nodeName, Labels: map[string]string{ "name": nodeName, }, Annotations: map[string]string{ util.ControllerManagedAttachAnnotation: "true", }, }, Status: v1.NodeStatus{ VolumesAttached: []v1.AttachedVolume{ { Name: TestPluginName + "/lostVolumeName", DevicePath: "fake/path", }, }, }, Spec: v1.NodeSpec{ExternalID: string(nodeName)}, } obj.Items = append(obj.Items, node) } return true, obj, nil }) fakeWatch := watch.NewFake() fakeClient.AddWatchReactor("*", core.DefaultWatchReactor(fakeWatch, nil)) return fakeClient } // NewPod returns a test pod object func NewPod(uid, name string) *v1.Pod { return &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ 
UID: types.UID(uid), Name: name, Namespace: name, }, } } // NewPod returns a test pod object func NewPodWithVolume(podName, volumeName, nodeName string) *v1.Pod { return &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ UID: types.UID(podName), Name: podName, Namespace: "mynamespace", Labels: map[string]string{ "name": podName, }, }, Spec: v1.PodSpec{ Containers: []v1.Container{ { Name: "containerName", Image: "containerImage", VolumeMounts: []v1.VolumeMount{ { Name: "volumeMountName", ReadOnly: false, MountPath: "/mnt", }, }, }, }, Volumes: []v1.Volume{ { Name: volumeName, VolumeSource: v1.VolumeSource{ GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{ PDName: "pdName", FSType: "ext4", ReadOnly: false, }, }, }, }, NodeName: nodeName, }, } } type TestPlugin struct { ErrorEncountered bool attachedVolumeMap map[string][]string detachedVolumeMap map[string][]string pluginLock *sync.RWMutex } func (plugin *TestPlugin) Init(host volume.VolumeHost) error { return nil } func (plugin *TestPlugin) GetPluginName() string { return TestPluginName } func (plugin *TestPlugin) GetVolumeName(spec *volume.Spec) (string, error) { plugin.pluginLock.Lock() defer plugin.pluginLock.Unlock() if spec == nil { glog.Errorf("GetVolumeName called with nil volume spec") plugin.ErrorEncountered = true } return spec.Name(), nil } func (plugin *TestPlugin) CanSupport(spec *volume.Spec) bool { plugin.pluginLock.Lock() defer plugin.pluginLock.Unlock() if spec == nil { glog.Errorf("CanSupport called with nil volume spec") plugin.ErrorEncountered = true } return true } func (plugin *TestPlugin) RequiresRemount() bool { return false } func (plugin *TestPlugin) NewMounter(spec *volume.Spec, podRef *v1.Pod, opts volume.VolumeOptions) (volume.Mounter, error) { plugin.pluginLock.Lock() defer plugin.pluginLock.Unlock() if spec == nil { glog.Errorf("NewMounter called with nil volume spec") plugin.ErrorEncountered = true } return nil, nil } func (plugin *TestPlugin) NewUnmounter(name string, podUID types.UID) 
(volume.Unmounter, error) { return nil, nil } func (plugin *TestPlugin) ConstructVolumeSpec(volumeName, mountPath string) (*volume.Spec, error) { fakeVolume := &v1.Volume{ Name: volumeName, VolumeSource: v1.VolumeSource{ GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{ PDName: "pdName", FSType: "ext4", ReadOnly: false, }, }, } return volume.NewSpecFromVolume(fakeVolume), nil } func (plugin *TestPlugin) NewAttacher() (volume.Attacher, error) { attacher := testPluginAttacher{ ErrorEncountered: &plugin.ErrorEncountered, attachedVolumeMap: plugin.attachedVolumeMap, pluginLock: plugin.pluginLock, } return &attacher, nil } func (plugin *TestPlugin) NewDetacher() (volume.Detacher, error) { detacher := testPluginDetacher{ detachedVolumeMap: plugin.detachedVolumeMap, pluginLock: plugin.pluginLock, } return &detacher, nil } func (plugin *TestPlugin) GetDeviceMountRefs(deviceMountPath string) ([]string, error) { return []string{}, nil } func (plugin *TestPlugin) SupportsMountOption() bool { return false } func (plugin *TestPlugin) SupportsBulkVolumeVerification() bool { return false } func (plugin *TestPlugin) GetErrorEncountered() bool { plugin.pluginLock.RLock() defer plugin.pluginLock.RUnlock() return plugin.ErrorEncountered } func (plugin *TestPlugin) GetAttachedVolumes() map[string][]string { plugin.pluginLock.RLock() defer plugin.pluginLock.RUnlock() ret := make(map[string][]string) for nodeName, volumeList := range plugin.attachedVolumeMap { ret[nodeName] = make([]string, len(volumeList)) copy(ret[nodeName], volumeList) } return ret } func (plugin *TestPlugin) GetDetachedVolumes() map[string][]string { plugin.pluginLock.RLock() defer plugin.pluginLock.RUnlock() ret := make(map[string][]string) for nodeName, volumeList := range plugin.detachedVolumeMap { ret[nodeName] = make([]string, len(volumeList)) copy(ret[nodeName], volumeList) } return ret } func CreateTestPlugin() []volume.VolumePlugin { attachedVolumes := make(map[string][]string) detachedVolumes := 
make(map[string][]string) return []volume.VolumePlugin{&TestPlugin{ ErrorEncountered: false, attachedVolumeMap: attachedVolumes, detachedVolumeMap: detachedVolumes, pluginLock: &sync.RWMutex{}, }} } // Attacher type testPluginAttacher struct { ErrorEncountered *bool attachedVolumeMap map[string][]string pluginLock *sync.RWMutex } func (attacher *testPluginAttacher) Attach(spec *volume.Spec, nodeName types.NodeName) (string, error) { attacher.pluginLock.Lock() defer attacher.pluginLock.Unlock() if spec == nil { *attacher.ErrorEncountered = true glog.Errorf("Attach called with nil volume spec") return "", fmt.Errorf("Attach called with nil volume spec") } attacher.attachedVolumeMap[string(nodeName)] = append(attacher.attachedVolumeMap[string(nodeName)], spec.Name()) return spec.Name(), nil } func (attacher *testPluginAttacher) VolumesAreAttached(specs []*volume.Spec, nodeName types.NodeName) (map[*volume.Spec]bool, error) { return nil, nil } func (attacher *testPluginAttacher) WaitForAttach(spec *volume.Spec, devicePath string, pod *v1.Pod, timeout time.Duration) (string, error) { attacher.pluginLock.Lock() defer attacher.pluginLock.Unlock() if spec == nil { *attacher.ErrorEncountered = true glog.Errorf("WaitForAttach called with nil volume spec") return "", fmt.Errorf("WaitForAttach called with nil volume spec") } fakePath := fmt.Sprintf("%s/%s", devicePath, spec.Name()) return fakePath, nil } func (attacher *testPluginAttacher) GetDeviceMountPath(spec *volume.Spec) (string, error) { attacher.pluginLock.Lock() defer attacher.pluginLock.Unlock() if spec == nil { *attacher.ErrorEncountered = true glog.Errorf("GetDeviceMountPath called with nil volume spec") return "", fmt.Errorf("GetDeviceMountPath called with nil volume spec") } return "", nil } func (attacher *testPluginAttacher) MountDevice(spec *volume.Spec, devicePath string, deviceMountPath string) error { attacher.pluginLock.Lock() defer attacher.pluginLock.Unlock() if spec == nil { *attacher.ErrorEncountered = 
true glog.Errorf("MountDevice called with nil volume spec") return fmt.Errorf("MountDevice called with nil volume spec") } return nil } // Detacher type testPluginDetacher struct { detachedVolumeMap map[string][]string pluginLock *sync.RWMutex } func (detacher *testPluginDetacher) Detach(volumeName string, nodeName types.NodeName) error { detacher.pluginLock.Lock() defer detacher.pluginLock.Unlock() detacher.detachedVolumeMap[string(nodeName)] = append(detacher.detachedVolumeMap[string(nodeName)], volumeName) return nil } func (detacher *testPluginDetacher) UnmountDevice(deviceMountPath string) error { return nil }
gaocegege/kubernetes
pkg/controller/volume/attachdetach/testing/testvolumespec.go
GO
apache-2.0
11,617
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/> <meta http-equiv="X-UA-Compatible" content="IE=9"/> <title>Boost.Locale: Class Members</title> <link href="tabs.css" rel="stylesheet" type="text/css"/> <link href="doxygen.css" rel="stylesheet" type="text/css" /> <link href="navtree.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="jquery.js"></script> <script type="text/javascript" src="resize.js"></script> <script type="text/javascript" src="navtree.js"></script> <script type="text/javascript"> $(document).ready(initResizable); </script> </head> <body> <div id="top"><!-- do not remove this div! --> <div id="titlearea"> <table cellspacing="0" cellpadding="0"> <tbody> <tr style="height: 56px;"> <td id="projectlogo"><img alt="Logo" src="boost-small.png"/></td> <td style="padding-left: 0.5em;"> <div id="projectname">Boost.Locale </div> </td> </tr> </tbody> </table> </div> <!-- Generated by Doxygen 1.7.6.1 --> <div id="navrow1" class="tabs"> <ul class="tablist"> <li><a href="index.html"><span>Main&#160;Page</span></a></li> <li><a href="modules.html"><span>Modules</span></a></li> <li><a href="namespaces.html"><span>Namespaces</span></a></li> <li class="current"><a href="annotated.html"><span>Classes</span></a></li> <li><a href="files.html"><span>Files</span></a></li> <li><a href="examples.html"><span>Examples</span></a></li> </ul> </div> <div id="navrow2" class="tabs2"> <ul class="tablist"> <li><a href="annotated.html"><span>Class&#160;List</span></a></li> <li><a href="hierarchy.html"><span>Class&#160;Hierarchy</span></a></li> <li class="current"><a href="functions.html"><span>Class&#160;Members</span></a></li> </ul> </div> <div id="navrow3" class="tabs2"> <ul class="tablist"> <li class="current"><a href="functions.html"><span>All</span></a></li> <li><a 
href="functions_func.html"><span>Functions</span></a></li> <li><a href="functions_vars.html"><span>Variables</span></a></li> <li><a href="functions_type.html"><span>Typedefs</span></a></li> <li><a href="functions_enum.html"><span>Enumerations</span></a></li> <li><a href="functions_eval.html"><span>Enumerator</span></a></li> </ul> </div> <div id="navrow4" class="tabs3"> <ul class="tablist"> <li><a href="functions.html#index_a"><span>a</span></a></li> <li><a href="functions_0x62.html#index_b"><span>b</span></a></li> <li><a href="functions_0x63.html#index_c"><span>c</span></a></li> <li><a href="functions_0x64.html#index_d"><span>d</span></a></li> <li><a href="functions_0x65.html#index_e"><span>e</span></a></li> <li><a href="functions_0x66.html#index_f"><span>f</span></a></li> <li><a href="functions_0x67.html#index_g"><span>g</span></a></li> <li><a href="functions_0x68.html#index_h"><span>h</span></a></li> <li><a href="functions_0x69.html#index_i"><span>i</span></a></li> <li><a href="functions_0x6c.html#index_l"><span>l</span></a></li> <li><a href="functions_0x6d.html#index_m"><span>m</span></a></li> <li><a href="functions_0x6e.html#index_n"><span>n</span></a></li> <li><a href="functions_0x6f.html#index_o"><span>o</span></a></li> <li><a href="functions_0x70.html#index_p"><span>p</span></a></li> <li><a href="functions_0x71.html#index_q"><span>q</span></a></li> <li><a href="functions_0x72.html#index_r"><span>r</span></a></li> <li><a href="functions_0x73.html#index_s"><span>s</span></a></li> <li class="current"><a href="functions_0x74.html#index_t"><span>t</span></a></li> <li><a href="functions_0x75.html#index_u"><span>u</span></a></li> <li><a href="functions_0x76.html#index_v"><span>v</span></a></li> <li><a href="functions_0x77.html#index_w"><span>w</span></a></li> <li><a href="functions_0x7e.html#index_0x7e"><span>~</span></a></li> </ul> </div> </div> <div id="side-nav" class="ui-resizable side-nav-resizable"> <div id="nav-tree"> <div id="nav-tree-contents"> </div> 
</div> <div id="splitbar" style="-moz-user-select:none;" class="ui-resizable-handle"> </div> </div> <script type="text/javascript"> initNavTree('functions_0x74.html',''); </script> <div id="doc-content"> <div class="contents"> <div class="textblock">Here is a list of all documented class members with links to the class documentation for each member:</div> <h3><a class="anchor" id="index_t"></a>- t -</h3><ul> <li>tertiary : <a class="el" href="classboost_1_1locale_1_1collator__base.html#a73c12de809733273304fef7f0af28b22a3a852752e9663b7b3340c435d0cfe36a">boost::locale::collator_base</a> </li> <li>time() : <a class="el" href="classboost_1_1locale_1_1date__time.html#a17aa2b54462ebcf1860f8e4db9f7868e">boost::locale::date_time</a> </li> <li>time_flags() : <a class="el" href="classboost_1_1locale_1_1ios__info.html#a06bdad5c9b11e57c16ad623776ce5096">boost::locale::ios_info</a> </li> <li>time_zone() : <a class="el" href="classboost_1_1locale_1_1ios__info.html#a3f140278815b521f1568c52d0a9fea11">boost::locale::ios_info</a> </li> <li>title_case : <a class="el" href="classboost_1_1locale_1_1converter__base.html#a726bc2bbcbb6f1e550cca14163fb669bac136b257286085de7bd7eb4a7876dfa7">boost::locale::converter_base</a> </li> <li>to_unicode() : <a class="el" href="classboost_1_1locale_1_1util_1_1base__converter.html#a27181b314e09f62ae9ea8fcd30d4e7c4">boost::locale::util::base_converter</a> </li> <li>trail_length() : <a class="el" href="structboost_1_1locale_1_1utf_1_1utf__traits.html#a8a6b72ba87a817652f522018df51a9a7">boost::locale::utf::utf_traits&lt; CharType, size &gt;</a> </li> <li>transform() : <a class="el" href="classboost_1_1locale_1_1collator.html#a8dc6443fb193616332ca50f207a9b189">boost::locale::collator&lt; CharType &gt;</a> </li> <li>type : <a class="el" href="structboost_1_1locale_1_1date__time__period.html#aa6511600eb5264c8597f700668e9c628">boost::locale::date_time_period</a> </li> </ul> </div><!-- contents --> </div> <div id="nav-path" class="navpath"> <ul> <li 
class="footer"> &copy; Copyright 2009-2012 Artyom Beilis, Distributed under the <a href="http://www.boost.org/LICENSE_1_0.txt">Boost Software License</a>, Version 1.0. </li> </ul> </div> </body> </html>
biospi/seamass-windeps
src/boost_1_57_0/libs/locale/doc/html/functions_0x74.html
HTML
apache-2.0
6,553
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.kinesis.model.transform; import static com.amazonaws.util.StringUtils.UTF8; import static com.amazonaws.util.StringUtils.COMMA_SEPARATOR; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.List; import java.util.regex.Pattern; import com.amazonaws.AmazonClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.kinesis.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.util.json.*; /** * List Tags For Stream Request Marshaller */ public class ListTagsForStreamRequestMarshaller implements Marshaller<Request<ListTagsForStreamRequest>, ListTagsForStreamRequest> { public Request<ListTagsForStreamRequest> marshall(ListTagsForStreamRequest listTagsForStreamRequest) { if (listTagsForStreamRequest == null) { throw new AmazonClientException("Invalid argument passed to marshall(...)"); } Request<ListTagsForStreamRequest> request = new DefaultRequest<ListTagsForStreamRequest>(listTagsForStreamRequest, "AmazonKinesis"); String target = 
"Kinesis_20131202.ListTagsForStream"; request.addHeader("X-Amz-Target", target); request.setHttpMethod(HttpMethodName.POST); request.setResourcePath(""); try { StringWriter stringWriter = new StringWriter(); JSONWriter jsonWriter = new JSONWriter(stringWriter); jsonWriter.object(); if (listTagsForStreamRequest.getStreamName() != null) { jsonWriter.key("StreamName").value(listTagsForStreamRequest.getStreamName()); } if (listTagsForStreamRequest.getExclusiveStartTagKey() != null) { jsonWriter.key("ExclusiveStartTagKey").value(listTagsForStreamRequest.getExclusiveStartTagKey()); } if (listTagsForStreamRequest.getLimit() != null) { jsonWriter.key("Limit").value(listTagsForStreamRequest.getLimit()); } jsonWriter.endObject(); String snippet = stringWriter.toString(); byte[] content = snippet.getBytes(UTF8); request.setContent(new StringInputStream(snippet)); request.addHeader("Content-Length", Integer.toString(content.length)); request.addHeader("Content-Type", "application/x-amz-json-1.1"); } catch(Throwable t) { throw new AmazonClientException("Unable to marshall request to JSON: " + t.getMessage(), t); } return request; } }
mahaliachante/aws-sdk-java
aws-java-sdk-kinesis/src/main/java/com/amazonaws/services/kinesis/model/transform/ListTagsForStreamRequestMarshaller.java
Java
apache-2.0
3,504
// Protocol Buffers for Go with Gadgets // // Copyright (c) 2013, The GoGo Authors. All rights reserved. // http://github.com/gogo/protobuf // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package proto import ( "fmt" "reflect" ) func (tm *TextMarshaler) writeEnum(w *textWriter, v reflect.Value, props *Properties) error { m, ok := enumStringMaps[props.Enum] if !ok { if err := tm.writeAny(w, v, props); err != nil { return err } } key := int32(0) if v.Kind() == reflect.Ptr { key = int32(v.Elem().Int()) } else { key = int32(v.Int()) } s, ok := m[key] if !ok { if err := tm.writeAny(w, v, props); err != nil { return err } } _, err := fmt.Fprint(w, s) return err }
xiaozhu36/terraform-provider
vendor/github.com/gogo/protobuf/proto/text_gogo.go
GO
apache-2.0
1,960
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.job.entries.tableexists; import java.util.Arrays; import java.util.List; import org.junit.ClassRule; import org.pentaho.di.job.entry.loadSave.JobEntryLoadSaveTestSupport; import org.pentaho.di.junit.rules.RestorePDIEngineEnvironment; public class JobEntryTableExistsLoadSaveTest extends JobEntryLoadSaveTestSupport<JobEntryTableExists> { @ClassRule public static RestorePDIEngineEnvironment env = new RestorePDIEngineEnvironment(); @Override protected Class<JobEntryTableExists> getJobEntryClass() { return JobEntryTableExists.class; } @Override protected List<String> listCommonAttributes() { return Arrays.asList( new String[] { "tablename", "schemaname", "database" } ); } }
mbatchelor/pentaho-kettle
engine/src/test/java/org/pentaho/di/job/entries/tableexists/JobEntryTableExistsLoadSaveTest.java
Java
apache-2.0
1,643
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.codepipeline.model.transform; import static com.amazonaws.util.StringUtils.UTF8; import static com.amazonaws.util.StringUtils.COMMA_SEPARATOR; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.List; import java.util.regex.Pattern; import com.amazonaws.AmazonClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.codepipeline.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.util.json.*; /** * GetPipelineRequest Marshaller */ public class GetPipelineRequestMarshaller implements Marshaller<Request<GetPipelineRequest>, GetPipelineRequest> { public Request<GetPipelineRequest> marshall( GetPipelineRequest getPipelineRequest) { if (getPipelineRequest == null) { throw new AmazonClientException( "Invalid argument passed to marshall(...)"); } Request<GetPipelineRequest> request = new DefaultRequest<GetPipelineRequest>( getPipelineRequest, "AWSCodePipeline"); request.addHeader("X-Amz-Target", "CodePipeline_20150709.GetPipeline"); 
request.setHttpMethod(HttpMethodName.POST); request.setResourcePath(""); try { StringWriter stringWriter = new StringWriter(); JSONWriter jsonWriter = new JSONWriter(stringWriter); jsonWriter.object(); if (getPipelineRequest.getName() != null) { jsonWriter.key("name").value(getPipelineRequest.getName()); } if (getPipelineRequest.getVersion() != null) { jsonWriter.key("version") .value(getPipelineRequest.getVersion()); } jsonWriter.endObject(); String snippet = stringWriter.toString(); byte[] content = snippet.getBytes(UTF8); request.setContent(new StringInputStream(snippet)); request.addHeader("Content-Length", Integer.toString(content.length)); request.addHeader("Content-Type", "application/x-amz-json-1.1"); } catch (Throwable t) { throw new AmazonClientException( "Unable to marshall request to JSON: " + t.getMessage(), t); } return request; } }
xuzha/aws-sdk-java
aws-java-sdk-codepipeline/src/main/java/com/amazonaws/services/codepipeline/model/transform/GetPipelineRequestMarshaller.java
Java
apache-2.0
3,301
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.codepipeline.model.transform; import static com.amazonaws.util.StringUtils.UTF8; import static com.amazonaws.util.StringUtils.COMMA_SEPARATOR; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.List; import java.util.regex.Pattern; import com.amazonaws.AmazonClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.codepipeline.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.util.json.*; /** * UpdatePipelineRequest Marshaller */ public class UpdatePipelineRequestMarshaller implements Marshaller<Request<UpdatePipelineRequest>, UpdatePipelineRequest> { public Request<UpdatePipelineRequest> marshall( UpdatePipelineRequest updatePipelineRequest) { if (updatePipelineRequest == null) { throw new AmazonClientException( "Invalid argument passed to marshall(...)"); } Request<UpdatePipelineRequest> request = new DefaultRequest<UpdatePipelineRequest>( updatePipelineRequest, "AWSCodePipeline"); request.addHeader("X-Amz-Target", 
"CodePipeline_20150709.UpdatePipeline"); request.setHttpMethod(HttpMethodName.POST); request.setResourcePath(""); try { StringWriter stringWriter = new StringWriter(); JSONWriter jsonWriter = new JSONWriter(stringWriter); jsonWriter.object(); if (updatePipelineRequest.getPipeline() != null) { jsonWriter.key("pipeline"); PipelineDeclarationJsonMarshaller.getInstance().marshall( updatePipelineRequest.getPipeline(), jsonWriter); } jsonWriter.endObject(); String snippet = stringWriter.toString(); byte[] content = snippet.getBytes(UTF8); request.setContent(new StringInputStream(snippet)); request.addHeader("Content-Length", Integer.toString(content.length)); request.addHeader("Content-Type", "application/x-amz-json-1.1"); } catch (Throwable t) { throw new AmazonClientException( "Unable to marshall request to JSON: " + t.getMessage(), t); } return request; } }
malti1yadav/aws-sdk-java
aws-java-sdk-codepipeline/src/main/java/com/amazonaws/services/codepipeline/model/transform/UpdatePipelineRequestMarshaller.java
Java
apache-2.0
3,295
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.admin.cluster.repositories.put; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; /** * Register repository request. * <p/> * Registers a repository with given name, type and settings. 
If the repository with the same name already * exists in the cluster, the new repository will replace the existing repository. */ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryRequest> { private String name; private String type; private boolean verify = true; private Settings settings = EMPTY_SETTINGS; public PutRepositoryRequest() { } /** * Constructs a new put repository request with the provided name. */ public PutRepositoryRequest(String name) { this.name = name; } @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (name == null) { validationException = addValidationError("name is missing", validationException); } if (type == null) { validationException = addValidationError("type is missing", validationException); } return validationException; } /** * Sets the name of the repository. * * @param name repository name */ public PutRepositoryRequest name(String name) { this.name = name; return this; } /** * The name of the repository. * * @return repository name */ public String name() { return this.name; } /** * The type of the repository * <p/> * <ul> * <li>"fs" - shared filesystem repository</li> * </ul> * * @param type repository type * @return this request */ public PutRepositoryRequest type(String type) { this.type = type; return this; } /** * Returns repository type * * @return repository type */ public String type() { return this.type; } /** * Sets the repository settings * * @param settings repository settings * @return this request */ public PutRepositoryRequest settings(Settings settings) { this.settings = settings; return this; } /** * Sets the repository settings * * @param settings repository settings * @return this request */ public PutRepositoryRequest settings(Settings.Builder settings) { this.settings = settings.build(); return this; } /** * Sets the repository settings. 
* * @param source repository settings in json, yaml or properties format * @return this request */ public PutRepositoryRequest settings(String source) { this.settings = Settings.settingsBuilder().loadFromSource(source).build(); return this; } /** * Sets the repository settings. * * @param source repository settings * @return this request */ public PutRepositoryRequest settings(Map<String, Object> source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); settings(builder.string()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } return this; } /** * Returns repository settings * * @return repository settings */ public Settings settings() { return this.settings; } /** * Sets whether or not the repository should be verified after creation */ public PutRepositoryRequest verify(boolean verify) { this.verify = verify; return this; } /** * Returns true if repository should be verified after creation */ public boolean verify() { return this.verify; } /** * Parses repository definition. * * @param repositoryDefinition repository definition */ public PutRepositoryRequest source(XContentBuilder repositoryDefinition) { return source(repositoryDefinition.bytes()); } /** * Parses repository definition. * * @param repositoryDefinition repository definition */ public PutRepositoryRequest source(Map repositoryDefinition) { Map<String, Object> source = repositoryDefinition; for (Map.Entry<String, Object> entry : source.entrySet()) { String name = entry.getKey(); if (name.equals("type")) { type(entry.getValue().toString()); } else if (name.equals("settings")) { if (!(entry.getValue() instanceof Map)) { throw new IllegalArgumentException("Malformed settings section, should include an inner object"); } settings((Map<String, Object>) entry.getValue()); } } return this; } /** * Parses repository definition. 
* JSON, Smile and YAML formats are supported * * @param repositoryDefinition repository definition */ public PutRepositoryRequest source(String repositoryDefinition) { try (XContentParser parser = XContentFactory.xContent(repositoryDefinition).createParser(repositoryDefinition)) { return source(parser.mapOrdered()); } catch (IOException e) { throw new IllegalArgumentException("failed to parse repository source [" + repositoryDefinition + "]", e); } } /** * Parses repository definition. * JSON, Smile and YAML formats are supported * * @param repositoryDefinition repository definition */ public PutRepositoryRequest source(byte[] repositoryDefinition) { return source(repositoryDefinition, 0, repositoryDefinition.length); } /** * Parses repository definition. * JSON, Smile and YAML formats are supported * * @param repositoryDefinition repository definition */ public PutRepositoryRequest source(byte[] repositoryDefinition, int offset, int length) { try (XContentParser parser = XContentFactory.xContent(repositoryDefinition, offset, length).createParser(repositoryDefinition, offset, length)) { return source(parser.mapOrdered()); } catch (IOException e) { throw new IllegalArgumentException("failed to parse repository source", e); } } /** * Parses repository definition. 
* JSON, Smile and YAML formats are supported * * @param repositoryDefinition repository definition */ public PutRepositoryRequest source(BytesReference repositoryDefinition) { try (XContentParser parser = XContentFactory.xContent(repositoryDefinition).createParser(repositoryDefinition)) { return source(parser.mapOrdered()); } catch (IOException e) { throw new IllegalArgumentException("failed to parse template source", e); } } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); name = in.readString(); type = in.readString(); settings = readSettingsFromStream(in); readTimeout(in); verify = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(name); out.writeString(type); writeSettingsToStream(settings, out); writeTimeout(out); out.writeBoolean(verify); } }
rento19962/elasticsearch
core/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java
Java
apache-2.0
9,394
require 'singleton' # The Inflector transforms words from singular to plural, class names to table names, modularized class names to ones without, # and class names to foreign keys. The default inflections for pluralization, singularization, and uncountable words are kept # in inflections.rb. module Inflector # A singleton instance of this class is yielded by Inflector.inflections, which can then be used to specify additional # inflection rules. Examples: # # Inflector.inflections do |inflect| # inflect.plural /^(ox)$/i, '\1\2en' # inflect.singular /^(ox)en/i, '\1' # # inflect.irregular 'octopus', 'octopi' # # inflect.uncountable "equipment" # end # # New rules are added at the top. So in the example above, the irregular rule for octopus will now be the first of the # pluralization and singularization rules that is runs. This guarantees that your rules run before any of the rules that may # already have been loaded. class Inflections include Singleton attr_reader :plurals, :singulars, :uncountables def initialize @plurals, @singulars, @uncountables = [], [], [] end # Specifies a new pluralization rule and its replacement. The rule can either be a string or a regular expression. # The replacement should always be a string that may include references to the matched data from the rule. def plural(rule, replacement) @plurals.insert(0, [rule, replacement]) end # Specifies a new singularization rule and its replacement. The rule can either be a string or a regular expression. # The replacement should always be a string that may include references to the matched data from the rule. def singular(rule, replacement) @singulars.insert(0, [rule, replacement]) end # Specifies a new irregular that applies to both pluralization and singularization at the same time. This can only be used # for strings, not regular expressions. You simply pass the irregular in singular and plural form. 
# # Examples: # irregular 'octopus', 'octopi' # irregular 'person', 'people' def irregular(singular, plural) if singular[0,1].upcase == plural[0,1].upcase plural(Regexp.new("(#{singular[0,1]})#{singular[1..-1]}$", "i"), '\1' + plural[1..-1]) singular(Regexp.new("(#{plural[0,1]})#{plural[1..-1]}$", "i"), '\1' + singular[1..-1]) else plural(Regexp.new("#{singular[0,1].upcase}(?i)#{singular[1..-1]}$"), plural[0,1].upcase + plural[1..-1]) plural(Regexp.new("#{singular[0,1].downcase}(?i)#{singular[1..-1]}$"), plural[0,1].downcase + plural[1..-1]) singular(Regexp.new("#{plural[0,1].upcase}(?i)#{plural[1..-1]}$"), singular[0,1].upcase + singular[1..-1]) singular(Regexp.new("#{plural[0,1].downcase}(?i)#{plural[1..-1]}$"), singular[0,1].downcase + singular[1..-1]) end end # Add uncountable words that shouldn't be attempted inflected. # # Examples: # uncountable "money" # uncountable "money", "information" # uncountable %w( money information rice ) def uncountable(*words) (@uncountables << words).flatten! end # Clears the loaded inflections within a given scope (default is :all). Give the scope as a symbol of the inflection type, # the options are: :plurals, :singulars, :uncountables # # Examples: # clear :all # clear :plurals def clear(scope = :all) case scope when :all @plurals, @singulars, @uncountables = [], [], [] else instance_variable_set "@#{scope}", [] end end end extend self def inflections if block_given? yield Inflections.instance else Inflections.instance end end # Returns the plural form of the word in the string. # # Examples # "post".pluralize #=> "posts" # "octopus".pluralize #=> "octopi" # "sheep".pluralize #=> "sheep" # "words".pluralize #=> "words" # "the blue mailman".pluralize #=> "the blue mailmen" # "CamelOctopus".pluralize #=> "CamelOctopi" def pluralize(word) result = word.to_s.dup if word.empty? 
|| inflections.uncountables.include?(result.downcase) result else inflections.plurals.each { |(rule, replacement)| break if result.gsub!(rule, replacement) } result end end # The reverse of pluralize, returns the singular form of a word in a string. # # Examples # "posts".singularize #=> "post" # "octopi".singularize #=> "octopus" # "sheep".singluarize #=> "sheep" # "word".singluarize #=> "word" # "the blue mailmen".singularize #=> "the blue mailman" # "CamelOctopi".singularize #=> "CamelOctopus" def singularize(word) result = word.to_s.dup if inflections.uncountables.include?(result.downcase) result else inflections.singulars.each { |(rule, replacement)| break if result.gsub!(rule, replacement) } result end end # By default, camelize converts strings to UpperCamelCase. If the argument to camelize # is set to ":lower" then camelize produces lowerCamelCase. # # camelize will also convert '/' to '::' which is useful for converting paths to namespaces # # Examples # "active_record".camelize #=> "ActiveRecord" # "active_record".camelize(:lower) #=> "activeRecord" # "active_record/errors".camelize #=> "ActiveRecord::Errors" # "active_record/errors".camelize(:lower) #=> "activeRecord::Errors" def camelize(lower_case_and_underscored_word, first_letter_in_uppercase = true) if first_letter_in_uppercase lower_case_and_underscored_word.to_s.gsub(/\/(.?)/) { "::" + $1.upcase }.gsub(/(^|_)(.)/) { $2.upcase } else lower_case_and_underscored_word.first + camelize(lower_case_and_underscored_word)[1..-1] end end # Capitalizes all the words and replaces some characters in the string to create # a nicer looking title. Titleize is meant for creating pretty output. It is not # used in the Rails internals. 
# # titleize is also aliased as as titlecase # # Examples # "man from the boondocks".titleize #=> "Man From The Boondocks" # "x-men: the last stand".titleize #=> "X Men: The Last Stand" def titleize(word) humanize(underscore(word)).gsub(/\b([a-z])/) { $1.capitalize } end # The reverse of +camelize+. Makes an underscored form from the expression in the string. # # Changes '::' to '/' to convert namespaces to paths. # # Examples # "ActiveRecord".underscore #=> "active_record" # "ActiveRecord::Errors".underscore #=> active_record/errors def underscore(camel_cased_word) camel_cased_word.to_s.gsub(/::/, '/'). gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2'). gsub(/([a-z\d])([A-Z])/,'\1_\2'). tr("-", "_"). downcase end # Replaces underscores with dashes in the string. # # Example # "puni_puni" #=> "puni-puni" def dasherize(underscored_word) underscored_word.gsub(/_/, '-') end # Capitalizes the first word and turns underscores into spaces and strips _id. # Like titleize, this is meant for creating pretty output. # # Examples # "employee_salary" #=> "Employee salary" # "author_id" #=> "Author" def humanize(lower_case_and_underscored_word) lower_case_and_underscored_word.to_s.gsub(/_id$/, "").gsub(/_/, " ").capitalize end # Removes the module part from the expression in the string # # Examples # "ActiveRecord::CoreExtensions::String::Inflections".demodulize #=> "Inflections" # "Inflections".demodulize #=> "Inflections" def demodulize(class_name_in_module) class_name_in_module.to_s.gsub(/^.*::/, '') end # Create the name of a table like Rails does for models to table names. This method # uses the pluralize method on the last word in the string. # # Examples # "RawScaledScorer".tableize #=> "raw_scaled_scorers" # "egg_and_ham".tableize #=> "egg_and_hams" # "fancyCategory".tableize #=> "fancy_categories" def tableize(class_name) pluralize(underscore(class_name)) end # Create a class name from a table name like Rails does for table names to models. 
# Note that this returns a string and not a Class. (To convert to an actual class # follow classify with constantize.) # # Examples # "egg_and_hams".classify #=> "EggAndHam" # "post".classify #=> "Post" def classify(table_name) # strip out any leading schema name camelize(singularize(table_name.to_s.sub(/.*\./, ''))) end # Creates a foreign key name from a class name. # +separate_class_name_and_id_with_underscore+ sets whether # the method should put '_' between the name and 'id'. # # Examples # "Message".foreign_key #=> "message_id" # "Message".foreign_key(false) #=> "messageid" # "Admin::Post".foreign_key #=> "post_id" def foreign_key(class_name, separate_class_name_and_id_with_underscore = true) underscore(demodulize(class_name)) + (separate_class_name_and_id_with_underscore ? "_id" : "id") end # Constantize tries to find a declared constant with the name specified # in the string. It raises a NameError when the name is not in CamelCase # or is not initialized. # # Examples # "Module".constantize #=> Module # "Class".constantize #=> Class def constantize(camel_cased_word) unless /\A(?:::)?([A-Z]\w*(?:::[A-Z]\w*)*)\z/ =~ camel_cased_word raise NameError, "#{camel_cased_word.inspect} is not a valid constant name!" end Object.module_eval("::#{$1}", __FILE__, __LINE__) end # Ordinalize turns a number into an ordinal string used to denote the # position in an ordered sequence such as 1st, 2nd, 3rd, 4th. # # Examples # ordinalize(1) # => "1st" # ordinalize(2) # => "2nd" # ordinalize(1002) # => "1002nd" # ordinalize(1003) # => "1003rd" def ordinalize(number) if (11..13).include?(number.to_i % 100) "#{number}th" else case number.to_i % 10 when 1; "#{number}st" when 2; "#{number}nd" when 3; "#{number}rd" else "#{number}th" end end end end require File.dirname(__FILE__) + '/inflections'
weimingtom/xruby
lib/ruby/gems/1.8/gems/activesupport-2.0.1/lib/active_support/inflector.rb
Ruby
apache-2.0
10,210
# Copyright 2010 The Closure Library Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utility to use the Closure Compiler CLI from Python.""" import logging import os import re import subprocess import tempfile # Pulls just the major and minor version numbers from the first line of # 'java -version'. Versions are in the format of [0-9]+\.[0-9]+\..* See: # http://www.oracle.com/technetwork/java/javase/versioning-naming-139433.html _VERSION_REGEX = re.compile(r'"([0-9]+)\.([0-9]+)') class JsCompilerError(Exception): """Raised if there's an error in calling the compiler.""" pass def _GetJavaVersionString(): """Get the version string from the Java VM.""" return subprocess.check_output(['java', '-version'], stderr=subprocess.STDOUT) def _ParseJavaVersion(version_string): """Returns a 2-tuple for the current version of Java installed. Args: version_string: String of the Java version (e.g. '1.7.2-ea'). Returns: The major and minor versions, as a 2-tuple (e.g. (1, 7)). """ match = _VERSION_REGEX.search(version_string) if match: version = tuple(int(x, 10) for x in match.groups()) assert len(version) == 2 return version def _JavaSupports32BitMode(): """Determines whether the JVM supports 32-bit mode on the platform.""" # Suppresses process output to stderr and stdout from showing up in the # console as we're only trying to determine 32-bit JVM support. 
supported = False try: devnull = open(os.devnull, 'wb') return subprocess.call(['java', '-d32', '-version'], stdout=devnull, stderr=devnull) == 0 except IOError: pass else: devnull.close() return supported def _GetJsCompilerArgs(compiler_jar_path, java_version, jvm_flags): """Assembles arguments for call to JsCompiler.""" if java_version < (1, 7): raise JsCompilerError('Closure Compiler requires Java 1.7 or higher. ' 'Please visit http://www.java.com/getjava') args = ['java'] # Add JVM flags we believe will produce the best performance. See # https://groups.google.com/forum/#!topic/closure-library-discuss/7w_O9-vzlj4 # Attempt 32-bit mode if available (Java 7 on Mac OS X does not support 32-bit # mode, for example). if _JavaSupports32BitMode(): args += ['-d32'] # Prefer the "client" VM. args += ['-client'] # Add JVM flags, if any if jvm_flags: args += jvm_flags # Add the application JAR. args += ['-jar', compiler_jar_path] return args def _GetFlagFile(source_paths, compiler_flags): """Writes given source paths and compiler flags to a --flagfile. The given source_paths will be written as '--js' flags and the compiler_flags are written as-is. Args: source_paths: List of string js source paths. compiler_flags: List of string compiler flags. Returns: The file to which the flags were written. """ args = [] for path in source_paths: args += ['--js', path] # Add compiler flags, if any. if compiler_flags: args += compiler_flags flags_file = tempfile.NamedTemporaryFile(delete=False) flags_file.write(' '.join(args)) flags_file.close() return flags_file def Compile(compiler_jar_path, source_paths, jvm_flags=None, compiler_flags=None): """Prepares command-line call to Closure Compiler. Args: compiler_jar_path: Path to the Closure compiler .jar file. source_paths: Source paths to build, in order. jvm_flags: A list of additional flags to pass on to JVM. compiler_flags: A list of additional flags to pass on to Closure Compiler. 
Returns: The compiled source, as a string, or None if compilation failed. """ java_version = _ParseJavaVersion(str(_GetJavaVersionString())) args = _GetJsCompilerArgs(compiler_jar_path, java_version, jvm_flags) # Write source path arguments to flag file for avoiding "The filename or # extension is too long" error in big projects. See # https://github.com/google/closure-library/pull/678 flags_file = _GetFlagFile(source_paths, compiler_flags) args += ['--flagfile', flags_file.name] logging.info('Compiling with the following command: %s', ' '.join(args)) try: return subprocess.check_output(args) except subprocess.CalledProcessError: raise JsCompilerError('JavaScript compilation failed.') finally: os.remove(flags_file.name)
Dominator008/closure-library
closure/bin/build/jscompiler.py
Python
apache-2.0
4,953
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Collections.Generic; using System.Composition; using Microsoft.CodeAnalysis.ExtractInterface; using Microsoft.CodeAnalysis.Host.Mef; using Microsoft.CodeAnalysis.LanguageServices; using Microsoft.CodeAnalysis.Notification; namespace Microsoft.CodeAnalysis.Editor.UnitTests.ExtractInterface { [ExportWorkspaceService(typeof(IExtractInterfaceOptionsService), ServiceLayer.Default), Shared] internal class TestExtractInterfaceOptionsService : IExtractInterfaceOptionsService { public IEnumerable<ISymbol> AllExtractableMembers { get; private set; } public string DefaultInterfaceName { get; private set; } public List<string> ConflictingTypeNames { get; private set; } public string DefaultNamespace { get; private set; } public string GeneratedNameTypeParameterSuffix { get; set; } public bool IsCancelled { get; set; } public string ChosenInterfaceName { get; set; } public string ChosenFileName { get; set; } public IEnumerable<ISymbol> ChosenMembers { get; set; } public ExtractInterfaceOptionsResult GetExtractInterfaceOptions( ISyntaxFactsService syntaxFactsService, INotificationService notificationService, List<ISymbol> extractableMembers, string defaultInterfaceName, List<string> conflictingTypeNames, string defaultNamespace, string generatedNameTypeParameterSuffix, string languageName) { this.AllExtractableMembers = extractableMembers; this.DefaultInterfaceName = defaultInterfaceName; this.ConflictingTypeNames = conflictingTypeNames; this.DefaultNamespace = defaultNamespace; this.GeneratedNameTypeParameterSuffix = generatedNameTypeParameterSuffix; return IsCancelled ? ExtractInterfaceOptionsResult.Cancelled : new ExtractInterfaceOptionsResult( isCancelled: false, includedMembers: ChosenMembers ?? AllExtractableMembers, interfaceName: ChosenInterfaceName ?? defaultInterfaceName, fileName: ChosenFileName ?? 
defaultInterfaceName); } } }
paulvanbrenk/roslyn
src/EditorFeatures/TestUtilities/ExtractInterface/TestExtractInterfaceOptions.cs
C#
apache-2.0
2,430
/* * Copyright 2012-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sample.webservices.service; import java.util.Date; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; @Service public class StubHumanResourceService implements HumanResourceService { private final Logger logger = LoggerFactory.getLogger(StubHumanResourceService.class); @Override public void bookHoliday(Date startDate, Date endDate, String name) { this.logger.info("Booking holiday for [{} - {}] for [{}] ", startDate, endDate, name); } }
qerub/spring-boot
spring-boot-samples/spring-boot-sample-webservices/src/main/java/sample/webservices/service/StubHumanResourceService.java
Java
apache-2.0
1,140
{% extends "horizon/common/_modal_form.html" %} {% load i18n %} {% block ng_controller %}hzNamespaceResourceTypeFormController as ctrl{% endblock %} {% block form_name %}manageResourceTypesForm{% endblock %} {% block form_validation %}novalidate{% endblock %} {% block modal-body %} <div class="resource-types membership"> <div class="left"> <div> <div class="header"> <div class="fake_table fake_table_header"> <span class="members_title">{% trans "Available Types" %}</span> <div class="form-group has-feedback"> <input id="resource_type_filter" class="filter form-control input-sm" type="text" placeholder="{% trans "Filter" %}" name="resource_type_filter" ng-model="searchResource" ng-change="filter_changed()"/> <span class="fa fa-search form-control-feedback"></span> </div> </div> </div> <div class="fake_table fake_update_members_table"> <div class="available_members available_update_members"> <ul ng-repeat="resource_type in ctrl.resource_types | filter:searchResource" class="nav nav-pills btn-group {$$last ? ' last_stripe': ''$}" ng-class-odd="'dark_stripe'" ng-class-even="'light_stripe'" style="margin-left: 0px;"> <li class="select_resource"> <input type="checkbox" ng-model="resource_type.selected"/> </li> <li class="display_name"> <span> {$ resource_type.name $} </span> </li> <li class="scope"> <input type="text" class="form-control input-sm" placeholder="Prefix" ng-show="resource_type.selected" ng-model="resource_type.prefix"/> </li> </ul> </div> </div> </div> </div> <div class="right"> <h3>{% trans "Description" %}:</h3> <p>{% trans "Namespaces can be associated to different resource types. This makes the properties in the namespace visible in the 'Update Metadata' action for that type of resource." %}</p> <p>{% trans "Additionally, some resource types may require a prefix to be used when applying the metadata. In certain cases, the prefix may differ between the resource type (for example, flavor vs image)." 
%}</p> <p>{% trans "Example: The prefix 'hw:' is added to OS::Nova::Flavor for the Virtual CPU Topology namespace so that the properties will be prefixed with 'hw:' when applied to flavors." %}</p> <p>{% trans "Do not use a colon ':' with OS::Glance::Images. This resource type does not support the use of colons." %}</p> </div> </div> <script type="text/javascript"> var resource_types = {{ resource_types|safe }}; </script> {% endblock %} {% block modal-footer %} <div> <input class="btn btn-primary pull-right" type="submit" ng:click="ctrl.saveResourceTypes()" value="{% trans "Save" %}"/> <a class="btn btn-default secondary cancel close">{% trans "Cancel" %}</a> <input type="hidden" name="resource_types" ng-value="ctrl.resource_types" ng-model="ctrl.resource_types"> </div> {% endblock %}
FNST-OpenStack/horizon
openstack_dashboard/dashboards/admin/metadata_defs/templates/metadata_defs/resource_types.html
HTML
apache-2.0
3,423
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // This file was automatically generated by informer-gen package internalversion import ( v1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" schema "k8s.io/apimachinery/pkg/runtime/schema" cache "k8s.io/client-go/tools/cache" internalversion "k8s.io/code-generator/_examples/apiserver/clientset/internalversion" example "k8s.io/code-generator/_examples/apiserver/informers/internalversion/example" example2 "k8s.io/code-generator/_examples/apiserver/informers/internalversion/example2" internalinterfaces "k8s.io/code-generator/_examples/apiserver/informers/internalversion/internalinterfaces" reflect "reflect" sync "sync" time "time" ) type sharedInformerFactory struct { client internalversion.Interface namespace string tweakListOptions internalinterfaces.TweakListOptionsFunc lock sync.Mutex defaultResync time.Duration informers map[reflect.Type]cache.SharedIndexInformer // startedInformers is used for tracking which informers have been started. // This allows Start() to be called multiple times safely. 
startedInformers map[reflect.Type]bool } // NewSharedInformerFactory constructs a new instance of sharedInformerFactory func NewSharedInformerFactory(client internalversion.Interface, defaultResync time.Duration) SharedInformerFactory { return NewFilteredSharedInformerFactory(client, defaultResync, v1.NamespaceAll, nil) } // NewFilteredSharedInformerFactory constructs a new instance of sharedInformerFactory. // Listers obtained via this SharedInformerFactory will be subject to the same filters // as specified here. func NewFilteredSharedInformerFactory(client internalversion.Interface, defaultResync time.Duration, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) SharedInformerFactory { return &sharedInformerFactory{ client: client, namespace: namespace, tweakListOptions: tweakListOptions, defaultResync: defaultResync, informers: make(map[reflect.Type]cache.SharedIndexInformer), startedInformers: make(map[reflect.Type]bool), } } // Start initializes all requested informers. func (f *sharedInformerFactory) Start(stopCh <-chan struct{}) { f.lock.Lock() defer f.lock.Unlock() for informerType, informer := range f.informers { if !f.startedInformers[informerType] { go informer.Run(stopCh) f.startedInformers[informerType] = true } } } // WaitForCacheSync waits for all started informers' cache were synced. 
func (f *sharedInformerFactory) WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool { informers := func() map[reflect.Type]cache.SharedIndexInformer { f.lock.Lock() defer f.lock.Unlock() informers := map[reflect.Type]cache.SharedIndexInformer{} for informerType, informer := range f.informers { if f.startedInformers[informerType] { informers[informerType] = informer } } return informers }() res := map[reflect.Type]bool{} for informType, informer := range informers { res[informType] = cache.WaitForCacheSync(stopCh, informer.HasSynced) } return res } // InternalInformerFor returns the SharedIndexInformer for obj using an internal // client. func (f *sharedInformerFactory) InformerFor(obj runtime.Object, newFunc internalinterfaces.NewInformerFunc) cache.SharedIndexInformer { f.lock.Lock() defer f.lock.Unlock() informerType := reflect.TypeOf(obj) informer, exists := f.informers[informerType] if exists { return informer } informer = newFunc(f.client, f.defaultResync) f.informers[informerType] = informer return informer } // SharedInformerFactory provides shared informers for resources in all known // API group versions. type SharedInformerFactory interface { internalinterfaces.SharedInformerFactory ForResource(resource schema.GroupVersionResource) (GenericInformer, error) WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool Example() example.Interface SecondExample() example2.Interface } func (f *sharedInformerFactory) Example() example.Interface { return example.New(f, f.namespace, f.tweakListOptions) } func (f *sharedInformerFactory) SecondExample() example2.Interface { return example2.New(f, f.namespace, f.tweakListOptions) }
yiqinguo/kubernetes
staging/src/k8s.io/code-generator/_examples/apiserver/informers/internalversion/factory.go
GO
apache-2.0
4,809
<?php //============================================================+ // File name : swe.php // Begin : 2004-03-03 // Last Update : 2010-10-26 // // Description : Language module for TCPDF // (contains translated texts) // Swedish // // Author: Nicola Asuni // // (c) Copyright: // Nicola Asuni // Tecnick.com LTD // Manor Coach House, Church Hill // Aldershot, Hants, GU12 4RQ // UK // www.tecnick.com // info@tecnick.com //============================================================+ /** * TCPDF language file (contains translated texts). * @package com.tecnick.tcpdf * @brief TCPDF language file: Swedish * @author Nicola Asuni * @since 2004-03-03 */ // Swedish global $l; $l = Array(); // PAGE META DESCRIPTORS -------------------------------------- $l['a_meta_charset'] = 'UTF-8'; $l['a_meta_dir'] = 'ltr'; $l['a_meta_language'] = 'sv'; // TRANSLATIONS -------------------------------------- $l['w_page'] = 'sida'; //============================================================+ // END OF FILE //============================================================+
rjw57/findsorguk
library/tcpdf/config/lang/swe.php
PHP
apache-2.0
1,211
// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package fileutil import ( "os" "path" "sort" "strings" "time" ) func PurgeFile(dirname string, suffix string, max uint, interval time.Duration, stop <-chan struct{}) <-chan error { errC := make(chan error, 1) go func() { for { fnames, err := ReadDir(dirname) if err != nil { errC <- err return } newfnames := make([]string, 0) for _, fname := range fnames { if strings.HasSuffix(fname, suffix) { newfnames = append(newfnames, fname) } } sort.Strings(newfnames) for len(newfnames) > int(max) { f := path.Join(dirname, newfnames[0]) l, err := NewLock(f) if err != nil { errC <- err return } err = l.TryLock() if err != nil { break } err = os.Remove(f) if err != nil { errC <- err return } err = l.Unlock() if err != nil { plog.Errorf("error unlocking %s when purging file (%v)", l.Name(), err) errC <- err return } err = l.Destroy() if err != nil { plog.Errorf("error destroying lock %s when purging file (%v)", l.Name(), err) errC <- err return } plog.Infof("purged file %s successfully", f) newfnames = newfnames[1:] } select { case <-time.After(interval): case <-stop: return } } }() return errC }
bsalamat/kubernetes
third_party/forked/etcd221/pkg/fileutil/purge.go
GO
apache-2.0
1,895
// +build amd64,darwin // Created by cgo -godefs - DO NOT EDIT // cgo -godefs types_darwin.go package unix const ( sizeofPtr = 0x8 sizeofShort = 0x2 sizeofInt = 0x4 sizeofLong = 0x8 sizeofLongLong = 0x8 ) type ( _C_short int16 _C_int int32 _C_long int64 _C_long_long int64 ) type Timespec struct { Sec int64 Nsec int64 } type Timeval struct { Sec int64 Usec int32 Pad_cgo_0 [4]byte } type Timeval32 struct { Sec int32 Usec int32 } type Rusage struct { Utime Timeval Stime Timeval Maxrss int64 Ixrss int64 Idrss int64 Isrss int64 Minflt int64 Majflt int64 Nswap int64 Inblock int64 Oublock int64 Msgsnd int64 Msgrcv int64 Nsignals int64 Nvcsw int64 Nivcsw int64 } type Rlimit struct { Cur uint64 Max uint64 } type _Gid_t uint32 type Stat_t struct { Dev int32 Mode uint16 Nlink uint16 Ino uint64 Uid uint32 Gid uint32 Rdev int32 Pad_cgo_0 [4]byte Atimespec Timespec Mtimespec Timespec Ctimespec Timespec Birthtimespec Timespec Size int64 Blocks int64 Blksize int32 Flags uint32 Gen uint32 Lspare int32 Qspare [2]int64 } type Statfs_t struct { Bsize uint32 Iosize int32 Blocks uint64 Bfree uint64 Bavail uint64 Files uint64 Ffree uint64 Fsid Fsid Owner uint32 Type uint32 Flags uint32 Fssubtype uint32 Fstypename [16]int8 Mntonname [1024]int8 Mntfromname [1024]int8 Reserved [8]uint32 } type Flock_t struct { Start int64 Len int64 Pid int32 Type int16 Whence int16 } type Fstore_t struct { Flags uint32 Posmode int32 Offset int64 Length int64 Bytesalloc int64 } type Radvisory_t struct { Offset int64 Count int32 Pad_cgo_0 [4]byte } type Fbootstraptransfer_t struct { Offset int64 Length uint64 Buffer *byte } type Log2phys_t struct { Flags uint32 Pad_cgo_0 [8]byte Pad_cgo_1 [8]byte } type Fsid struct { Val [2]int32 } type Dirent struct { Ino uint64 Seekoff uint64 Reclen uint16 Namlen uint16 Type uint8 Name [1024]int8 Pad_cgo_0 [3]byte } type RawSockaddrInet4 struct { Len uint8 Family uint8 Port uint16 Addr [4]byte /* in_addr */ Zero [8]int8 } type RawSockaddrInet6 struct { Len uint8 Family 
uint8 Port uint16 Flowinfo uint32 Addr [16]byte /* in6_addr */ Scope_id uint32 } type RawSockaddrUnix struct { Len uint8 Family uint8 Path [104]int8 } type RawSockaddrDatalink struct { Len uint8 Family uint8 Index uint16 Type uint8 Nlen uint8 Alen uint8 Slen uint8 Data [12]int8 } type RawSockaddr struct { Len uint8 Family uint8 Data [14]int8 } type RawSockaddrAny struct { Addr RawSockaddr Pad [92]int8 } type _Socklen uint32 type Linger struct { Onoff int32 Linger int32 } type Iovec struct { Base *byte Len uint64 } type IPMreq struct { Multiaddr [4]byte /* in_addr */ Interface [4]byte /* in_addr */ } type IPv6Mreq struct { Multiaddr [16]byte /* in6_addr */ Interface uint32 } type Msghdr struct { Name *byte Namelen uint32 Pad_cgo_0 [4]byte Iov *Iovec Iovlen int32 Pad_cgo_1 [4]byte Control *byte Controllen uint32 Flags int32 } type Cmsghdr struct { Len uint32 Level int32 Type int32 } type Inet4Pktinfo struct { Ifindex uint32 Spec_dst [4]byte /* in_addr */ Addr [4]byte /* in_addr */ } type Inet6Pktinfo struct { Addr [16]byte /* in6_addr */ Ifindex uint32 } type IPv6MTUInfo struct { Addr RawSockaddrInet6 Mtu uint32 } type ICMPv6Filter struct { Filt [8]uint32 } const ( SizeofSockaddrInet4 = 0x10 SizeofSockaddrInet6 = 0x1c SizeofSockaddrAny = 0x6c SizeofSockaddrUnix = 0x6a SizeofSockaddrDatalink = 0x14 SizeofLinger = 0x8 SizeofIPMreq = 0x8 SizeofIPv6Mreq = 0x14 SizeofMsghdr = 0x30 SizeofCmsghdr = 0xc SizeofInet4Pktinfo = 0xc SizeofInet6Pktinfo = 0x14 SizeofIPv6MTUInfo = 0x20 SizeofICMPv6Filter = 0x20 ) const ( PTRACE_TRACEME = 0x0 PTRACE_CONT = 0x7 PTRACE_KILL = 0x8 ) type Kevent_t struct { Ident uint64 Filter int16 Flags uint16 Fflags uint32 Data int64 Udata *byte } type FdSet struct { Bits [32]int32 } const ( SizeofIfMsghdr = 0x70 SizeofIfData = 0x60 SizeofIfaMsghdr = 0x14 SizeofIfmaMsghdr = 0x10 SizeofIfmaMsghdr2 = 0x14 SizeofRtMsghdr = 0x5c SizeofRtMetrics = 0x38 ) type IfMsghdr struct { Msglen uint16 Version uint8 Type uint8 Addrs int32 Flags int32 Index uint16 
Pad_cgo_0 [2]byte Data IfData } type IfData struct { Type uint8 Typelen uint8 Physical uint8 Addrlen uint8 Hdrlen uint8 Recvquota uint8 Xmitquota uint8 Unused1 uint8 Mtu uint32 Metric uint32 Baudrate uint32 Ipackets uint32 Ierrors uint32 Opackets uint32 Oerrors uint32 Collisions uint32 Ibytes uint32 Obytes uint32 Imcasts uint32 Omcasts uint32 Iqdrops uint32 Noproto uint32 Recvtiming uint32 Xmittiming uint32 Lastchange Timeval32 Unused2 uint32 Hwassist uint32 Reserved1 uint32 Reserved2 uint32 } type IfaMsghdr struct { Msglen uint16 Version uint8 Type uint8 Addrs int32 Flags int32 Index uint16 Pad_cgo_0 [2]byte Metric int32 } type IfmaMsghdr struct { Msglen uint16 Version uint8 Type uint8 Addrs int32 Flags int32 Index uint16 Pad_cgo_0 [2]byte } type IfmaMsghdr2 struct { Msglen uint16 Version uint8 Type uint8 Addrs int32 Flags int32 Index uint16 Pad_cgo_0 [2]byte Refcount int32 } type RtMsghdr struct { Msglen uint16 Version uint8 Type uint8 Index uint16 Pad_cgo_0 [2]byte Flags int32 Addrs int32 Pid int32 Seq int32 Errno int32 Use int32 Inits uint32 Rmx RtMetrics } type RtMetrics struct { Locks uint32 Mtu uint32 Hopcount uint32 Expire int32 Recvpipe uint32 Sendpipe uint32 Ssthresh uint32 Rtt uint32 Rttvar uint32 Pksent uint32 Filler [4]uint32 } const ( SizeofBpfVersion = 0x4 SizeofBpfStat = 0x8 SizeofBpfProgram = 0x10 SizeofBpfInsn = 0x8 SizeofBpfHdr = 0x14 ) type BpfVersion struct { Major uint16 Minor uint16 } type BpfStat struct { Recv uint32 Drop uint32 } type BpfProgram struct { Len uint32 Pad_cgo_0 [4]byte Insns *BpfInsn } type BpfInsn struct { Code uint16 Jt uint8 Jf uint8 K uint32 } type BpfHdr struct { Tstamp Timeval32 Caplen uint32 Datalen uint32 Hdrlen uint16 Pad_cgo_0 [2]byte } type Termios struct { Iflag uint64 Oflag uint64 Cflag uint64 Lflag uint64 Cc [20]uint8 Pad_cgo_0 [4]byte Ispeed uint64 Ospeed uint64 } const ( AT_FDCWD = -0x2 AT_SYMLINK_NOFOLLOW = 0x20 )
jasonbishop/contrib
ingress/Godeps/_workspace/src/github.com/fsouza/go-dockerclient/external/golang.org/x/sys/unix/ztypes_darwin_amd64.go
GO
apache-2.0
7,114
/* * Based on JUEL 2.2.1 code, 2006-2009 Odysseus Software GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.impl.juel; import org.camunda.bpm.engine.impl.javax.el.ELContext; public class AstBinary extends AstRightValue { public interface Operator { public Object eval(Bindings bindings, ELContext context, AstNode left, AstNode right); } public static abstract class SimpleOperator implements Operator { public Object eval(Bindings bindings, ELContext context, AstNode left, AstNode right) { return apply(bindings, left.eval(bindings, context), right.eval(bindings, context)); } protected abstract Object apply(TypeConverter converter, Object o1, Object o2); } public static final Operator ADD = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return NumberOperations.add(converter, o1, o2); } @Override public String toString() { return "+"; } }; public static final Operator AND = new Operator() { public Object eval(Bindings bindings, ELContext context, AstNode left, AstNode right) { Boolean l = bindings.convert(left.eval(bindings, context), Boolean.class); return Boolean.TRUE.equals(l) ? 
bindings.convert(right.eval(bindings, context), Boolean.class) : Boolean.FALSE; } @Override public String toString() { return "&&"; } }; public static final Operator DIV = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return NumberOperations.div(converter, o1, o2); } @Override public String toString() { return "/"; } }; public static final Operator EQ = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return BooleanOperations.eq(converter, o1, o2); } @Override public String toString() { return "=="; } }; public static final Operator GE = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return BooleanOperations.ge(converter, o1, o2); } @Override public String toString() { return ">="; } }; public static final Operator GT = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return BooleanOperations.gt(converter, o1, o2); } @Override public String toString() { return ">"; } }; public static final Operator LE = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return BooleanOperations.le(converter, o1, o2); } @Override public String toString() { return "<="; } }; public static final Operator LT = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return BooleanOperations.lt(converter, o1, o2); } @Override public String toString() { return "<"; } }; public static final Operator MOD = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return NumberOperations.mod(converter, o1, o2); } @Override public String toString() { return "%"; } }; public static final Operator MUL = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return NumberOperations.mul(converter, 
o1, o2); } @Override public String toString() { return "*"; } }; public static final Operator NE = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return BooleanOperations.ne(converter, o1, o2); } @Override public String toString() { return "!="; } }; public static final Operator OR = new Operator() { public Object eval(Bindings bindings, ELContext context, AstNode left, AstNode right) { Boolean l = bindings.convert(left.eval(bindings, context), Boolean.class); return Boolean.TRUE.equals(l) ? Boolean.TRUE : bindings.convert(right.eval(bindings, context), Boolean.class); } @Override public String toString() { return "||"; } }; public static final Operator SUB = new SimpleOperator() { @Override public Object apply(TypeConverter converter, Object o1, Object o2) { return NumberOperations.sub(converter, o1, o2); } @Override public String toString() { return "-"; } }; private final Operator operator; private final AstNode left, right; public AstBinary(AstNode left, AstNode right, Operator operator) { this.left = left; this.right = right; this.operator = operator; } public Operator getOperator() { return operator; } @Override public Object eval(Bindings bindings, ELContext context) { return operator.eval(bindings, context, left, right); } @Override public String toString() { return "'" + operator.toString() + "'"; } @Override public void appendStructure(StringBuilder b, Bindings bindings) { left.appendStructure(b, bindings); b.append(' '); b.append(operator); b.append(' '); right.appendStructure(b, bindings); } public int getCardinality() { return 2; } public AstNode getChild(int i) { return i == 0 ? left : i == 1 ? right : null; } }
hawky-4s-/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/engine/impl/juel/AstBinary.java
Java
apache-2.0
5,577
/* * Copyright (C) 2015 Willi Ye * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.grarak.kerneladiutor.utils.kernel; import android.content.Context; import com.grarak.kerneladiutor.utils.Constants; import com.grarak.kerneladiutor.utils.Utils; import com.grarak.kerneladiutor.utils.root.Control; /** * Created by willi on 03.01.15. */ public class Battery implements Constants { public static void setChargingRate(int value, Context context) { Control.runCommand(String.valueOf(value), CUSTOM_CHARGING_RATE, Control.CommandType.GENERIC, context); } public static int getChargingRate() { return Utils.stringToInt(Utils.readFile(CUSTOM_CHARGING_RATE)); } public static boolean hasChargingRate() { return Utils.existFile(CUSTOM_CHARGING_RATE); } public static void activateCustomChargeRate(boolean active, Context context) { Control.runCommand(active ? 
"1" : "0", CHARGE_RATE_ENABLE, Control.CommandType.GENERIC, context); } public static boolean isCustomChargeRateActive() { return Utils.readFile(CHARGE_RATE_ENABLE).equals("1"); } public static boolean hasCustomChargeRateEnable() { return Utils.existFile(CHARGE_RATE_ENABLE); } public static boolean hasChargeRate() { return Utils.existFile(CHARGE_RATE); } public static void setBlx(int value, Context context) { Control.runCommand(String.valueOf(value), BLX, Control.CommandType.GENERIC, context); } public static int getCurBlx() { return Utils.stringToInt(Utils.readFile(BLX)); } public static boolean hasBlx() { return Utils.existFile(BLX); } public static void activateForceFastCharge(boolean active, Context context) { Control.runCommand(active ? "1" : "0", FORCE_FAST_CHARGE, Control.CommandType.GENERIC, context); } public static boolean isForceFastChargeActive() { return Utils.readFile(FORCE_FAST_CHARGE).equals("1"); } public static boolean hasForceFastCharge() { return Utils.existFile(FORCE_FAST_CHARGE); } }
chrisc93/KernelAdiutor
app/src/main/java/com/grarak/kerneladiutor/utils/kernel/Battery.java
Java
apache-2.0
2,625
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.cloudfront.model.transform; import java.util.Map; import java.util.Map.Entry; import javax.xml.stream.events.XMLEvent; import com.amazonaws.services.cloudfront.model.*; import com.amazonaws.transform.Unmarshaller; import com.amazonaws.transform.MapEntry; import com.amazonaws.transform.StaxUnmarshallerContext; import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*; /** * Trusted Signers StAX Unmarshaller */ public class TrustedSignersStaxUnmarshaller implements Unmarshaller<TrustedSigners, StaxUnmarshallerContext> { public TrustedSigners unmarshall(StaxUnmarshallerContext context) throws Exception { TrustedSigners trustedSigners = new TrustedSigners(); int originalDepth = context.getCurrentDepth(); int targetDepth = originalDepth + 1; if (context.isStartOfDocument()) targetDepth += 1; while (true) { XMLEvent xmlEvent = context.nextEvent(); if (xmlEvent.isEndDocument()) return trustedSigners; if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) { if (context.testExpression("Enabled", targetDepth)) { trustedSigners.setEnabled(BooleanStaxUnmarshaller.getInstance().unmarshall(context)); continue; } if (context.testExpression("Quantity", targetDepth)) { trustedSigners.setQuantity(IntegerStaxUnmarshaller.getInstance().unmarshall(context)); continue; } if (context.testExpression("Items/AwsAccountNumber", targetDepth)) { 
trustedSigners.getItems().add(StringStaxUnmarshaller.getInstance().unmarshall(context)); continue; } } else if (xmlEvent.isEndElement()) { if (context.getCurrentDepth() < originalDepth) { return trustedSigners; } } } } private static TrustedSignersStaxUnmarshaller instance; public static TrustedSignersStaxUnmarshaller getInstance() { if (instance == null) instance = new TrustedSignersStaxUnmarshaller(); return instance; } }
OnePaaS/aws-sdk-java
aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront/model/transform/TrustedSignersStaxUnmarshaller.java
Java
apache-2.0
2,818
import sys import types try: import urlparse except ImportError: # pragma: no cover from urllib import parse as urlparse # True if we are running on Python 3. PY3 = sys.version_info[0] == 3 if PY3: # pragma: no cover string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes long = int else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str long = long if PY3: # pragma: no cover from urllib.parse import unquote_to_bytes def unquote_bytes_to_wsgi(bytestring): return unquote_to_bytes(bytestring).decode('latin-1') else: from urlparse import unquote as unquote_to_bytes def unquote_bytes_to_wsgi(bytestring): return unquote_to_bytes(bytestring) def text_(s, encoding='latin-1', errors='strict'): """ If ``s`` is an instance of ``binary_type``, return ``s.decode(encoding, errors)``, otherwise return ``s``""" if isinstance(s, binary_type): return s.decode(encoding, errors) return s # pragma: no cover if PY3: # pragma: no cover def tostr(s): if isinstance(s, text_type): s = s.encode('latin-1') return str(s, 'latin-1', 'strict') def tobytes(s): return bytes(s, 'latin-1') else: tostr = str def tobytes(s): return s try: from Queue import ( Queue, Empty, ) except ImportError: # pragma: no cover from queue import ( Queue, Empty, ) try: import thread except ImportError: # pragma: no cover import _thread as thread if PY3: # pragma: no cover import builtins exec_ = getattr(builtins, "exec") def reraise(tp, value, tb=None): if value is None: value = tp if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value del builtins else: # pragma: no cover def exec_(code, globs=None, locs=None): """Execute code in a namespace.""" if globs is None: frame = sys._getframe(1) globs = frame.f_globals if locs is None: locs = frame.f_locals del frame elif locs is None: locs = globs exec("""exec code in globs, locs""") exec_("""def reraise(tp, value, tb=None): raise 
tp, value, tb """) try: from StringIO import StringIO as NativeIO except ImportError: # pragma: no cover from io import StringIO as NativeIO try: import httplib except ImportError: # pragma: no cover from http import client as httplib try: MAXINT = sys.maxint except AttributeError: # pragma: no cover MAXINT = sys.maxsize
grepme/CMPUT410Lab01
virt_env/virt1/lib/python2.7/site-packages/waitress-0.8.9-py2.7.egg/waitress/compat.py
Python
apache-2.0
2,776
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ #include <cstdlib> #include <sstream> #include <boost/algorithm/string.hpp> #include <thrift/transport/THttpClient.h> #include <thrift/transport/TSocket.h> namespace apache { namespace thrift { namespace transport { using namespace std; THttpClient::THttpClient(boost::shared_ptr<TTransport> transport, std::string host, std::string path) : THttpTransport(transport), host_(host), path_(path) { } THttpClient::THttpClient(string host, int port, string path) : THttpTransport(boost::shared_ptr<TTransport>(new TSocket(host, port))), host_(host), path_(path) { } THttpClient::~THttpClient() {} void THttpClient::parseHeader(char* header) { char* colon = strchr(header, ':'); if (colon == NULL) { return; } char* value = colon+1; if (boost::istarts_with(header, "Transfer-Encoding")) { if (boost::iends_with(value, "chunked")) { chunked_ = true; } } else if (boost::istarts_with(header, "Content-Length")) { chunked_ = false; contentLength_ = atoi(value); } } bool THttpClient::parseStatusLine(char* status) { char* http = status; char* code = strchr(http, ' '); if (code == NULL) { throw TTransportException(string("Bad Status: ") + status); } *code = '\0'; while (*(code++) == ' ') {}; char* msg = strchr(code, ' 
'); if (msg == NULL) { throw TTransportException(string("Bad Status: ") + status); } *msg = '\0'; if (strcmp(code, "200") == 0) { // HTTP 200 = OK, we got the response return true; } else if (strcmp(code, "100") == 0) { // HTTP 100 = continue, just keep reading return false; } else { throw TTransportException(string("Bad Status: ") + status); } } void THttpClient::flush() { // Fetch the contents of the write buffer uint8_t* buf; uint32_t len; writeBuffer_.getBuffer(&buf, &len); // Construct the HTTP header std::ostringstream h; h << "POST " << path_ << " HTTP/1.1" << CRLF << "Host: " << host_ << CRLF << "Content-Type: application/x-thrift" << CRLF << "Content-Length: " << len << CRLF << "Accept: application/x-thrift" << CRLF << "User-Agent: Thrift/" << VERSION << " (C++/THttpClient)" << CRLF << CRLF; string header = h.str(); if(header.size() > (std::numeric_limits<uint32_t>::max)()) throw TTransportException("Header too big"); // Write the header, then the data, then flush transport_->write((const uint8_t*)header.c_str(), static_cast<uint32_t>(header.size())); transport_->write(buf, len); transport_->flush(); // Reset the buffer and header variables writeBuffer_.resetBuffer(); readHeaders_ = true; } }}} // apache::thrift::transport
henryr/Impala
thirdparty/thrift-0.9.0/lib/cpp/src/thrift/transport/THttpClient.cpp
C++
apache-2.0
3,451
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.web.api.dto; import io.swagger.annotations.ApiModelProperty; import javax.xml.bind.annotation.XmlType; /** * Details of a relationship. */ @XmlType(name = "relationship") public class RelationshipDTO { private String name; private String description; private Boolean autoTerminate; /** * @return the relationship name */ @ApiModelProperty( value = "The relationship name." ) public String getName() { return name; } public void setName(String name) { this.name = name; } /** * @return the relationship description */ @ApiModelProperty( value = "The relationship description." ) public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } /** * @return true if relationship is auto terminated;false otherwise */ @ApiModelProperty( value = "Whether or not flowfiles sent to this relationship should auto terminate." ) public Boolean isAutoTerminate() { return autoTerminate; } public void setAutoTerminate(Boolean autoTerminate) { this.autoTerminate = autoTerminate; } }
pvillard31/nifi
nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-client-dto/src/main/java/org/apache/nifi/web/api/dto/RelationshipDTO.java
Java
apache-2.0
2,095
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.elasticache.model; import java.io.Serializable; import com.amazonaws.AmazonWebServiceRequest; /** * Container for the parameters to the {@link com.amazonaws.services.elasticache.AmazonElastiCache#createCacheSubnetGroup(CreateCacheSubnetGroupRequest) CreateCacheSubnetGroup operation}. * <p> * The <i>CreateCacheSubnetGroup</i> action creates a new cache subnet * group. * </p> * <p> * Use this parameter only when you are creating a cluster in an Amazon * Virtual Private Cloud (VPC). * </p> * * @see com.amazonaws.services.elasticache.AmazonElastiCache#createCacheSubnetGroup(CreateCacheSubnetGroupRequest) */ public class CreateCacheSubnetGroupRequest extends AmazonWebServiceRequest implements Serializable, Cloneable { /** * A name for the cache subnet group. This value is stored as a lowercase * string. <p>Constraints: Must contain no more than 255 alphanumeric * characters or hyphens. <p>Example: <code>mysubnetgroup</code> */ private String cacheSubnetGroupName; /** * A description for the cache subnet group. */ private String cacheSubnetGroupDescription; /** * A list of VPC subnet IDs for the cache subnet group. */ private com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIds; /** * A name for the cache subnet group. This value is stored as a lowercase * string. <p>Constraints: Must contain no more than 255 alphanumeric * characters or hyphens. 
<p>Example: <code>mysubnetgroup</code> * * @return A name for the cache subnet group. This value is stored as a lowercase * string. <p>Constraints: Must contain no more than 255 alphanumeric * characters or hyphens. <p>Example: <code>mysubnetgroup</code> */ public String getCacheSubnetGroupName() { return cacheSubnetGroupName; } /** * A name for the cache subnet group. This value is stored as a lowercase * string. <p>Constraints: Must contain no more than 255 alphanumeric * characters or hyphens. <p>Example: <code>mysubnetgroup</code> * * @param cacheSubnetGroupName A name for the cache subnet group. This value is stored as a lowercase * string. <p>Constraints: Must contain no more than 255 alphanumeric * characters or hyphens. <p>Example: <code>mysubnetgroup</code> */ public void setCacheSubnetGroupName(String cacheSubnetGroupName) { this.cacheSubnetGroupName = cacheSubnetGroupName; } /** * A name for the cache subnet group. This value is stored as a lowercase * string. <p>Constraints: Must contain no more than 255 alphanumeric * characters or hyphens. <p>Example: <code>mysubnetgroup</code> * <p> * Returns a reference to this object so that method calls can be chained together. * * @param cacheSubnetGroupName A name for the cache subnet group. This value is stored as a lowercase * string. <p>Constraints: Must contain no more than 255 alphanumeric * characters or hyphens. <p>Example: <code>mysubnetgroup</code> * * @return A reference to this updated object so that method calls can be chained * together. */ public CreateCacheSubnetGroupRequest withCacheSubnetGroupName(String cacheSubnetGroupName) { this.cacheSubnetGroupName = cacheSubnetGroupName; return this; } /** * A description for the cache subnet group. * * @return A description for the cache subnet group. */ public String getCacheSubnetGroupDescription() { return cacheSubnetGroupDescription; } /** * A description for the cache subnet group. 
* * @param cacheSubnetGroupDescription A description for the cache subnet group. */ public void setCacheSubnetGroupDescription(String cacheSubnetGroupDescription) { this.cacheSubnetGroupDescription = cacheSubnetGroupDescription; } /** * A description for the cache subnet group. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param cacheSubnetGroupDescription A description for the cache subnet group. * * @return A reference to this updated object so that method calls can be chained * together. */ public CreateCacheSubnetGroupRequest withCacheSubnetGroupDescription(String cacheSubnetGroupDescription) { this.cacheSubnetGroupDescription = cacheSubnetGroupDescription; return this; } /** * A list of VPC subnet IDs for the cache subnet group. * * @return A list of VPC subnet IDs for the cache subnet group. */ public java.util.List<String> getSubnetIds() { if (subnetIds == null) { subnetIds = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(); subnetIds.setAutoConstruct(true); } return subnetIds; } /** * A list of VPC subnet IDs for the cache subnet group. * * @param subnetIds A list of VPC subnet IDs for the cache subnet group. */ public void setSubnetIds(java.util.Collection<String> subnetIds) { if (subnetIds == null) { this.subnetIds = null; return; } com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIdsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(subnetIds.size()); subnetIdsCopy.addAll(subnetIds); this.subnetIds = subnetIdsCopy; } /** * A list of VPC subnet IDs for the cache subnet group. * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setSubnetIds(java.util.Collection)} or {@link * #withSubnetIds(java.util.Collection)} if you want to override the * existing values. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param subnetIds A list of VPC subnet IDs for the cache subnet group. 
* * @return A reference to this updated object so that method calls can be chained * together. */ public CreateCacheSubnetGroupRequest withSubnetIds(String... subnetIds) { if (getSubnetIds() == null) setSubnetIds(new java.util.ArrayList<String>(subnetIds.length)); for (String value : subnetIds) { getSubnetIds().add(value); } return this; } /** * A list of VPC subnet IDs for the cache subnet group. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param subnetIds A list of VPC subnet IDs for the cache subnet group. * * @return A reference to this updated object so that method calls can be chained * together. */ public CreateCacheSubnetGroupRequest withSubnetIds(java.util.Collection<String> subnetIds) { if (subnetIds == null) { this.subnetIds = null; } else { com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIdsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(subnetIds.size()); subnetIdsCopy.addAll(subnetIds); this.subnetIds = subnetIdsCopy; } return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getCacheSubnetGroupName() != null) sb.append("CacheSubnetGroupName: " + getCacheSubnetGroupName() + ","); if (getCacheSubnetGroupDescription() != null) sb.append("CacheSubnetGroupDescription: " + getCacheSubnetGroupDescription() + ","); if (getSubnetIds() != null) sb.append("SubnetIds: " + getSubnetIds() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getCacheSubnetGroupName() == null) ? 0 : getCacheSubnetGroupName().hashCode()); hashCode = prime * hashCode + ((getCacheSubnetGroupDescription() == null) ? 
0 : getCacheSubnetGroupDescription().hashCode()); hashCode = prime * hashCode + ((getSubnetIds() == null) ? 0 : getSubnetIds().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateCacheSubnetGroupRequest == false) return false; CreateCacheSubnetGroupRequest other = (CreateCacheSubnetGroupRequest)obj; if (other.getCacheSubnetGroupName() == null ^ this.getCacheSubnetGroupName() == null) return false; if (other.getCacheSubnetGroupName() != null && other.getCacheSubnetGroupName().equals(this.getCacheSubnetGroupName()) == false) return false; if (other.getCacheSubnetGroupDescription() == null ^ this.getCacheSubnetGroupDescription() == null) return false; if (other.getCacheSubnetGroupDescription() != null && other.getCacheSubnetGroupDescription().equals(this.getCacheSubnetGroupDescription()) == false) return false; if (other.getSubnetIds() == null ^ this.getSubnetIds() == null) return false; if (other.getSubnetIds() != null && other.getSubnetIds().equals(this.getSubnetIds()) == false) return false; return true; } @Override public CreateCacheSubnetGroupRequest clone() { return (CreateCacheSubnetGroupRequest) super.clone(); } }
awebneck/aws-sdk-java
aws-java-sdk-elasticache/src/main/java/com/amazonaws/services/elasticache/model/CreateCacheSubnetGroupRequest.java
Java
apache-2.0
10,465
<?php
/**
 * Copyright 2011 Bas de Nooijer.
 * Copyright 2011 Gasol Wu. PIXNET Digital Media Corporation.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 * The views and conclusions contained in the software and documentation are
 * those of the authors and should not be interpreted as representing official
 * policies, either expressed or implied, of the copyright holder.
* * @copyright Copyright 2011 Bas de Nooijer <solarium@raspberry.nl> * @copyright Copyright 2011 Gasol Wu <gasol.wu@gmail.com> * @license http://github.com/basdenooijer/solarium/raw/master/COPYING * @link http://www.solarium-project.org/ * * @package Solarium * @subpackage Client */ /** * Build a MoreLikeThis request * * @package Solarium * @subpackage Client */ class Solarium_Client_RequestBuilder_MoreLikeThis extends Solarium_Client_RequestBuilder_Select { /** * Build request for a MoreLikeThis query * * @param Solarium_Query_MoreLikeThis $query * @return Solarium_Client_Request */ public function build($query) { $request = parent::build($query); // add mlt params to request $request->addParam('mlt.interestingTerms', $query->getInterestingTerms()); $request->addParam('mlt.match.include', $query->getMatchInclude()); $request->addParam('mlt.match.offset', $query->getStart()); $request->addParam('mlt.fl', $query->getMltFields()); $request->addParam('mlt.mintf', $query->getMinimumTermFrequency()); $request->addParam('mlt.mindf', $query->getMinimumDocumentFrequency()); $request->addParam('mlt.minwl', $query->getMinimumWordLength()); $request->addParam('mlt.maxwl', $query->getMaximumWordLength()); $request->addParam('mlt.maxqt', $query->getMaximumQueryTerms()); $request->addParam('mlt.maxntp', $query->getMaximumNumberOfTokens()); $request->addParam('mlt.boost', $query->getBoost()); $request->addParam('mlt.qf', $query->getQueryFields()); // convert query to stream if necessary if (true === $query->getQueryStream()) { $request->removeParam('q'); $request->setRawData($query->getQuery()); $request->setMethod(Solarium_Client_Request::METHOD_POST); $request->addHeader('Content-Type: text/plain; charset=utf-8'); } return $request; } }
Asparagirl/LeafSeek
leafseek-frontend/solarium_v2.3.0/Client/RequestBuilder/MoreLikeThis.php
PHP
apache-2.0
3,689
/* Copyright (c) 2004-2010, The Dojo Foundation All Rights Reserved. Available via Academic Free License >= 2.1 OR the modified BSD license. see: http://dojotoolkit.org/license for details */ /* This is an optimized version of Dojo, built for deployment and not for development. To get sources and documentation, please visit: http://dojotoolkit.org */ ;(function(){ /* dojo, dijit, and dojox must always be the first three, and in that order. djConfig.scopeMap = [ ["dojo", "fojo"], ["dijit", "fijit"], ["dojox", "fojox"] ] */ eval("var djConfig = {scopeMap:[[\"dojo\",\"owfdojo\"],[\"dijit\",\"owfdijit\"],[\"dojox\",\"owfdojox\"]]};"); //The null below can be relaced by a build-time value used instead of djConfig.scopeMap. var sMap = null; //See if new scopes need to be defined. if((sMap || (typeof djConfig != "undefined" && djConfig.scopeMap)) && (typeof window != "undefined")){ var scopeDef = "", scopePrefix = "", scopeSuffix = "", scopeMap = {}, scopeMapRev = {}; sMap = sMap || djConfig.scopeMap; for(var i = 0; i < sMap.length; i++){ //Make local variables, then global variables that use the locals. var newScope = sMap[i]; scopeDef += "var " + newScope[0] + " = {}; " + newScope[1] + " = " + newScope[0] + ";" + newScope[1] + "._scopeName = '" + newScope[1] + "';"; scopePrefix += (i == 0 ? "" : ",") + newScope[0]; scopeSuffix += (i == 0 ? "" : ",") + newScope[1]; scopeMap[newScope[0]] = newScope[1]; scopeMapRev[newScope[1]] = newScope[0]; } eval(scopeDef + "dojo._scopeArgs = [" + scopeSuffix + "];"); dojo._scopePrefixArgs = scopePrefix; dojo._scopePrefix = "(function(" + scopePrefix + "){"; dojo._scopeSuffix = "})(" + scopeSuffix + ")"; dojo._scopeMap = scopeMap; dojo._scopeMapRev = scopeMapRev; } /*===== // note: // 'djConfig' does not exist under 'dojo.*' so that it can be set before the // 'dojo' variable exists. // note: // Setting any of these variables *after* the library has loaded does // nothing at all. 
djConfig = { // summary: // Application code can set the global 'djConfig' prior to loading // the library to override certain global settings for how dojo works. // // isDebug: Boolean // Defaults to `false`. If set to `true`, ensures that Dojo provides // extended debugging feedback via Firebug. If Firebug is not available // on your platform, setting `isDebug` to `true` will force Dojo to // pull in (and display) the version of Firebug Lite which is // integrated into the Dojo distribution, thereby always providing a // debugging/logging console when `isDebug` is enabled. Note that // Firebug's `console.*` methods are ALWAYS defined by Dojo. If // `isDebug` is false and you are on a platform without Firebug, these // methods will be defined as no-ops. isDebug: false, // debugAtAllCosts: Boolean // Defaults to `false`. If set to `true`, this triggers an alternate // mode of the package system in which dependencies are detected and // only then are resources evaluated in dependency order via // `<script>` tag inclusion. This may double-request resources and // cause problems with scripts which expect `dojo.require()` to // preform synchronously. `debugAtAllCosts` can be an invaluable // debugging aid, but when using it, ensure that all code which // depends on Dojo modules is wrapped in `dojo.addOnLoad()` handlers. // Due to the somewhat unpredictable side-effects of using // `debugAtAllCosts`, it is strongly recommended that you enable this // flag as a last resort. `debugAtAllCosts` has no effect when loading // resources across domains. For usage information, see the // [Dojo Book](http://dojotoolkit.org/book/book-dojo/part-4-meta-dojo-making-your-dojo-code-run-faster-and-better/debugging-facilities/deb) debugAtAllCosts: false, // locale: String // The locale to assume for loading localized resources in this page, // specified according to [RFC 3066](http://www.ietf.org/rfc/rfc3066.txt). // Must be specified entirely in lowercase, e.g. `en-us` and `zh-cn`. 
// See the documentation for `dojo.i18n` and `dojo.requireLocalization` // for details on loading localized resources. If no locale is specified, // Dojo assumes the locale of the user agent, according to `navigator.userLanguage` // or `navigator.language` properties. locale: undefined, // extraLocale: Array // No default value. Specifies additional locales whose // resources should also be loaded alongside the default locale when // calls to `dojo.requireLocalization()` are processed. extraLocale: undefined, // baseUrl: String // The directory in which `dojo.js` is located. Under normal // conditions, Dojo auto-detects the correct location from which it // was loaded. You may need to manually configure `baseUrl` in cases // where you have renamed `dojo.js` or in which `<base>` tags confuse // some browsers (e.g. IE 6). The variable `dojo.baseUrl` is assigned // either the value of `djConfig.baseUrl` if one is provided or the // auto-detected root if not. Other modules are located relative to // this path. The path should end in a slash. baseUrl: undefined, // modulePaths: Object // A map of module names to paths relative to `dojo.baseUrl`. The // key/value pairs correspond directly to the arguments which // `dojo.registerModulePath` accepts. Specifiying // `djConfig.modulePaths = { "foo": "../../bar" }` is the equivalent // of calling `dojo.registerModulePath("foo", "../../bar");`. Multiple // modules may be configured via `djConfig.modulePaths`. modulePaths: {}, // afterOnLoad: Boolean // Indicates Dojo was added to the page after the page load. In this case // Dojo will not wait for the page DOMContentLoad/load events and fire // its dojo.addOnLoad callbacks after making sure all outstanding // dojo.required modules have loaded. Only works with a built dojo.js, // it does not work the dojo.js directly from source control. afterOnLoad: false, // addOnLoad: Function or Array // Adds a callback via dojo.addOnLoad. 
Useful when Dojo is added after // the page loads and djConfig.afterOnLoad is true. Supports the same // arguments as dojo.addOnLoad. When using a function reference, use // `djConfig.addOnLoad = function(){};`. For object with function name use // `djConfig.addOnLoad = [myObject, "functionName"];` and for object with // function reference use // `djConfig.addOnLoad = [myObject, function(){}];` addOnLoad: null, // require: Array // An array of module names to be loaded immediately after dojo.js has been included // in a page. require: [], // defaultDuration: Array // Default duration, in milliseconds, for wipe and fade animations within dijits. // Assigned to dijit.defaultDuration. defaultDuration: 200, // dojoBlankHtmlUrl: String // Used by some modules to configure an empty iframe. Used by dojo.io.iframe and // dojo.back, and dijit popup support in IE where an iframe is needed to make sure native // controls do not bleed through the popups. Normally this configuration variable // does not need to be set, except when using cross-domain/CDN Dojo builds. // Save dojo/resources/blank.html to your domain and set `djConfig.dojoBlankHtmlUrl` // to the path on your domain your copy of blank.html. dojoBlankHtmlUrl: undefined, // ioPublish: Boolean? // Set this to true to enable publishing of topics for the different phases of // IO operations. Publishing is done via dojo.publish. See dojo.__IoPublish for a list // of topics that are published. ioPublish: false, // useCustomLogger: Anything? // If set to a value that evaluates to true such as a string or array and // isDebug is true and Firebug is not available or running, then it bypasses // the creation of Firebug Lite allowing you to define your own console object. useCustomLogger: undefined, // transparentColor: Array // Array containing the r, g, b components used as transparent color in dojo.Color; // if undefined, [255,255,255] (white) will be used. 
transparentColor: undefined, // skipIeDomLoaded: Boolean // For IE only, skip the DOMContentLoaded hack used. Sometimes it can cause an Operation // Aborted error if the rest of the page triggers script defers before the DOM is ready. // If this is config value is set to true, then dojo.addOnLoad callbacks will not be // triggered until the page load event, which is after images and iframes load. If you // want to trigger the callbacks sooner, you can put a script block in the bottom of // your HTML that calls dojo._loadInit();. If you are using multiversion support, change // "dojo." to the appropriate scope name for dojo. skipIeDomLoaded: false } =====*/ (function(){ // firebug stubs if(typeof this["loadFirebugConsole"] == "function"){ // for Firebug 1.2 this["loadFirebugConsole"](); }else{ this.console = this.console || {}; // Be careful to leave 'log' always at the end var cn = [ "assert", "count", "debug", "dir", "dirxml", "error", "group", "groupEnd", "info", "profile", "profileEnd", "time", "timeEnd", "trace", "warn", "log" ]; var i=0, tn; while((tn=cn[i++])){ if(!console[tn]){ (function(){ var tcn = tn+""; console[tcn] = ('log' in console) ? function(){ var a = Array.apply({}, arguments); a.unshift(tcn+":"); console["log"](a.join(" ")); } : function(){} console[tcn]._fake = true; })(); } } } //TODOC: HOW TO DOC THIS? // dojo is the root variable of (almost all) our public symbols -- make sure it is defined. if(typeof dojo == "undefined"){ dojo = { _scopeName: "dojo", _scopePrefix: "", _scopePrefixArgs: "", _scopeSuffix: "", _scopeMap: {}, _scopeMapRev: {} }; } var d = dojo; //Need placeholders for dijit and dojox for scoping code. if(typeof dijit == "undefined"){ dijit = {_scopeName: "dijit"}; } if(typeof dojox == "undefined"){ dojox = {_scopeName: "dojox"}; } if(!d._scopeArgs){ d._scopeArgs = [dojo, dijit, dojox]; } /*===== dojo.global = { // summary: // Alias for the global scope // (e.g. the window object in a browser). 
// description: // Refer to 'dojo.global' rather than referring to window to ensure your // code runs correctly in contexts other than web browsers (e.g. Rhino on a server). } =====*/ d.global = this; d.config =/*===== djConfig = =====*/{ isDebug: false, debugAtAllCosts: false }; if(typeof djConfig != "undefined"){ for(var opt in djConfig){ d.config[opt] = djConfig[opt]; } } /*===== // Override locale setting, if specified dojo.locale = { // summary: the locale as defined by Dojo (read-only) }; =====*/ dojo.locale = d.config.locale; var rev = "$Rev: 22487 $".match(/\d+/); /*===== dojo.version = function(){ // summary: // Version number of the Dojo Toolkit // major: Integer // Major version. If total version is "1.2.0beta1", will be 1 // minor: Integer // Minor version. If total version is "1.2.0beta1", will be 2 // patch: Integer // Patch version. If total version is "1.2.0beta1", will be 0 // flag: String // Descriptor flag. If total version is "1.2.0beta1", will be "beta1" // revision: Number // The SVN rev from which dojo was pulled this.major = 0; this.minor = 0; this.patch = 0; this.flag = ""; this.revision = 0; } =====*/ dojo.version = { major: 1, minor: 0, patch: 0, flag: "", revision: rev ? +rev[0] : NaN, toString: function(){ with(d.version){ return major + "." + minor + "." + patch + flag + " (" + revision + ")"; // String } } } // Register with the OpenAjax hub if(typeof OpenAjax != "undefined"){ OpenAjax.hub.registerLibrary(dojo._scopeName, "http://dojotoolkit.org", d.version.toString()); } var extraNames, extraLen, empty = {}; for(var i in {toString: 1}){ extraNames = []; break; } dojo._extraNames = extraNames = extraNames || ["hasOwnProperty", "valueOf", "isPrototypeOf", "propertyIsEnumerable", "toLocaleString", "toString", "constructor"]; extraLen = extraNames.length; dojo._mixin = function(/*Object*/ target, /*Object*/ source){ // summary: // Adds all properties and methods of source to target. 
This addition // is "prototype extension safe", so that instances of objects // will not pass along prototype defaults. var name, s, i; for(name in source){ // the "tobj" condition avoid copying properties in "source" // inherited from Object.prototype. For example, if target has a custom // toString() method, don't overwrite it with the toString() method // that source inherited from Object.prototype s = source[name]; if(!(name in target) || (target[name] !== s && (!(name in empty) || empty[name] !== s))){ target[name] = s; } } // IE doesn't recognize some custom functions in for..in if(extraLen && source){ for(i = 0; i < extraLen; ++i){ name = extraNames[i]; s = source[name]; if(!(name in target) || (target[name] !== s && (!(name in empty) || empty[name] !== s))){ target[name] = s; } } } return target; // Object } dojo.mixin = function(/*Object*/obj, /*Object...*/props){ // summary: // Adds all properties and methods of props to obj and returns the // (now modified) obj. // description: // `dojo.mixin` can mix multiple source objects into a // destination object which is then returned. Unlike regular // `for...in` iteration, `dojo.mixin` is also smart about avoiding // extensions which other toolkits may unwisely add to the root // object prototype // obj: // The object to mix properties into. Also the return value. // props: // One or more objects whose values are successively copied into // obj. If more than one of these objects contain the same value, // the one specified last in the function call will "win". // example: // make a shallow copy of an object // | var copy = dojo.mixin({}, source); // example: // many class constructors often take an object which specifies // values to be configured on the object. 
In this case, it is // often simplest to call `dojo.mixin` on the `this` object: // | dojo.declare("acme.Base", null, { // | constructor: function(properties){ // | // property configuration: // | dojo.mixin(this, properties); // | // | console.log(this.quip); // | // ... // | }, // | quip: "I wasn't born yesterday, you know - I've seen movies.", // | // ... // | }); // | // | // create an instance of the class and configure it // | var b = new acme.Base({quip: "That's what it does!" }); // example: // copy in properties from multiple objects // | var flattened = dojo.mixin( // | { // | name: "Frylock", // | braces: true // | }, // | { // | name: "Carl Brutanananadilewski" // | } // | ); // | // | // will print "Carl Brutanananadilewski" // | console.log(flattened.name); // | // will print "true" // | console.log(flattened.braces); if(!obj){ obj = {}; } for(var i=1, l=arguments.length; i<l; i++){ d._mixin(obj, arguments[i]); } return obj; // Object } dojo._getProp = function(/*Array*/parts, /*Boolean*/create, /*Object*/context){ var obj=context || d.global; for(var i=0, p; obj && (p=parts[i]); i++){ if(i == 0 && d._scopeMap[p]){ p = d._scopeMap[p]; } obj = (p in obj ? obj[p] : (create ? obj[p]={} : undefined)); } return obj; // mixed } dojo.setObject = function(/*String*/name, /*Object*/value, /*Object?*/context){ // summary: // Set a property from a dot-separated string, such as "A.B.C" // description: // Useful for longer api chains where you have to test each object in // the chain, or when you have an object reference in string format. // Objects are created as needed along `path`. Returns the passed // value if setting is successful or `undefined` if not. // name: // Path to a property, in the form "A.B.C". // context: // Optional. Object to use as root of path. Defaults to // `dojo.global`. 
// example: // set the value of `foo.bar.baz`, regardless of whether // intermediate objects already exist: // | dojo.setObject("foo.bar.baz", value); // example: // without `dojo.setObject`, we often see code like this: // | // ensure that intermediate objects are available // | if(!obj["parent"]){ obj.parent = {}; } // | if(!obj.parent["child"]){ obj.parent.child= {}; } // | // now we can safely set the property // | obj.parent.child.prop = "some value"; // wheras with `dojo.setObject`, we can shorten that to: // | dojo.setObject("parent.child.prop", "some value", obj); var parts=name.split("."), p=parts.pop(), obj=d._getProp(parts, true, context); return obj && p ? (obj[p]=value) : undefined; // Object } dojo.getObject = function(/*String*/name, /*Boolean?*/create, /*Object?*/context){ // summary: // Get a property from a dot-separated string, such as "A.B.C" // description: // Useful for longer api chains where you have to test each object in // the chain, or when you have an object reference in string format. // name: // Path to an property, in the form "A.B.C". // create: // Optional. Defaults to `false`. If `true`, Objects will be // created at any point along the 'path' that is undefined. // context: // Optional. Object to use as root of path. Defaults to // 'dojo.global'. Null may be passed. return d._getProp(name.split("."), create, context); // Object } dojo.exists = function(/*String*/name, /*Object?*/obj){ // summary: // determine if an object supports a given method // description: // useful for longer api chains where you have to test each object in // the chain. Useful only for object and method detection. // Not useful for testing generic properties on an object. // In particular, dojo.exists("foo.bar") when foo.bar = "" // will return false. Use ("bar" in foo) to test for those cases. // name: // Path to an object, in the form "A.B.C". // obj: // Object to use as root of path. Defaults to // 'dojo.global'. Null may be passed. 
// example: // | // define an object // | var foo = { // | bar: { } // | }; // | // | // search the global scope // | dojo.exists("foo.bar"); // true // | dojo.exists("foo.bar.baz"); // false // | // | // search from a particular scope // | dojo.exists("bar", foo); // true // | dojo.exists("bar.baz", foo); // false return !!d.getObject(name, false, obj); // Boolean } dojo["eval"] = function(/*String*/ scriptFragment){ // summary: // A legacy method created for use exclusively by internal Dojo methods. Do not use // this method directly, the behavior of this eval will differ from the normal // browser eval. // description: // Placed in a separate function to minimize size of trapped // exceptions. Calling eval() directly from some other scope may // complicate tracebacks on some platforms. // returns: // The result of the evaluation. Often `undefined` return d.global.eval ? d.global.eval(scriptFragment) : eval(scriptFragment); // Object } /*===== dojo.deprecated = function(behaviour, extra, removal){ // summary: // Log a debug message to indicate that a behavior has been // deprecated. // behaviour: String // The API or behavior being deprecated. Usually in the form // of "myApp.someFunction()". // extra: String? // Text to append to the message. Often provides advice on a // new function or facility to achieve the same goal during // the deprecation period. // removal: String? // Text to indicate when in the future the behavior will be // removed. Usually a version number. // example: // | dojo.deprecated("myApp.getTemp()", "use myApp.getLocaleTemp() instead", "1.0"); } dojo.experimental = function(moduleName, extra){ // summary: Marks code as experimental. // description: // This can be used to mark a function, file, or module as // experimental. Experimental code is not ready to be used, and the // APIs are subject to change without notice. Experimental code may be // completed deleted without going through the normal deprecation // process. 
// moduleName: String // The name of a module, or the name of a module file or a specific // function // extra: String? // some additional message for the user // example: // | dojo.experimental("dojo.data.Result"); // example: // | dojo.experimental("dojo.weather.toKelvin()", "PENDING approval from NOAA"); } =====*/ //Real functions declared in dojo._firebug.firebug. d.deprecated = d.experimental = function(){}; })(); // vim:ai:ts=4:noet /* * loader.js - A bootstrap module. Runs before the hostenv_*.js file. Contains * all of the package loading methods. */ (function(){ var d = dojo; d.mixin(d, { _loadedModules: {}, _inFlightCount: 0, _hasResource: {}, _modulePrefixes: { dojo: { name: "dojo", value: "." }, // dojox: { name: "dojox", value: "../dojox" }, // dijit: { name: "dijit", value: "../dijit" }, doh: { name: "doh", value: "../util/doh" }, tests: { name: "tests", value: "tests" } }, _moduleHasPrefix: function(/*String*/module){ // summary: checks to see if module has been established var mp = d._modulePrefixes; return !!(mp[module] && mp[module].value); // Boolean }, _getModulePrefix: function(/*String*/module){ // summary: gets the prefix associated with module var mp = d._modulePrefixes; if(d._moduleHasPrefix(module)){ return mp[module].value; // String } return module; // String }, _loadedUrls: [], //WARNING: // This variable is referenced by packages outside of bootstrap: // FloatingPane.js and undo/browser.js _postLoad: false, //Egad! Lots of test files push on this directly instead of using dojo.addOnLoad. _loaders: [], _unloaders: [], _loadNotifying: false }); dojo._loadPath = function(/*String*/relpath, /*String?*/module, /*Function?*/cb){ // summary: // Load a Javascript module given a relative path // // description: // Loads and interprets the script located at relpath, which is // relative to the script root directory. 
If the script is found but // its interpretation causes a runtime exception, that exception is // not caught by us, so the caller will see it. We return a true // value if and only if the script is found. // // relpath: // A relative path to a script (no leading '/', and typically ending // in '.js'). // module: // A module whose existance to check for after loading a path. Can be // used to determine success or failure of the load. // cb: // a callback function to pass the result of evaluating the script var uri = ((relpath.charAt(0) == '/' || relpath.match(/^\w+:/)) ? "" : d.baseUrl) + relpath; try{ return !module ? d._loadUri(uri, cb) : d._loadUriAndCheck(uri, module, cb); // Boolean }catch(e){ console.error(e); return false; // Boolean } } dojo._loadUri = function(/*String*/uri, /*Function?*/cb){ // summary: // Loads JavaScript from a URI // description: // Reads the contents of the URI, and evaluates the contents. This is // used to load modules as well as resource bundles. Returns true if // it succeeded. Returns false if the URI reading failed. Throws if // the evaluation throws. // uri: a uri which points at the script to be loaded // cb: // a callback function to process the result of evaluating the script // as an expression, typically used by the resource bundle loader to // load JSON-style resources if(d._loadedUrls[uri]){ return true; // Boolean } d._inFlightCount++; // block addOnLoad calls that arrive while we're busy downloading var contents = d._getText(uri, true); if(contents){ // not 404, et al d._loadedUrls[uri] = true; d._loadedUrls.push(uri); if(cb){ contents = '('+contents+')'; }else{ //Only do the scoping if no callback. If a callback is specified, //it is most likely the i18n bundle stuff. 
contents = d._scopePrefix + contents + d._scopeSuffix; } if(!d.isIE){ contents += "\r\n//@ sourceURL=" + uri; } // debugging assist for Firebug var value = d["eval"](contents); if(cb){ cb(value); } } // Check to see if we need to call _callLoaded() due to an addOnLoad() that arrived while we were busy downloading if(--d._inFlightCount == 0 && d._postLoad && d._loaders.length){ // We shouldn't be allowed to get here but Firefox allows an event // (mouse, keybd, async xhrGet) to interrupt a synchronous xhrGet. // If the current script block contains multiple require() statements, then after each // require() returns, inFlightCount == 0, but we want to hold the _callLoaded() until // all require()s are done since the out-of-sequence addOnLoad() presumably needs them all. // setTimeout allows the next require() to start (if needed), and then we check this again. setTimeout(function(){ // If inFlightCount > 0, then multiple require()s are running sequentially and // the next require() started after setTimeout() was executed but before we got here. if(d._inFlightCount == 0){ d._callLoaded(); } }, 0); } return !!contents; // Boolean: contents? true : false } // FIXME: probably need to add logging to this method dojo._loadUriAndCheck = function(/*String*/uri, /*String*/moduleName, /*Function?*/cb){ // summary: calls loadUri then findModule and returns true if both succeed var ok = false; try{ ok = d._loadUri(uri, cb); }catch(e){ console.error("failed loading " + uri + " with error: " + e); } return !!(ok && d._loadedModules[moduleName]); // Boolean } dojo.loaded = function(){ // summary: // signal fired when initial environment and package loading is // complete. You should use dojo.addOnLoad() instead of doing a // direct dojo.connect() to this method in order to handle // initialization tasks that require the environment to be // initialized. In a browser host, declarative widgets will // be constructed when this function finishes runing. 
d._loadNotifying = true; d._postLoad = true; var mll = d._loaders; //Clear listeners so new ones can be added //For other xdomain package loads after the initial load. d._loaders = []; for(var x = 0; x < mll.length; x++){ mll[x](); } d._loadNotifying = false; //Make sure nothing else got added to the onload queue //after this first run. If something did, and we are not waiting for any //more inflight resources, run again. if(d._postLoad && d._inFlightCount == 0 && mll.length){ d._callLoaded(); } } dojo.unloaded = function(){ // summary: // signal fired by impending environment destruction. You should use // dojo.addOnUnload() instead of doing a direct dojo.connect() to this // method to perform page/application cleanup methods. See // dojo.addOnUnload for more info. var mll = d._unloaders; while(mll.length){ (mll.pop())(); } } d._onto = function(arr, obj, fn){ if(!fn){ arr.push(obj); }else if(fn){ var func = (typeof fn == "string") ? obj[fn] : fn; arr.push(function(){ func.call(obj); }); } } dojo.ready = dojo.addOnLoad = function(/*Object*/obj, /*String|Function?*/functionName){ // summary: // Registers a function to be triggered after the DOM and dojo.require() calls // have finished loading. // // description: // Registers a function to be triggered after the DOM has finished // loading and `dojo.require` modules have loaded. Widgets declared in markup // have been instantiated if `djConfig.parseOnLoad` is true when this fires. // // Images and CSS files may or may not have finished downloading when // the specified function is called. (Note that widgets' CSS and HTML // code is guaranteed to be downloaded before said widgets are // instantiated, though including css resouces BEFORE any script elements // is highly recommended). 
// // example: // Register an anonymous function to run when everything is ready // | dojo.addOnLoad(function(){ doStuff(); }); // // example: // Register a function to run when everything is ready by pointer: // | var init = function(){ doStuff(); } // | dojo.addOnLoad(init); // // example: // Register a function to run scoped to `object`, either by name or anonymously: // | dojo.addOnLoad(object, "functionName"); // | dojo.addOnLoad(object, function(){ doStuff(); }); d._onto(d._loaders, obj, functionName); //Added for xdomain loading. dojo.addOnLoad is used to //indicate callbacks after doing some dojo.require() statements. //In the xdomain case, if all the requires are loaded (after initial //page load), then immediately call any listeners. if(d._postLoad && d._inFlightCount == 0 && !d._loadNotifying){ d._callLoaded(); } } //Support calling dojo.addOnLoad via djConfig.addOnLoad. Support all the //call permutations of dojo.addOnLoad. Mainly useful when dojo is added //to the page after the page has loaded. var dca = d.config.addOnLoad; if(dca){ d.addOnLoad[(dca instanceof Array ? "apply" : "call")](d, dca); } dojo._modulesLoaded = function(){ if(d._postLoad){ return; } if(d._inFlightCount > 0){ console.warn("files still in flight!"); return; } d._callLoaded(); } dojo._callLoaded = function(){ // The "object" check is for IE, and the other opera check fixes an // issue in Opera where it could not find the body element in some // widget test cases. For 0.9, maybe route all browsers through the // setTimeout (need protection still for non-browser environments // though). This might also help the issue with FF 2.0 and freezing // issues where we try to do sync xhr while background css images are // being loaded (trac #2572)? Consider for 0.9. if(typeof setTimeout == "object" || (d.config.useXDomain && d.isOpera)){ setTimeout( d.isAIR ? 
function(){ d.loaded(); } : d._scopeName + ".loaded();", 0); }else{ d.loaded(); } } dojo._getModuleSymbols = function(/*String*/modulename){ // summary: // Converts a module name in dotted JS notation to an array // representing the path in the source tree var syms = modulename.split("."); for(var i = syms.length; i>0; i--){ var parentModule = syms.slice(0, i).join("."); if(i == 1 && !d._moduleHasPrefix(parentModule)){ // Support default module directory (sibling of dojo) for top-level modules syms[0] = "../" + syms[0]; }else{ var parentModulePath = d._getModulePrefix(parentModule); if(parentModulePath != parentModule){ syms.splice(0, i, parentModulePath); break; } } } return syms; // Array } dojo._global_omit_module_check = false; dojo.loadInit = function(/*Function*/init){ // summary: // Executes a function that needs to be executed for the loader's dojo.requireIf // resolutions to work. This is needed mostly for the xdomain loader case where // a function needs to be executed to set up the possible values for a dojo.requireIf // call. // init: // a function reference. Executed immediately. // description: This function is mainly a marker for the xdomain loader to know parts of // code that needs be executed outside the function wrappper that is placed around modules. // The init function could be executed more than once, and it should make no assumptions // on what is loaded, or what modules are available. Only the functionality in Dojo Base // is allowed to be used. Avoid using this method. For a valid use case, // see the source for dojox.gfx. init(); } dojo._loadModule = dojo.require = function(/*String*/moduleName, /*Boolean?*/omitModuleCheck){ // summary: // loads a Javascript module from the appropriate URI // moduleName: // module name to load, using periods for separators, // e.g. "dojo.date.locale". Module paths are de-referenced by dojo's // internal mapping of locations to names and are disambiguated by // longest prefix. 
See `dojo.registerModulePath()` for details on // registering new modules. // omitModuleCheck: // if `true`, omitModuleCheck skips the step of ensuring that the // loaded file actually defines the symbol it is referenced by. // For example if it called as `dojo.require("a.b.c")` and the // file located at `a/b/c.js` does not define an object `a.b.c`, // and exception will be throws whereas no exception is raised // when called as `dojo.require("a.b.c", true)` // description: // Modules are loaded via dojo.require by using one of two loaders: the normal loader // and the xdomain loader. The xdomain loader is used when dojo was built with a // custom build that specified loader=xdomain and the module lives on a modulePath // that is a whole URL, with protocol and a domain. The versions of Dojo that are on // the Google and AOL CDNs use the xdomain loader. // // If the module is loaded via the xdomain loader, it is an asynchronous load, since // the module is added via a dynamically created script tag. This // means that dojo.require() can return before the module has loaded. However, this // should only happen in the case where you do dojo.require calls in the top-level // HTML page, or if you purposely avoid the loader checking for dojo.require // dependencies in your module by using a syntax like dojo["require"] to load the module. // // Sometimes it is useful to not have the loader detect the dojo.require calls in the // module so that you can dynamically load the modules as a result of an action on the // page, instead of right at module load time. // // Also, for script blocks in an HTML page, the loader does not pre-process them, so // it does not know to download the modules before the dojo.require calls occur. // // So, in those two cases, when you want on-the-fly module loading or for script blocks // in the HTML page, special care must be taken if the dojo.required code is loaded // asynchronously. 
To make sure you can execute code that depends on the dojo.required // modules, be sure to add the code that depends on the modules in a dojo.addOnLoad() // callback. dojo.addOnLoad waits for all outstanding modules to finish loading before // executing. Example: // // | <script type="text/javascript"> // | dojo.require("foo"); // | dojo.require("bar"); // | dojo.addOnLoad(function(){ // | //you can now safely do something with foo and bar // | }); // | </script> // // This type of syntax works with both xdomain and normal loaders, so it is good // practice to always use this idiom for on-the-fly code loading and in HTML script // blocks. If at some point you change loaders and where the code is loaded from, // it will all still work. // // More on how dojo.require // `dojo.require("A.B")` first checks to see if symbol A.B is // defined. If it is, it is simply returned (nothing to do). // // If it is not defined, it will look for `A/B.js` in the script root // directory. // // `dojo.require` throws an excpetion if it cannot find a file // to load, or if the symbol `A.B` is not defined after loading. // // It returns the object `A.B`, but note the caveats above about on-the-fly loading and // HTML script blocks when the xdomain loader is loading a module. // // `dojo.require()` does nothing about importing symbols into // the current namespace. It is presumed that the caller will // take care of that. For example, to import all symbols into a // local block, you might write: // // | with (dojo.require("A.B")) { // | ... // | } // // And to import just the leaf symbol to a local variable: // // | var B = dojo.require("A.B"); // | ... // returns: the required namespace object omitModuleCheck = d._global_omit_module_check || omitModuleCheck; //Check if it is already loaded. var module = d._loadedModules[moduleName]; if(module){ return module; } // convert periods to slashes var relpath = d._getModuleSymbols(moduleName).join("/") + '.js'; var modArg = !omitModuleCheck ? 
moduleName : null; var ok = d._loadPath(relpath, modArg); if(!ok && !omitModuleCheck){ throw new Error("Could not load '" + moduleName + "'; last tried '" + relpath + "'"); } // check that the symbol was defined // Don't bother if we're doing xdomain (asynchronous) loading. if(!omitModuleCheck && !d._isXDomain){ // pass in false so we can give better error module = d._loadedModules[moduleName]; if(!module){ throw new Error("symbol '" + moduleName + "' is not defined after loading '" + relpath + "'"); } } return module; } dojo.provide = function(/*String*/ resourceName){ // summary: // Register a resource with the package system. Works in conjunction with `dojo.require` // // description: // Each javascript source file is called a resource. When a // resource is loaded by the browser, `dojo.provide()` registers // that it has been loaded. // // Each javascript source file must have at least one // `dojo.provide()` call at the top of the file, corresponding to // the file name. For example, `js/dojo/foo.js` must have // `dojo.provide("dojo.foo");` before any calls to // `dojo.require()` are made. // // For backwards compatibility reasons, in addition to registering // the resource, `dojo.provide()` also ensures that the javascript // object for the module exists. For example, // `dojo.provide("dojox.data.FlickrStore")`, in addition to // registering that `FlickrStore.js` is a resource for the // `dojox.data` module, will ensure that the `dojox.data` // javascript object exists, so that calls like // `dojo.data.foo = function(){ ... }` don't fail. // // In the case of a build where multiple javascript source files // are combined into one bigger file (similar to a .lib or .jar // file), that file may contain multiple dojo.provide() calls, to // note that it includes multiple resources. // // resourceName: String // A dot-sperated string identifying a resource. 
// // example: // Safely create a `my` object, and make dojo.require("my.CustomModule") work // | dojo.provide("my.CustomModule"); //Make sure we have a string. resourceName = resourceName + ""; return (d._loadedModules[resourceName] = d.getObject(resourceName, true)); // Object } //Start of old bootstrap2: dojo.platformRequire = function(/*Object*/modMap){ // summary: // require one or more modules based on which host environment // Dojo is currently operating in // description: // This method takes a "map" of arrays which one can use to // optionally load dojo modules. The map is indexed by the // possible dojo.name_ values, with two additional values: // "default" and "common". The items in the "default" array will // be loaded if none of the other items have been choosen based on // dojo.name_, set by your host environment. The items in the // "common" array will *always* be loaded, regardless of which // list is chosen. // example: // | dojo.platformRequire({ // | browser: [ // | "foo.sample", // simple module // | "foo.test", // | ["foo.bar.baz", true] // skip object check in _loadModule (dojo.require) // | ], // | default: [ "foo.sample._base" ], // | common: [ "important.module.common" ] // | }); var common = modMap.common || []; var result = common.concat(modMap[d._name] || modMap["default"] || []); for(var x=0; x<result.length; x++){ var curr = result[x]; if(curr.constructor == Array){ d._loadModule.apply(d, curr); }else{ d._loadModule(curr); } } } dojo.requireIf = function(/*Boolean*/ condition, /*String*/ resourceName){ // summary: // If the condition is true then call `dojo.require()` for the specified // resource // // example: // | dojo.requireIf(dojo.isBrowser, "my.special.Module"); if(condition === true){ // FIXME: why do we support chained require()'s here? does the build system? 
var args = []; for(var i = 1; i < arguments.length; i++){ args.push(arguments[i]); } d.require.apply(d, args); } } dojo.requireAfterIf = d.requireIf; dojo.registerModulePath = function(/*String*/module, /*String*/prefix){ // summary: // Maps a module name to a path // description: // An unregistered module is given the default path of ../[module], // relative to Dojo root. For example, module acme is mapped to // ../acme. If you want to use a different module name, use // dojo.registerModulePath. // example: // If your dojo.js is located at this location in the web root: // | /myapp/js/dojo/dojo/dojo.js // and your modules are located at: // | /myapp/js/foo/bar.js // | /myapp/js/foo/baz.js // | /myapp/js/foo/thud/xyzzy.js // Your application can tell Dojo to locate the "foo" namespace by calling: // | dojo.registerModulePath("foo", "../../foo"); // At which point you can then use dojo.require() to load the // modules (assuming they provide() the same things which are // required). The full code might be: // | <script type="text/javascript" // | src="/myapp/js/dojo/dojo/dojo.js"></script> // | <script type="text/javascript"> // | dojo.registerModulePath("foo", "../../foo"); // | dojo.require("foo.bar"); // | dojo.require("foo.baz"); // | dojo.require("foo.thud.xyzzy"); // | </script> d._modulePrefixes[module] = { name: module, value: prefix }; } dojo.requireLocalization = function(/*String*/moduleName, /*String*/bundleName, /*String?*/locale, /*String?*/availableFlatLocales){ // summary: // Declares translated resources and loads them if necessary, in the // same style as dojo.require. Contents of the resource bundle are // typically strings, but may be any name/value pair, represented in // JSON format. See also `dojo.i18n.getLocalization`. // // description: // Load translated resource bundles provided underneath the "nls" // directory within a package. Translated resources may be located in // different packages throughout the source tree. 
// // Each directory is named for a locale as specified by RFC 3066, // (http://www.ietf.org/rfc/rfc3066.txt), normalized in lowercase. // Note that the two bundles in the example do not define all the // same variants. For a given locale, bundles will be loaded for // that locale and all more general locales above it, including a // fallback at the root directory. For example, a declaration for // the "de-at" locale will first load `nls/de-at/bundleone.js`, // then `nls/de/bundleone.js` and finally `nls/bundleone.js`. The // data will be flattened into a single Object so that lookups // will follow this cascading pattern. An optional build step can // preload the bundles to avoid data redundancy and the multiple // network hits normally required to load these resources. // // moduleName: // name of the package containing the "nls" directory in which the // bundle is found // // bundleName: // bundle name, i.e. the filename without the '.js' suffix. Using "nls" as a // a bundle name is not supported, since "nls" is the name of the folder // that holds bundles. Using "nls" as the bundle name will cause problems // with the custom build. // // locale: // the locale to load (optional) By default, the browser's user // locale as defined by dojo.locale // // availableFlatLocales: // A comma-separated list of the available, flattened locales for this // bundle. This argument should only be set by the build process. // // example: // A particular widget may define one or more resource bundles, // structured in a program as follows, where moduleName is // mycode.mywidget and bundleNames available include bundleone and // bundletwo: // | ... 
// | mycode/ // | mywidget/ // | nls/ // | bundleone.js (the fallback translation, English in this example) // | bundletwo.js (also a fallback translation) // | de/ // | bundleone.js // | bundletwo.js // | de-at/ // | bundleone.js // | en/ // | (empty; use the fallback translation) // | en-us/ // | bundleone.js // | en-gb/ // | bundleone.js // | es/ // | bundleone.js // | bundletwo.js // | ...etc // | ... // d.require("dojo.i18n"); d.i18n._requireLocalization.apply(d.hostenv, arguments); }; var ore = new RegExp("^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?$"), ire = new RegExp("^((([^\\[:]+):)?([^@]+)@)?(\\[([^\\]]+)\\]|([^\\[:]*))(:([0-9]+))?$"); dojo._Url = function(/*dojo._Url|String...*/){ // summary: // Constructor to create an object representing a URL. // It is marked as private, since we might consider removing // or simplifying it. // description: // Each argument is evaluated in order relative to the next until // a canonical uri is produced. To get an absolute Uri relative to // the current document use: // new dojo._Url(document.baseURI, url) var n = null, _a = arguments, uri = [_a[0]]; // resolve uri components relative to each other for(var i = 1; i<_a.length; i++){ if(!_a[i]){ continue; } // Safari doesn't support this.constructor so we have to be explicit // FIXME: Tracked (and fixed) in Webkit bug 3537. 
// http://bugs.webkit.org/show_bug.cgi?id=3537 var relobj = new d._Url(_a[i]+""), uriobj = new d._Url(uri[0]+""); if( relobj.path == "" && !relobj.scheme && !relobj.authority && !relobj.query ){ if(relobj.fragment != n){ uriobj.fragment = relobj.fragment; } relobj = uriobj; }else if(!relobj.scheme){ relobj.scheme = uriobj.scheme; if(!relobj.authority){ relobj.authority = uriobj.authority; if(relobj.path.charAt(0) != "/"){ var path = uriobj.path.substring(0, uriobj.path.lastIndexOf("/") + 1) + relobj.path; var segs = path.split("/"); for(var j = 0; j < segs.length; j++){ if(segs[j] == "."){ // flatten "./" references if(j == segs.length - 1){ segs[j] = ""; }else{ segs.splice(j, 1); j--; } }else if(j > 0 && !(j == 1 && segs[0] == "") && segs[j] == ".." && segs[j-1] != ".."){ // flatten "../" references if(j == (segs.length - 1)){ segs.splice(j, 1); segs[j - 1] = ""; }else{ segs.splice(j - 1, 2); j -= 2; } } } relobj.path = segs.join("/"); } } } uri = []; if(relobj.scheme){ uri.push(relobj.scheme, ":"); } if(relobj.authority){ uri.push("//", relobj.authority); } uri.push(relobj.path); if(relobj.query){ uri.push("?", relobj.query); } if(relobj.fragment){ uri.push("#", relobj.fragment); } } this.uri = uri.join(""); // break the uri into its main components var r = this.uri.match(ore); this.scheme = r[2] || (r[1] ? "" : n); this.authority = r[4] || (r[3] ? "" : n); this.path = r[5]; // can never be undefined this.query = r[7] || (r[6] ? "" : n); this.fragment = r[9] || (r[8] ? "" : n); if(this.authority != n){ // server based naming authority r = this.authority.match(ire); this.user = r[3] || n; this.password = r[4] || n; this.host = r[6] || r[7]; // ipv6 || ipv4 this.port = r[9] || n; } } dojo._Url.prototype.toString = function(){ return this.uri; }; dojo.moduleUrl = function(/*String*/module, /*dojo._Url||String*/url){ // summary: // Returns a `dojo._Url` object relative to a module. 
// example: // | var pngPath = dojo.moduleUrl("acme","images/small.png"); // | console.dir(pngPath); // list the object properties // | // create an image and set it's source to pngPath's value: // | var img = document.createElement("img"); // | // NOTE: we assign the string representation of the url object // | img.src = pngPath.toString(); // | // add our image to the document // | dojo.body().appendChild(img); // example: // you may de-reference as far as you like down the package // hierarchy. This is sometimes handy to avoid lenghty relative // urls or for building portable sub-packages. In this example, // the `acme.widget` and `acme.util` directories may be located // under different roots (see `dojo.registerModulePath`) but the // the modules which reference them can be unaware of their // relative locations on the filesystem: // | // somewhere in a configuration block // | dojo.registerModulePath("acme.widget", "../../acme/widget"); // | dojo.registerModulePath("acme.util", "../../util"); // | // | // ... // | // | // code in a module using acme resources // | var tmpltPath = dojo.moduleUrl("acme.widget","templates/template.html"); // | var dataPath = dojo.moduleUrl("acme.util","resources/data.json"); var loc = d._getModuleSymbols(module).join('/'); if(!loc){ return null; } if(loc.lastIndexOf("/") != loc.length-1){ loc += "/"; } //If the path is an absolute path (starts with a / or is on another //domain/xdomain) then don't add the baseUrl. var colonIndex = loc.indexOf(":"); if(loc.charAt(0) != "/" && (colonIndex == -1 || colonIndex > loc.indexOf("/"))){ loc = d.baseUrl + loc; } return new d._Url(loc, url); // dojo._Url } })(); /*===== dojo.isBrowser = { // example: // | if(dojo.isBrowser){ ... } }; dojo.isFF = { // example: // | if(dojo.isFF > 1){ ... } }; dojo.isIE = { // example: // | if(dojo.isIE > 6){ // | // we are IE7 // | } }; dojo.isSafari = { // example: // | if(dojo.isSafari){ ... 
} // example: // Detect iPhone: // | if(dojo.isSafari && navigator.userAgent.indexOf("iPhone") != -1){ // | // we are iPhone. Note, iPod touch reports "iPod" above and fails this test. // | } }; dojo = { // isBrowser: Boolean // True if the client is a web-browser isBrowser: true, // isFF: Number | undefined // Version as a Number if client is FireFox. undefined otherwise. Corresponds to // major detected FireFox version (1.5, 2, 3, etc.) isFF: 2, // isIE: Number | undefined // Version as a Number if client is MSIE(PC). undefined otherwise. Corresponds to // major detected IE version (6, 7, 8, etc.) isIE: 6, // isKhtml: Number | undefined // Version as a Number if client is a KHTML browser. undefined otherwise. Corresponds to major // detected version. isKhtml: 0, // isWebKit: Number | undefined // Version as a Number if client is a WebKit-derived browser (Konqueror, // Safari, Chrome, etc.). undefined otherwise. isWebKit: 0, // isMozilla: Number | undefined // Version as a Number if client is a Mozilla-based browser (Firefox, // SeaMonkey). undefined otherwise. Corresponds to major detected version. isMozilla: 0, // isOpera: Number | undefined // Version as a Number if client is Opera. undefined otherwise. Corresponds to // major detected version. isOpera: 0, // isSafari: Number | undefined // Version as a Number if client is Safari or iPhone. undefined otherwise. isSafari: 0, // isChrome: Number | undefined // Version as a Number if client is Chrome browser. undefined otherwise. isChrome: 0 // isMac: Boolean // True if the client runs on Mac } =====*/ if(typeof window != 'undefined'){ dojo.isBrowser = true; dojo._name = "browser"; // attempt to figure out the path to dojo if it isn't set in the config (function(){ var d = dojo; // this is a scope protection closure. We set browser versions and grab // the URL we were loaded from here. 
// grab the node we were loaded from if(document && document.getElementsByTagName){ var scripts = document.getElementsByTagName("script"); var rePkg = /dojo(\.xd)?\.js(\W|$)/i; for(var i = 0; i < scripts.length; i++){ var src = scripts[i].getAttribute("src"); if(!src){ continue; } var m = src.match(rePkg); if(m){ // find out where we came from if(!d.config.baseUrl){ d.config.baseUrl = src.substring(0, m.index); } // and find out if we need to modify our behavior var cfg = scripts[i].getAttribute("djConfig"); if(cfg){ var cfgo = eval("({ "+cfg+" })"); for(var x in cfgo){ dojo.config[x] = cfgo[x]; } } break; // "first Dojo wins" } } } d.baseUrl = d.config.baseUrl; // fill in the rendering support information in dojo.render.* var n = navigator; var dua = n.userAgent, dav = n.appVersion, tv = parseFloat(dav); if(dua.indexOf("Opera") >= 0){ d.isOpera = tv; } if(dua.indexOf("AdobeAIR") >= 0){ d.isAIR = 1; } d.isKhtml = (dav.indexOf("Konqueror") >= 0) ? tv : 0; d.isWebKit = parseFloat(dua.split("WebKit/")[1]) || undefined; d.isChrome = parseFloat(dua.split("Chrome/")[1]) || undefined; d.isMac = dav.indexOf("Macintosh") >= 0; // safari detection derived from: // http://developer.apple.com/internet/safari/faq.html#anchor2 // http://developer.apple.com/internet/safari/uamatrix.html var index = Math.max(dav.indexOf("WebKit"), dav.indexOf("Safari"), 0); if(index && !dojo.isChrome){ // try to grab the explicit Safari version first. If we don't get // one, look for less than 419.3 as the indication that we're on something // "Safari 2-ish". d.isSafari = parseFloat(dav.split("Version/")[1]); if(!d.isSafari || parseFloat(dav.substr(index + 7)) <= 419.3){ d.isSafari = 2; } } if(dua.indexOf("Gecko") >= 0 && !d.isKhtml && !d.isWebKit){ d.isMozilla = d.isMoz = tv; } if(d.isMoz){ //We really need to get away from this. Consider a sane isGecko approach for the future. 
d.isFF = parseFloat(dua.split("Firefox/")[1] || dua.split("Minefield/")[1]) || undefined; } if(document.all && !d.isOpera){ d.isIE = parseFloat(dav.split("MSIE ")[1]) || undefined; //In cases where the page has an HTTP header or META tag with //X-UA-Compatible, then it is in emulation mode. //Make sure isIE reflects the desired version. //document.documentMode of 5 means quirks mode. //Only switch the value if documentMode's major version //is different from isIE's major version. var mode = document.documentMode; if(mode && mode != 5 && Math.floor(d.isIE) != mode){ d.isIE = mode; } } //Workaround to get local file loads of dojo to work on IE 7 //by forcing to not use native xhr. if(dojo.isIE && window.location.protocol === "file:"){ dojo.config.ieForceActiveXXhr=true; } d.isQuirks = document.compatMode == "BackCompat"; // TODO: is the HTML LANG attribute relevant? d.locale = dojo.config.locale || (d.isIE ? n.userLanguage : n.language).toLowerCase(); // These are in order of decreasing likelihood; this will change in time. d._XMLHTTP_PROGIDS = ['Msxml2.XMLHTTP', 'Microsoft.XMLHTTP', 'Msxml2.XMLHTTP.4.0']; d._xhrObj = function(){ // summary: // does the work of portably generating a new XMLHTTPRequest object. 
var http, last_e; if(!dojo.isIE || !dojo.config.ieForceActiveXXhr){ try{ http = new XMLHttpRequest(); }catch(e){} } if(!http){ for(var i=0; i<3; ++i){ var progid = d._XMLHTTP_PROGIDS[i]; try{ http = new ActiveXObject(progid); }catch(e){ last_e = e; } if(http){ d._XMLHTTP_PROGIDS = [progid]; // so faster next time break; } } } if(!http){ throw new Error("XMLHTTP not available: "+last_e); } return http; // XMLHTTPRequest instance } d._isDocumentOk = function(http){ var stat = http.status || 0, lp = location.protocol; return (stat >= 200 && stat < 300) || // Boolean stat == 304 || // allow any 2XX response code stat == 1223 || // get it out of the cache // Internet Explorer mangled the status code OR we're Titanium/browser chrome/chrome extension requesting a local file (!stat && (lp == "file:" || lp == "chrome:" || lp == "chrome-extension:" || lp == "app:") ); } //See if base tag is in use. //This is to fix http://trac.dojotoolkit.org/ticket/3973, //but really, we need to find out how to get rid of the dojo._Url reference //below and still have DOH work with the dojo.i18n test following some other //test that uses the test frame to load a document (trac #2757). //Opera still has problems, but perhaps a larger issue of base tag support //with XHR requests (hasBase is true, but the request is still made to document //path, not base path). var owloc = window.location+""; var base = document.getElementsByTagName("base"); var hasBase = (base && base.length > 0); d._getText = function(/*URI*/ uri, /*Boolean*/ fail_ok){ // summary: Read the contents of the specified uri and return those contents. // uri: // A relative or absolute uri. If absolute, it still must be in // the same "domain" as we are. // fail_ok: // Default false. If fail_ok and loading fails, return null // instead of throwing. // returns: The response text. null is returned when there is a // failure and failure is okay (an exception otherwise) // NOTE: must be declared before scope switches ie. 
this._xhrObj() var http = d._xhrObj(); if(!hasBase && dojo._Url){ uri = (new dojo._Url(owloc, uri)).toString(); } if(d.config.cacheBust){ //Make sure we have a string before string methods are used on uri uri += ""; uri += (uri.indexOf("?") == -1 ? "?" : "&") + String(d.config.cacheBust).replace(/\W+/g,""); } http.open('GET', uri, false); try{ http.send(null); if(!d._isDocumentOk(http)){ var err = Error("Unable to load "+uri+" status:"+ http.status); err.status = http.status; err.responseText = http.responseText; throw err; } }catch(e){ if(fail_ok){ return null; } // null // rethrow the exception throw e; } return http.responseText; // String } var _w = window; var _handleNodeEvent = function(/*String*/evtName, /*Function*/fp){ // summary: // non-destructively adds the specified function to the node's // evtName handler. // evtName: should be in the form "onclick" for "onclick" handlers. // Make sure you pass in the "on" part. var _a = _w.attachEvent || _w.addEventListener; evtName = _w.attachEvent ? evtName : evtName.substring(2); _a(evtName, function(){ fp.apply(_w, arguments); }, false); }; d._windowUnloaders = []; d.windowUnloaded = function(){ // summary: // signal fired by impending window destruction. You may use // dojo.addOnWindowUnload() to register a listener for this // event. NOTE: if you wish to dojo.connect() to this method // to perform page/application cleanup, be aware that this // event WILL NOT fire if no handler has been registered with // dojo.addOnWindowUnload. This behavior started in Dojo 1.3. // Previous versions always triggered dojo.windowUnloaded. See // dojo.addOnWindowUnload for more info. var mll = d._windowUnloaders; while(mll.length){ (mll.pop())(); } d = null; }; var _onWindowUnloadAttached = 0; d.addOnWindowUnload = function(/*Object?|Function?*/obj, /*String|Function?*/functionName){ // summary: // registers a function to be triggered when window.onunload // fires. 
// description: // The first time that addOnWindowUnload is called Dojo // will register a page listener to trigger your unload // handler with. Note that registering these handlers may // destory "fastback" page caching in browsers that support // it. Be careful trying to modify the DOM or access // JavaScript properties during this phase of page unloading: // they may not always be available. Consider // dojo.addOnUnload() if you need to modify the DOM or do // heavy JavaScript work since it fires at the eqivalent of // the page's "onbeforeunload" event. // example: // | dojo.addOnWindowUnload(functionPointer) // | dojo.addOnWindowUnload(object, "functionName"); // | dojo.addOnWindowUnload(object, function(){ /* ... */}); d._onto(d._windowUnloaders, obj, functionName); if(!_onWindowUnloadAttached){ _onWindowUnloadAttached = 1; _handleNodeEvent("onunload", d.windowUnloaded); } }; var _onUnloadAttached = 0; d.addOnUnload = function(/*Object?|Function?*/obj, /*String|Function?*/functionName){ // summary: // registers a function to be triggered when the page unloads. // description: // The first time that addOnUnload is called Dojo will // register a page listener to trigger your unload handler // with. // // In a browser enviroment, the functions will be triggered // during the window.onbeforeunload event. Be careful of doing // too much work in an unload handler. onbeforeunload can be // triggered if a link to download a file is clicked, or if // the link is a javascript: link. In these cases, the // onbeforeunload event fires, but the document is not // actually destroyed. So be careful about doing destructive // operations in a dojo.addOnUnload callback. // // Further note that calling dojo.addOnUnload will prevent // browsers from using a "fast back" cache to make page // loading via back button instantaneous. // example: // | dojo.addOnUnload(functionPointer) // | dojo.addOnUnload(object, "functionName") // | dojo.addOnUnload(object, function(){ /* ... 
*/}); d._onto(d._unloaders, obj, functionName); if(!_onUnloadAttached){ _onUnloadAttached = 1; _handleNodeEvent("onbeforeunload", dojo.unloaded); } }; })(); //START DOMContentLoaded dojo._initFired = false; dojo._loadInit = function(e){ if(dojo._scrollIntervalId){ clearInterval(dojo._scrollIntervalId); dojo._scrollIntervalId = 0; } if(!dojo._initFired){ dojo._initFired = true; //Help out IE to avoid memory leak. if(!dojo.config.afterOnLoad && window.detachEvent){ window.detachEvent("onload", dojo._loadInit); } if(dojo._inFlightCount == 0){ dojo._modulesLoaded(); } } } if(!dojo.config.afterOnLoad){ if(document.addEventListener){ //Standards. Hooray! Assumption here that if standards based, //it knows about DOMContentLoaded. It is OK if it does not, the fall through //to window onload should be good enough. document.addEventListener("DOMContentLoaded", dojo._loadInit, false); window.addEventListener("load", dojo._loadInit, false); }else if(window.attachEvent){ window.attachEvent("onload", dojo._loadInit); //DOMContentLoaded approximation. Diego Perini found this MSDN article //that indicates doScroll is available after DOM ready, so do a setTimeout //to check when it is available. //http://msdn.microsoft.com/en-us/library/ms531426.aspx if(!dojo.config.skipIeDomLoaded && self === self.top){ dojo._scrollIntervalId = setInterval(function (){ try{ //When dojo is loaded into an iframe in an IE HTML Application //(HTA), such as in a selenium test, javascript in the iframe //can't see anything outside of it, so self===self.top is true, //but the iframe is not the top window and doScroll will be //available before document.body is set. 
Test document.body //before trying the doScroll trick if(document.body){ document.documentElement.doScroll("left"); dojo._loadInit(); } }catch (e){} }, 30); } } } if(dojo.isIE){ try{ (function(){ document.namespaces.add("v", "urn:schemas-microsoft-com:vml"); var vmlElems = ["*", "group", "roundrect", "oval", "shape", "rect", "imagedata", "path", "textpath", "text"], i = 0, l = 1, s = document.createStyleSheet(); if(dojo.isIE >= 8){ i = 1; l = vmlElems.length; } for(; i < l; ++i){ s.addRule("v\\:" + vmlElems[i], "behavior:url(#default#VML); display:inline-block"); } })(); }catch(e){} } //END DOMContentLoaded /* OpenAjax.subscribe("OpenAjax", "onload", function(){ if(dojo._inFlightCount == 0){ dojo._modulesLoaded(); } }); OpenAjax.subscribe("OpenAjax", "onunload", function(){ dojo.unloaded(); }); */ } //if (typeof window != 'undefined') //Register any module paths set up in djConfig. Need to do this //in the hostenvs since hostenv_browser can read djConfig from a //script tag's attribute. (function(){ var mp = dojo.config["modulePaths"]; if(mp){ for(var param in mp){ dojo.registerModulePath(param, mp[param]); } } })(); //Load debug code if necessary. if(dojo.config.isDebug){ dojo.require("dojo._firebug.firebug"); } if(dojo.config.debugAtAllCosts){ dojo.config.useXDomain = true; dojo.require("dojo._base._loader.loader_xd"); dojo.require("dojo._base._loader.loader_debug"); dojo.require("dojo.i18n"); } if(!dojo._hasResource["dojo._base.lang"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.lang"] = true; dojo.provide("dojo._base.lang"); (function(){ var d = dojo, opts = Object.prototype.toString; // Crockford (ish) functions dojo.isString = function(/*anything*/ it){ // summary: // Return true if it is a String return (typeof it == "string" || it instanceof String); // Boolean } dojo.isArray = function(/*anything*/ it){ // summary: // Return true if it is an Array. 
// Does not work on Arrays created in other windows. return it && (it instanceof Array || typeof it == "array"); // Boolean } dojo.isFunction = function(/*anything*/ it){ // summary: // Return true if it is a Function return opts.call(it) === "[object Function]"; }; dojo.isObject = function(/*anything*/ it){ // summary: // Returns true if it is a JavaScript object (or an Array, a Function // or null) return it !== undefined && (it === null || typeof it == "object" || d.isArray(it) || d.isFunction(it)); // Boolean } dojo.isArrayLike = function(/*anything*/ it){ // summary: // similar to dojo.isArray() but more permissive // description: // Doesn't strongly test for "arrayness". Instead, settles for "isn't // a string or number and has a length property". Arguments objects // and DOM collections will return true when passed to // dojo.isArrayLike(), but will return false when passed to // dojo.isArray(). // returns: // If it walks like a duck and quacks like a duck, return `true` return it && it !== undefined && // Boolean // keep out built-in constructors (Number, String, ...) which have length // properties !d.isString(it) && !d.isFunction(it) && !(it.tagName && it.tagName.toLowerCase() == 'form') && (d.isArray(it) || isFinite(it.length)); } dojo.isAlien = function(/*anything*/ it){ // summary: // Returns true if it is a built-in function or some other kind of // oddball that *should* report as a function but doesn't return it && !d.isFunction(it) && /\{\s*\[native code\]\s*\}/.test(String(it)); // Boolean } dojo.extend = function(/*Object*/ constructor, /*Object...*/ props){ // summary: // Adds all properties and methods of props to constructor's // prototype, making them available to all instances created with // constructor. 
for(var i=1, l=arguments.length; i<l; i++){ d._mixin(constructor.prototype, arguments[i]); } return constructor; // Object } dojo._hitchArgs = function(scope, method /*,...*/){ var pre = d._toArray(arguments, 2); var named = d.isString(method); return function(){ // arrayify arguments var args = d._toArray(arguments); // locate our method var f = named ? (scope||d.global)[method] : method; // invoke with collected args return f && f.apply(scope || this, pre.concat(args)); // mixed } // Function } dojo.hitch = function(/*Object*/scope, /*Function|String*/method /*,...*/){ // summary: // Returns a function that will only ever execute in the a given scope. // This allows for easy use of object member functions // in callbacks and other places in which the "this" keyword may // otherwise not reference the expected scope. // Any number of default positional arguments may be passed as parameters // beyond "method". // Each of these values will be used to "placehold" (similar to curry) // for the hitched function. // scope: // The scope to use when method executes. If method is a string, // scope is also the object containing method. // method: // A function to be hitched to scope, or the name of the method in // scope to be hitched. // example: // | dojo.hitch(foo, "bar")(); // runs foo.bar() in the scope of foo // example: // | dojo.hitch(foo, myFunction); // returns a function that runs myFunction in the scope of foo // example: // Expansion on the default positional arguments passed along from // hitch. Passed args are mixed first, additional args after. 
// | var foo = { bar: function(a, b, c){ console.log(a, b, c); } }; // | var fn = dojo.hitch(foo, "bar", 1, 2); // | fn(3); // logs "1, 2, 3" // example: // | var foo = { bar: 2 }; // | dojo.hitch(foo, function(){ this.bar = 10; })(); // execute an anonymous function in scope of foo if(arguments.length > 2){ return d._hitchArgs.apply(d, arguments); // Function } if(!method){ method = scope; scope = null; } if(d.isString(method)){ scope = scope || d.global; if(!scope[method]){ throw(['dojo.hitch: scope["', method, '"] is null (scope="', scope, '")'].join('')); } return function(){ return scope[method].apply(scope, arguments || []); }; // Function } return !scope ? method : function(){ return method.apply(scope, arguments || []); }; // Function } /*===== dojo.delegate = function(obj, props){ // summary: // Returns a new object which "looks" to obj for properties which it // does not have a value for. Optionally takes a bag of properties to // seed the returned object with initially. // description: // This is a small implementaton of the Boodman/Crockford delegation // pattern in JavaScript. An intermediate object constructor mediates // the prototype chain for the returned object, using it to delegate // down to obj for property lookup when object-local lookup fails. // This can be thought of similarly to ES4's "wrap", save that it does // not act on types but rather on pure objects. // obj: // The object to delegate to for properties not found directly on the // return object or in props. 
// props: // an object containing properties to assign to the returned object // returns: // an Object of anonymous type // example: // | var foo = { bar: "baz" }; // | var thinger = dojo.delegate(foo, { thud: "xyzzy"}); // | thinger.bar == "baz"; // delegated to foo // | foo.thud == undefined; // by definition // | thinger.thud == "xyzzy"; // mixed in from props // | foo.bar = "thonk"; // | thinger.bar == "thonk"; // still delegated to foo's bar } =====*/ dojo.delegate = dojo._delegate = (function(){ // boodman/crockford delegation w/ cornford optimization function TMP(){} return function(obj, props){ TMP.prototype = obj; var tmp = new TMP(); TMP.prototype = null; if(props){ d._mixin(tmp, props); } return tmp; // Object } })(); /*===== dojo._toArray = function(obj, offset, startWith){ // summary: // Converts an array-like object (i.e. arguments, DOMCollection) to an // array. Returns a new Array with the elements of obj. // obj: Object // the object to "arrayify". We expect the object to have, at a // minimum, a length property which corresponds to integer-indexed // properties. // offset: Number? // the location in obj to start iterating from. Defaults to 0. // Optional. // startWith: Array? // An array to pack with the properties of obj. If provided, // properties in obj are appended at the end of startWith and // startWith is the returned array. } =====*/ var efficient = function(obj, offset, startWith){ return (startWith||[]).concat(Array.prototype.slice.call(obj, offset||0)); }; var slow = function(obj, offset, startWith){ var arr = startWith||[]; for(var x = offset || 0; x < obj.length; x++){ arr.push(obj[x]); } return arr; }; dojo._toArray = d.isIE ? function(obj){ return ((obj.item) ? slow : efficient).apply(this, arguments); } : efficient; dojo.partial = function(/*Function|String*/method /*, ...*/){ // summary: // similar to hitch() except that the scope object is left to be // whatever the execution context eventually becomes. 
// description: // Calling dojo.partial is the functional equivalent of calling: // | dojo.hitch(null, funcName, ...); var arr = [ null ]; return d.hitch.apply(d, arr.concat(d._toArray(arguments))); // Function } var extraNames = d._extraNames, extraLen = extraNames.length, empty = {}; dojo.clone = function(/*anything*/ o){ // summary: // Clones objects (including DOM nodes) and all children. // Warning: do not clone cyclic structures. if(!o || typeof o != "object" || d.isFunction(o)){ // null, undefined, any non-object, or function return o; // anything } if(o.nodeType && "cloneNode" in o){ // DOM Node return o.cloneNode(true); // Node } if(o instanceof Date){ // Date return new Date(o.getTime()); // Date } var r, i, l, s, name; if(d.isArray(o)){ // array r = []; for(i = 0, l = o.length; i < l; ++i){ if(i in o){ r.push(d.clone(o[i])); } } // we don't clone functions for performance reasons // }else if(d.isFunction(o)){ // // function // r = function(){ return o.apply(this, arguments); }; }else{ // generic objects r = o.constructor ? new o.constructor() : {}; } for(name in o){ // the "tobj" condition avoid copying properties in "source" // inherited from Object.prototype. 
For example, if target has a custom // toString() method, don't overwrite it with the toString() method // that source inherited from Object.prototype s = o[name]; if(!(name in r) || (r[name] !== s && (!(name in empty) || empty[name] !== s))){ r[name] = d.clone(s); } } // IE doesn't recognize some custom functions in for..in if(extraLen){ for(i = 0; i < extraLen; ++i){ name = extraNames[i]; s = o[name]; if(!(name in r) || (r[name] !== s && (!(name in empty) || empty[name] !== s))){ r[name] = s; // functions only, we don't clone them } } } return r; // Object } /*===== dojo.trim = function(str){ // summary: // Trims whitespace from both sides of the string // str: String // String to be trimmed // returns: String // Returns the trimmed string // description: // This version of trim() was selected for inclusion into the base due // to its compact size and relatively good performance // (see [Steven Levithan's blog](http://blog.stevenlevithan.com/archives/faster-trim-javascript) // Uses String.prototype.trim instead, if available. // The fastest but longest version of this function is located at // dojo.string.trim() return ""; // String } =====*/ dojo.trim = String.prototype.trim ? function(str){ return str.trim(); } : function(str){ return str.replace(/^\s\s*/, '').replace(/\s\s*$/, ''); }; /*===== dojo.replace = function(tmpl, map, pattern){ // summary: // Performs parameterized substitutions on a string. Throws an // exception if any parameter is unmatched. // tmpl: String // String to be used as a template. // map: Object|Function // If an object, it is used as a dictionary to look up substitutions. // If a function, it is called for every substitution with following // parameters: a whole match, a name, an offset, and the whole template // string (see https://developer.mozilla.org/en/Core_JavaScript_1.5_Reference/Global_Objects/String/replace // for more details). // pattern: RegEx? // Optional regular expression objects that overrides the default pattern. 
// Must be global and match one item. The default is: /\{([^\}]+)\}/g, // which matches patterns like that: "{xxx}", where "xxx" is any sequence // of characters, which doesn't include "}". // returns: String // Returns the substituted string. // example: // | // uses a dictionary for substitutions: // | dojo.replace("Hello, {name.first} {name.last} AKA {nick}!", // | { // | nick: "Bob", // | name: { // | first: "Robert", // | middle: "X", // | last: "Cringely" // | } // | }); // | // returns: Hello, Robert Cringely AKA Bob! // example: // | // uses an array for substitutions: // | dojo.replace("Hello, {0} {2}!", // | ["Robert", "X", "Cringely"]); // | // returns: Hello, Robert Cringely! // example: // | // uses a function for substitutions: // | function sum(a){ // | var t = 0; // | dojo.forEach(a, function(x){ t += x; }); // | return t; // | } // | dojo.replace( // | "{count} payments averaging {avg} USD per payment.", // | dojo.hitch( // | { payments: [11, 16, 12] }, // | function(_, key){ // | switch(key){ // | case "count": return this.payments.length; // | case "min": return Math.min.apply(Math, this.payments); // | case "max": return Math.max.apply(Math, this.payments); // | case "sum": return sum(this.payments); // | case "avg": return sum(this.payments) / this.payments.length; // | } // | } // | ) // | ); // | // prints: 3 payments averaging 13 USD per payment. // example: // | // uses an alternative PHP-like pattern for substitutions: // | dojo.replace("Hello, ${0} ${2}!", // | ["Robert", "X", "Cringely"], /\$\{([^\}]+)\}/g); // | // returns: Hello, Robert Cringely! return ""; // String } =====*/ var _pattern = /\{([^\}]+)\}/g; dojo.replace = function(tmpl, map, pattern){ return tmpl.replace(pattern || _pattern, d.isFunction(map) ? map : function(_, k){ return d.getObject(k, false, map); }); }; })(); } if(!dojo._hasResource["dojo._base.array"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. 
dojo._hasResource["dojo._base.array"] = true; dojo.provide("dojo._base.array"); (function(){ var _getParts = function(arr, obj, cb){ return [ (typeof arr == "string") ? arr.split("") : arr, obj || dojo.global, // FIXME: cache the anonymous functions we create here? (typeof cb == "string") ? new Function("item", "index", "array", cb) : cb ]; }; var everyOrSome = function(/*Boolean*/every, /*Array|String*/arr, /*Function|String*/callback, /*Object?*/thisObject){ var _p = _getParts(arr, thisObject, callback); arr = _p[0]; for(var i=0,l=arr.length; i<l; ++i){ var result = !!_p[2].call(_p[1], arr[i], i, arr); if(every ^ result){ return result; // Boolean } } return every; // Boolean }; dojo.mixin(dojo, { indexOf: function( /*Array*/ array, /*Object*/ value, /*Integer?*/ fromIndex, /*Boolean?*/ findLast){ // summary: // locates the first index of the provided value in the // passed array. If the value is not found, -1 is returned. // description: // This method corresponds to the JavaScript 1.6 Array.indexOf method, with one difference: when // run over sparse arrays, the Dojo function invokes the callback for every index whereas JavaScript // 1.6's indexOf skips the holes in the sparse array. // For details on this method, see: // https://developer.mozilla.org/en/Core_JavaScript_1.5_Reference/Objects/Array/indexOf var step = 1, end = array.length || 0, i = 0; if(findLast){ i = end - 1; step = end = -1; } if(fromIndex != undefined){ i = fromIndex; } if((findLast && i > end) || i < end){ for(; i != end; i += step){ if(array[i] == value){ return i; } } } return -1; // Number }, lastIndexOf: function(/*Array*/array, /*Object*/value, /*Integer?*/fromIndex){ // summary: // locates the last index of the provided value in the passed // array. If the value is not found, -1 is returned. 
// description: // This method corresponds to the JavaScript 1.6 Array.lastIndexOf method, with one difference: when // run over sparse arrays, the Dojo function invokes the callback for every index whereas JavaScript // 1.6's lastIndexOf skips the holes in the sparse array. // For details on this method, see: // https://developer.mozilla.org/en/Core_JavaScript_1.5_Reference/Objects/Array/lastIndexOf return dojo.indexOf(array, value, fromIndex, true); // Number }, forEach: function(/*Array|String*/arr, /*Function|String*/callback, /*Object?*/thisObject){ // summary: // for every item in arr, callback is invoked. Return values are ignored. // If you want to break out of the loop, consider using dojo.every() or dojo.some(). // forEach does not allow breaking out of the loop over the items in arr. // arr: // the array to iterate over. If a string, operates on individual characters. // callback: // a function is invoked with three arguments: item, index, and array // thisObject: // may be used to scope the call to callback // description: // This function corresponds to the JavaScript 1.6 Array.forEach() method, with one difference: when // run over sparse arrays, this implemenation passes the "holes" in the sparse array to // the callback function with a value of undefined. JavaScript 1.6's forEach skips the holes in the sparse array. 
// For more details, see: // https://developer.mozilla.org/en/Core_JavaScript_1.5_Reference/Objects/Array/forEach // example: // | // log out all members of the array: // | dojo.forEach( // | [ "thinger", "blah", "howdy", 10 ], // | function(item){ // | console.log(item); // | } // | ); // example: // | // log out the members and their indexes // | dojo.forEach( // | [ "thinger", "blah", "howdy", 10 ], // | function(item, idx, arr){ // | console.log(item, "at index:", idx); // | } // | ); // example: // | // use a scoped object member as the callback // | // | var obj = { // | prefix: "logged via obj.callback:", // | callback: function(item){ // | console.log(this.prefix, item); // | } // | }; // | // | // specifying the scope function executes the callback in that scope // | dojo.forEach( // | [ "thinger", "blah", "howdy", 10 ], // | obj.callback, // | obj // | ); // | // | // alternately, we can accomplish the same thing with dojo.hitch() // | dojo.forEach( // | [ "thinger", "blah", "howdy", 10 ], // | dojo.hitch(obj, "callback") // | ); // match the behavior of the built-in forEach WRT empty arrs if(!arr || !arr.length){ return; } // FIXME: there are several ways of handilng thisObject. Is // dojo.global always the default context? var _p = _getParts(arr, thisObject, callback); arr = _p[0]; for(var i=0,l=arr.length; i<l; ++i){ _p[2].call(_p[1], arr[i], i, arr); } }, every: function(/*Array|String*/arr, /*Function|String*/callback, /*Object?*/thisObject){ // summary: // Determines whether or not every item in arr satisfies the // condition implemented by callback. // arr: // the array to iterate on. If a string, operates on individual characters. // callback: // a function is invoked with three arguments: item, index, // and array and returns true if the condition is met. 
// thisObject: // may be used to scope the call to callback // description: // This function corresponds to the JavaScript 1.6 Array.every() method, with one difference: when // run over sparse arrays, this implemenation passes the "holes" in the sparse array to // the callback function with a value of undefined. JavaScript 1.6's every skips the holes in the sparse array. // For more details, see: // https://developer.mozilla.org/en/Core_JavaScript_1.5_Reference/Objects/Array/every // example: // | // returns false // | dojo.every([1, 2, 3, 4], function(item){ return item>1; }); // example: // | // returns true // | dojo.every([1, 2, 3, 4], function(item){ return item>0; }); return everyOrSome(true, arr, callback, thisObject); // Boolean }, some: function(/*Array|String*/arr, /*Function|String*/callback, /*Object?*/thisObject){ // summary: // Determines whether or not any item in arr satisfies the // condition implemented by callback. // arr: // the array to iterate over. If a string, operates on individual characters. // callback: // a function is invoked with three arguments: item, index, // and array and returns true if the condition is met. // thisObject: // may be used to scope the call to callback // description: // This function corresponds to the JavaScript 1.6 Array.some() method, with one difference: when // run over sparse arrays, this implemenation passes the "holes" in the sparse array to // the callback function with a value of undefined. JavaScript 1.6's some skips the holes in the sparse array. 
// For more details, see: // https://developer.mozilla.org/en/Core_JavaScript_1.5_Reference/Objects/Array/some // example: // | // is true // | dojo.some([1, 2, 3, 4], function(item){ return item>1; }); // example: // | // is false // | dojo.some([1, 2, 3, 4], function(item){ return item<1; }); return everyOrSome(false, arr, callback, thisObject); // Boolean }, map: function(/*Array|String*/arr, /*Function|String*/callback, /*Function?*/thisObject){ // summary: // applies callback to each element of arr and returns // an Array with the results // arr: // the array to iterate on. If a string, operates on // individual characters. // callback: // a function is invoked with three arguments, (item, index, // array), and returns a value // thisObject: // may be used to scope the call to callback // description: // This function corresponds to the JavaScript 1.6 Array.map() method, with one difference: when // run over sparse arrays, this implemenation passes the "holes" in the sparse array to // the callback function with a value of undefined. JavaScript 1.6's map skips the holes in the sparse array. // For more details, see: // https://developer.mozilla.org/en/Core_JavaScript_1.5_Reference/Objects/Array/map // example: // | // returns [2, 3, 4, 5] // | dojo.map([1, 2, 3, 4], function(item){ return item+1 }); var _p = _getParts(arr, thisObject, callback); arr = _p[0]; var outArr = (arguments[3] ? (new arguments[3]()) : []); for(var i=0,l=arr.length; i<l; ++i){ outArr.push(_p[2].call(_p[1], arr[i], i, arr)); } return outArr; // Array }, filter: function(/*Array*/arr, /*Function|String*/callback, /*Object?*/thisObject){ // summary: // Returns a new Array with those items from arr that match the // condition implemented by callback. // arr: // the array to iterate over. // callback: // a function that is invoked with three arguments (item, // index, array). 
The return of this function is expected to // be a boolean which determines whether the passed-in item // will be included in the returned array. // thisObject: // may be used to scope the call to callback // description: // This function corresponds to the JavaScript 1.6 Array.filter() method, with one difference: when // run over sparse arrays, this implemenation passes the "holes" in the sparse array to // the callback function with a value of undefined. JavaScript 1.6's filter skips the holes in the sparse array. // For more details, see: // https://developer.mozilla.org/en/Core_JavaScript_1.5_Reference/Objects/Array/filter // example: // | // returns [2, 3, 4] // | dojo.filter([1, 2, 3, 4], function(item){ return item>1; }); var _p = _getParts(arr, thisObject, callback); arr = _p[0]; var outArr = []; for(var i=0,l=arr.length; i<l; ++i){ if(_p[2].call(_p[1], arr[i], i, arr)){ outArr.push(arr[i]); } } return outArr; // Array } }); })(); /* */ } if(!dojo._hasResource["dojo._base.declare"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.declare"] = true; dojo.provide("dojo._base.declare"); (function(){ var d = dojo, mix = d._mixin, op = Object.prototype, opts = op.toString, xtor = new Function, counter = 0, cname = "constructor"; function err(msg){ throw new Error("declare: " + msg); } // C3 Method Resolution Order (see http://www.python.org/download/releases/2.3/mro/) function c3mro(bases){ var result = [], roots = [{cls: 0, refs: []}], nameMap = {}, clsCount = 1, l = bases.length, i = 0, j, lin, base, top, proto, rec, name, refs; // build a list of bases naming them if needed for(; i < l; ++i){ base = bases[i]; if(!base){ err("mixin #" + i + " is unknown. Did you use dojo.require to pull it in?"); }else if(opts.call(base) != "[object Function]"){ err("mixin #" + i + " is not a callable constructor."); } lin = base._meta ? 
base._meta.bases : [base]; top = 0; // add bases to the name map for(j = lin.length - 1; j >= 0; --j){ proto = lin[j].prototype; if(!proto.hasOwnProperty("declaredClass")){ proto.declaredClass = "uniqName_" + (counter++); } name = proto.declaredClass; if(!nameMap.hasOwnProperty(name)){ nameMap[name] = {count: 0, refs: [], cls: lin[j]}; ++clsCount; } rec = nameMap[name]; if(top && top !== rec){ rec.refs.push(top); ++top.count; } top = rec; } ++top.count; roots[0].refs.push(top); } // remove classes without external references recursively while(roots.length){ top = roots.pop(); result.push(top.cls); --clsCount; // optimization: follow a single-linked chain while(refs = top.refs, refs.length == 1){ top = refs[0]; if(!top || --top.count){ // branch or end of chain => do not end to roots top = 0; break; } result.push(top.cls); --clsCount; } if(top){ // branch for(i = 0, l = refs.length; i < l; ++i){ top = refs[i]; if(!--top.count){ roots.push(top); } } } } if(clsCount){ err("can't build consistent linearization"); } // calculate the superclass offset base = bases[0]; result[0] = base ? base._meta && base === result[result.length - base._meta.bases.length] ? 
base._meta.bases.length : 1 : 0; return result; } function inherited(args, a, f){ var name, chains, bases, caller, meta, base, proto, opf, pos, cache = this._inherited = this._inherited || {}; // crack arguments if(typeof args == "string"){ name = args; args = a; a = f; } f = 0; caller = args.callee; name = name || caller.nom; if(!name){ err("can't deduce a name to call inherited()"); } meta = this.constructor._meta; bases = meta.bases; pos = cache.p; if(name != cname){ // method if(cache.c !== caller){ // cache bust pos = 0; base = bases[0]; meta = base._meta; if(meta.hidden[name] !== caller){ // error detection chains = meta.chains; if(chains && typeof chains[name] == "string"){ err("calling chained method with inherited: " + name); } // find caller do{ meta = base._meta; proto = base.prototype; if(meta && (proto[name] === caller && proto.hasOwnProperty(name) || meta.hidden[name] === caller)){ break; } }while(base = bases[++pos]); // intentional assignment pos = base ? pos : -1; } } // find next base = bases[++pos]; if(base){ proto = base.prototype; if(base._meta && proto.hasOwnProperty(name)){ f = proto[name]; }else{ opf = op[name]; do{ proto = base.prototype; f = proto[name]; if(f && (base._meta ? proto.hasOwnProperty(name) : f !== opf)){ break; } }while(base = bases[++pos]); // intentional assignment } } f = base && f || op[name]; }else{ // constructor if(cache.c !== caller){ // cache bust pos = 0; meta = bases[0]._meta; if(meta && meta.ctor !== caller){ // error detection chains = meta.chains; if(!chains || chains.constructor !== "manual"){ err("calling chained constructor with inherited"); } // find caller while(base = bases[++pos]){ // intentional assignment meta = base._meta; if(meta && meta.ctor === caller){ break; } } pos = base ? pos : -1; } } // find next while(base = bases[++pos]){ // intentional assignment meta = base._meta; f = meta ? 
meta.ctor : base; if(f){ break; } } f = base && f; } // cache the found super method cache.c = f; cache.p = pos; // now we have the result if(f){ return a === true ? f : f.apply(this, a || args); } // intentionally if a super method was not found } function getInherited(name, args){ if(typeof name == "string"){ return this.inherited(name, args, true); } return this.inherited(name, true); } // emulation of "instanceof" function isInstanceOf(cls){ var bases = this.constructor._meta.bases; for(var i = 0, l = bases.length; i < l; ++i){ if(bases[i] === cls){ return true; } } return this instanceof cls; } function mixOwn(target, source){ var name, i = 0, l = d._extraNames.length; // add props adding metadata for incoming functions skipping a constructor for(name in source){ if(name != cname && source.hasOwnProperty(name)){ target[name] = source[name]; } } // process unenumerable methods on IE for(; i < l; ++i){ name = d._extraNames[i]; if(name != cname && source.hasOwnProperty(name)){ target[name] = source[name]; } } } // implementation of safe mixin function function safeMixin(target, source){ var name, t, i = 0, l = d._extraNames.length; // add props adding metadata for incoming functions skipping a constructor for(name in source){ t = source[name]; if((t !== op[name] || !(name in op)) && name != cname){ if(opts.call(t) == "[object Function]"){ // non-trivial function method => attach its name t.nom = name; } target[name] = t; } } // process unenumerable methods on IE for(; i < l; ++i){ name = d._extraNames[i]; t = source[name]; if((t !== op[name] || !(name in op)) && name != cname){ if(opts.call(t) == "[object Function]"){ // non-trivial function method => attach its name t.nom = name; } target[name] = t; } } return target; } function extend(source){ safeMixin(this.prototype, source); return this; } // chained constructor compatible with the legacy dojo.declare() function chainedConstructor(bases, ctorSpecial){ return function(){ var a = arguments, args = a, a0 = 
a[0], f, i, m, l = bases.length, preArgs; if(!(this instanceof a.callee)){ // not called via new, so force it return applyNew(a); } //this._inherited = {}; // perform the shaman's rituals of the original dojo.declare() // 1) call two types of the preamble if(ctorSpecial && (a0 && a0.preamble || this.preamble)){ // full blown ritual preArgs = new Array(bases.length); // prepare parameters preArgs[0] = a; for(i = 0;;){ // process the preamble of the 1st argument a0 = a[0]; if(a0){ f = a0.preamble; if(f){ a = f.apply(this, a) || a; } } // process the preamble of this class f = bases[i].prototype; f = f.hasOwnProperty("preamble") && f.preamble; if(f){ a = f.apply(this, a) || a; } // one peculiarity of the preamble: // it is called if it is not needed, // e.g., there is no constructor to call // let's watch for the last constructor // (see ticket #9795) if(++i == l){ break; } preArgs[i] = a; } } // 2) call all non-trivial constructors using prepared arguments for(i = l - 1; i >= 0; --i){ f = bases[i]; m = f._meta; f = m ? m.ctor : f; if(f){ f.apply(this, preArgs ? 
preArgs[i] : a); } } // 3) continue the original ritual: call the postscript f = this.postscript; if(f){ f.apply(this, args); } }; } // chained constructor compatible with the legacy dojo.declare() function singleConstructor(ctor, ctorSpecial){ return function(){ var a = arguments, t = a, a0 = a[0], f; if(!(this instanceof a.callee)){ // not called via new, so force it return applyNew(a); } //this._inherited = {}; // perform the shaman's rituals of the original dojo.declare() // 1) call two types of the preamble if(ctorSpecial){ // full blown ritual if(a0){ // process the preamble of the 1st argument f = a0.preamble; if(f){ t = f.apply(this, t) || t; } } f = this.preamble; if(f){ // process the preamble of this class f.apply(this, t); // one peculiarity of the preamble: // it is called even if it is not needed, // e.g., there is no constructor to call // let's watch for the last constructor // (see ticket #9795) } } // 2) call a constructor if(ctor){ ctor.apply(this, a); } // 3) continue the original ritual: call the postscript f = this.postscript; if(f){ f.apply(this, a); } }; } // plain vanilla constructor (can use inherited() to call its base constructor) function simpleConstructor(bases){ return function(){ var a = arguments, i = 0, f, m; if(!(this instanceof a.callee)){ // not called via new, so force it return applyNew(a); } //this._inherited = {}; // perform the shaman's rituals of the original dojo.declare() // 1) do not call the preamble // 2) call the top constructor (it can use this.inherited()) for(; f = bases[i]; ++i){ // intentional assignment m = f._meta; f = m ? m.ctor : f; if(f){ f.apply(this, a); break; } } // 3) call the postscript f = this.postscript; if(f){ f.apply(this, a); } }; } function chain(name, bases, reversed){ return function(){ var b, m, f, i = 0, step = 1; if(reversed){ i = bases.length - 1; step = -1; } for(; b = bases[i]; i += step){ // intentional assignment m = b._meta; f = (m ? 
m.hidden : b.prototype)[name]; if(f){ f.apply(this, arguments); } } }; } // forceNew(ctor) // return a new object that inherits from ctor.prototype but // without actually running ctor on the object. function forceNew(ctor){ // create object with correct prototype using a do-nothing // constructor xtor.prototype = ctor.prototype; var t = new xtor; xtor.prototype = null; // clean up return t; } // applyNew(args) // just like 'new ctor()' except that the constructor and its arguments come // from args, which must be an array or an arguments object function applyNew(args){ // create an object with ctor's prototype but without // calling ctor on it. var ctor = args.callee, t = forceNew(ctor); // execute the real constructor on the new object ctor.apply(t, args); return t; } d.declare = function(className, superclass, props){ // crack parameters if(typeof className != "string"){ props = superclass; superclass = className; className = ""; } props = props || {}; var proto, i, t, ctor, name, bases, chains, mixins = 1, parents = superclass; // build a prototype if(opts.call(superclass) == "[object Array]"){ // C3 MRO bases = c3mro(superclass); t = bases[0]; mixins = bases.length - t; superclass = bases[mixins]; }else{ bases = [0]; if(superclass){ if(opts.call(superclass) == "[object Function]"){ t = superclass._meta; bases = bases.concat(t ? t.bases : superclass); }else{ err("base class is not a callable constructor."); } }else if(superclass !== null){ err("unknown base class. Did you use dojo.require to pull it in?") } } if(superclass){ for(i = mixins - 1;; --i){ proto = forceNew(superclass); if(!i){ // stop if nothing to add (the last base) break; } // mix in properties t = bases[i]; (t._meta ? 
mixOwn : mix)(proto, t.prototype); // chain in new constructor ctor = new Function; ctor.superclass = superclass; ctor.prototype = proto; superclass = proto.constructor = ctor; } }else{ proto = {}; } // add all properties safeMixin(proto, props); // add constructor t = props.constructor; if(t !== op.constructor){ t.nom = cname; proto.constructor = t; } // collect chains and flags for(i = mixins - 1; i; --i){ // intentional assignment t = bases[i]._meta; if(t && t.chains){ chains = mix(chains || {}, t.chains); } } if(proto["-chains-"]){ chains = mix(chains || {}, proto["-chains-"]); } // build ctor t = !chains || !chains.hasOwnProperty(cname); bases[0] = ctor = (chains && chains.constructor === "manual") ? simpleConstructor(bases) : (bases.length == 1 ? singleConstructor(props.constructor, t) : chainedConstructor(bases, t)); // add meta information to the constructor ctor._meta = {bases: bases, hidden: props, chains: chains, parents: parents, ctor: props.constructor}; ctor.superclass = superclass && superclass.prototype; ctor.extend = extend; ctor.prototype = proto; proto.constructor = ctor; // add "standard" methods to the prototype proto.getInherited = getInherited; proto.inherited = inherited; proto.isInstanceOf = isInstanceOf; // add name if specified if(className){ proto.declaredClass = className; d.setObject(className, ctor); } // build chains and add them to the prototype if(chains){ for(name in chains){ if(proto[name] && typeof chains[name] == "string" && name != cname){ t = proto[name] = chain(name, bases, chains[name] === "after"); t.nom = name; } } } // chained methods do not return values // no need to chain "invisible" functions return ctor; // Function }; d.safeMixin = safeMixin; /*===== dojo.declare = function(className, superclass, props){ // summary: // Create a feature-rich constructor from compact notation. 
// className: String?: // The optional name of the constructor (loosely, a "class") // stored in the "declaredClass" property in the created prototype. // It will be used as a global name for a created constructor. // superclass: Function|Function[]: // May be null, a Function, or an Array of Functions. This argument // specifies a list of bases (the left-most one is the most deepest // base). // props: Object: // An object whose properties are copied to the created prototype. // Add an instance-initialization function by making it a property // named "constructor". // returns: // New constructor function. // description: // Create a constructor using a compact notation for inheritance and // prototype extension. // // Mixin ancestors provide a type of multiple inheritance. // Prototypes of mixin ancestors are copied to the new class: // changes to mixin prototypes will not affect classes to which // they have been mixed in. // // Ancestors can be compound classes created by this version of // dojo.declare. In complex cases all base classes are going to be // linearized according to C3 MRO algorithm // (see http://www.python.org/download/releases/2.3/mro/ for more // details). // // "className" is cached in "declaredClass" property of the new class, // if it was supplied. The immediate super class will be cached in // "superclass" property of the new class. // // Methods in "props" will be copied and modified: "nom" property // (the declared name of the method) will be added to all copied // functions to help identify them for the internal machinery. Be // very careful, while reusing methods: if you use the same // function under different names, it can produce errors in some // cases. // // It is possible to use constructors created "manually" (without // dojo.declare) as bases. They will be called as usual during the // creation of an instance, their methods will be chained, and even // called by "this.inherited()". 
// // Special property "-chains-" governs how to chain methods. It is // a dictionary, which uses method names as keys, and hint strings // as values. If a hint string is "after", this method will be // called after methods of its base classes. If a hint string is // "before", this method will be called before methods of its base // classes. // // If "constructor" is not mentioned in "-chains-" property, it will // be chained using the legacy mode: using "after" chaining, // calling preamble() method before each constructor, if available, // and calling postscript() after all constructors were executed. // If the hint is "after", it is chained as a regular method, but // postscript() will be called after the chain of constructors. // "constructor" cannot be chained "before", but it allows // a special hint string: "manual", which means that constructors // are not going to be chained in any way, and programmer will call // them manually using this.inherited(). In the latter case // postscript() will be called after the construction. // // All chaining hints are "inherited" from base classes and // potentially can be overridden. Be very careful when overriding // hints! Make sure that all chained methods can work in a proposed // manner of chaining. // // Once a method was chained, it is impossible to unchain it. The // only exception is "constructor". You don't need to define a // method in order to supply a chaining hint. // // If a method is chained, it cannot use this.inherited() because // all other methods in the hierarchy will be called automatically. // // Usually constructors and initializers of any kind are chained // using "after" and destructors of any kind are chained as // "before". Note that chaining assumes that chained methods do not // return any value: any returned value will be discarded. 
// // example: // | dojo.declare("my.classes.bar", my.classes.foo, { // | // properties to be added to the class prototype // | someValue: 2, // | // initialization function // | constructor: function(){ // | this.myComplicatedObject = new ReallyComplicatedObject(); // | }, // | // other functions // | someMethod: function(){ // | doStuff(); // | } // | }); // // example: // | var MyBase = dojo.declare(null, { // | // constructor, properties, and methods go here // | // ... // | }); // | var MyClass1 = dojo.declare(MyBase, { // | // constructor, properties, and methods go here // | // ... // | }); // | var MyClass2 = dojo.declare(MyBase, { // | // constructor, properties, and methods go here // | // ... // | }); // | var MyDiamond = dojo.declare([MyClass1, MyClass2], { // | // constructor, properties, and methods go here // | // ... // | }); // // example: // | var F = function(){ console.log("raw constructor"); }; // | F.prototype.method = function(){ // | console.log("raw method"); // | }; // | var A = dojo.declare(F, { // | constructor: function(){ // | console.log("A.constructor"); // | }, // | method: function(){ // | console.log("before calling F.method..."); // | this.inherited(arguments); // | console.log("...back in A"); // | } // | }); // | new A().method(); // | // will print: // | // raw constructor // | // A.constructor // | // before calling F.method... 
// | // raw method // | // ...back in A // // example: // | var A = dojo.declare(null, { // | "-chains-": { // | destroy: "before" // | } // | }); // | var B = dojo.declare(A, { // | constructor: function(){ // | console.log("B.constructor"); // | }, // | destroy: function(){ // | console.log("B.destroy"); // | } // | }); // | var C = dojo.declare(B, { // | constructor: function(){ // | console.log("C.constructor"); // | }, // | destroy: function(){ // | console.log("C.destroy"); // | } // | }); // | new C().destroy(); // | // prints: // | // B.constructor // | // C.constructor // | // C.destroy // | // B.destroy // // example: // | var A = dojo.declare(null, { // | "-chains-": { // | constructor: "manual" // | } // | }); // | var B = dojo.declare(A, { // | constructor: function(){ // | // ... // | // call the base constructor with new parameters // | this.inherited(arguments, [1, 2, 3]); // | // ... // | } // | }); // // example: // | var A = dojo.declare(null, { // | "-chains-": { // | m1: "before" // | }, // | m1: function(){ // | console.log("A.m1"); // | }, // | m2: function(){ // | console.log("A.m2"); // | } // | }); // | var B = dojo.declare(A, { // | "-chains-": { // | m2: "after" // | }, // | m1: function(){ // | console.log("B.m1"); // | }, // | m2: function(){ // | console.log("B.m2"); // | } // | }); // | var x = new B(); // | x.m1(); // | // prints: // | // B.m1 // | // A.m1 // | x.m2(); // | // prints: // | // A.m2 // | // B.m2 return new Function(); // Function }; =====*/ /*===== dojo.safeMixin = function(target, source){ // summary: // Mix in properties skipping a constructor and decorating functions // like it is done by dojo.declare. // target: Object // Target object to accept new properties. // source: Object // Source object for new properties. // description: // This function is used to mix in properties like dojo._mixin does, // but it skips a constructor property and decorates functions like // dojo.declare does. 
// // It is meant to be used with classes and objects produced with // dojo.declare. Functions mixed in with dojo.safeMixin can use // this.inherited() like normal methods. // // This function is used to implement extend() method of a constructor // produced with dojo.declare(). // // example: // | var A = dojo.declare(null, { // | m1: function(){ // | console.log("A.m1"); // | }, // | m2: function(){ // | console.log("A.m2"); // | } // | }); // | var B = dojo.declare(A, { // | m1: function(){ // | this.inherited(arguments); // | console.log("B.m1"); // | } // | }); // | B.extend({ // | m2: function(){ // | this.inherited(arguments); // | console.log("B.m2"); // | } // | }); // | var x = new B(); // | dojo.safeMixin(x, { // | m1: function(){ // | this.inherited(arguments); // | console.log("X.m1"); // | }, // | m2: function(){ // | this.inherited(arguments); // | console.log("X.m2"); // | } // | }); // | x.m2(); // | // prints: // | // A.m1 // | // B.m1 // | // X.m1 }; =====*/ /*===== Object.inherited = function(name, args, newArgs){ // summary: // Calls a super method. // name: String? // The optional method name. Should be the same as the caller's // name. Usually "name" is specified in complex dynamic cases, when // the calling method was dynamically added, undecorated by // dojo.declare, and it cannot be determined. // args: Arguments // The caller supply this argument, which should be the original // "arguments". // newArgs: Object? // If "true", the found function will be returned without // executing it. // If Array, it will be used to call a super method. Otherwise // "args" will be used. // returns: // Whatever is returned by a super method, or a super method itself, // if "true" was specified as newArgs. // description: // This method is used inside method of classes produced with // dojo.declare to call a super method (next in the chain). It is // used for manually controlled chaining. Consider using the regular // chaining, because it is faster. 
	//		Use "this.inherited()" only in
	//		complex cases.
	//
	//		This method cannot be called from automatically chained
	//		constructors including the case of a special (legacy)
	//		constructor chaining. It cannot be called from chained methods.
	//
	//		If "this.inherited()" cannot find the next-in-chain method, it
	//		does nothing and returns "undefined". The last method in chain
	//		can be a default method implemented in Object, which will be
	//		called last.
	//
	//		If "name" is specified, it is assumed that the method that
	//		received "args" is the parent method for this call. It is looked
	//		up in the chain list and if it is found the next-in-chain method
	//		is called. If it is not found, the first-in-chain method is
	//		called.
	//
	//		If "name" is not specified, it will be derived from the calling
	//		method (using a method property "nom").
	//
	//	example:
	//	|	var B = dojo.declare(A, {
	//	|		method1: function(a, b, c){
	//	|			this.inherited(arguments);
	//	|		},
	//	|		method2: function(a, b){
	//	|			return this.inherited(arguments, [a + b]);
	//	|		}
	//	|	});
	//	|	// next method is not in the chain list because it is added
	//	|	// manually after the class was created.
	//	|	B.prototype.method3 = function(){
	//	|		console.log("This is a dynamically-added method.");
	//	|		this.inherited("method3", arguments);
	//	|	};
	//	example:
	//	|	var B = dojo.declare(A, {
	//	|		method: function(a, b){
	//	|			var super = this.inherited(arguments, true);
	//	|			// ...
	//	|			if(!super){
	//	|				console.log("there is no super method");
	//	|				return 0;
	//	|			}
	//	|			return super.apply(this, arguments);
	//	|		}
	//	|	});
	return {};	// Object
}
=====*/

/*=====
Object.getInherited = function(name, args){
	//	summary:
	//		Returns a super method.
	//	name: String?
	//		The optional method name. Should be the same as the caller's
	//		name. Usually "name" is specified in complex dynamic cases, when
	//		the calling method was dynamically added, undecorated by
	//		dojo.declare, and it cannot be determined.
// args: Arguments // The caller supply this argument, which should be the original // "arguments". // returns: // Returns a super method (Function) or "undefined". // description: // This method is a convenience method for "this.inherited()". // It uses the same algorithm but instead of executing a super // method, it returns it, or "undefined" if not found. // // example: // | var B = dojo.declare(A, { // | method: function(a, b){ // | var super = this.getInherited(arguments); // | // ... // | if(!super){ // | console.log("there is no super method"); // | return 0; // | } // | return super.apply(this, arguments); // | } // | }); return {}; // Object } =====*/ /*===== Object.isInstanceOf = function(cls){ // summary: // Checks the inheritance chain to see if it is inherited from this // class. // cls: Function // Class constructor. // returns: // "true", if this object is inherited from this class, "false" // otherwise. // description: // This method is used with instances of classes produced with // dojo.declare to determine of they support a certain interface or // not. It models "instanceof" operator. // // example: // | var A = dojo.declare(null, { // | // constructor, properties, and methods go here // | // ... // | }); // | var B = dojo.declare(null, { // | // constructor, properties, and methods go here // | // ... // | }); // | var C = dojo.declare([A, B], { // | // constructor, properties, and methods go here // | // ... // | }); // | var D = dojo.declare(A, { // | // constructor, properties, and methods go here // | // ... 
// | }); // | // | var a = new A(), b = new B(), c = new C(), d = new D(); // | // | console.log(a.isInstanceOf(A)); // true // | console.log(b.isInstanceOf(A)); // false // | console.log(c.isInstanceOf(A)); // true // | console.log(d.isInstanceOf(A)); // true // | // | console.log(a.isInstanceOf(B)); // false // | console.log(b.isInstanceOf(B)); // true // | console.log(c.isInstanceOf(B)); // true // | console.log(d.isInstanceOf(B)); // false // | // | console.log(a.isInstanceOf(C)); // false // | console.log(b.isInstanceOf(C)); // false // | console.log(c.isInstanceOf(C)); // true // | console.log(d.isInstanceOf(C)); // false // | // | console.log(a.isInstanceOf(D)); // false // | console.log(b.isInstanceOf(D)); // false // | console.log(c.isInstanceOf(D)); // false // | console.log(d.isInstanceOf(D)); // true return {}; // Object } =====*/ /*===== Object.extend = function(source){ // summary: // Adds all properties and methods of source to constructor's // prototype, making them available to all instances created with // constructor. This method is specific to constructors created with // dojo.declare. // source: Object // Source object which properties are going to be copied to the // constructor's prototype. // description: // Adds source properties to the constructor's prototype. It can // override existing properties. // // This method is similar to dojo.extend function, but it is specific // to constructors produced by dojo.declare. It is implemented // using dojo.safeMixin, and it skips a constructor property, // and properly decorates copied functions. // // example: // | var A = dojo.declare(null, { // | m1: function(){}, // | s1: "Popokatepetl" // | }); // | A.extend({ // | m1: function(){}, // | m2: function(){}, // | f1: true, // | d1: 42 // | }); }; =====*/ })(); } if(!dojo._hasResource["dojo._base.connect"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. 
dojo._hasResource["dojo._base.connect"] = true;
dojo.provide("dojo._base.connect");

// this file courtesy of the TurboAjax Group, licensed under a Dojo CLA

// low-level delegation machinery
dojo._listener = {
	// Build a dispatcher: a function that first invokes the wrapped target
	// (when one exists) and then fires every registered listener with the
	// same scope and arguments, returning the target's result.
	getDispatcher: function(){
		// arguments.callee is used (rather than a named function expression)
		// so the self-reference stays correct in legacy JScript engines.
		return function(){
			var protoArr = Array.prototype,
				self = arguments.callee,
				registered = self._listeners,
				target = self.target;
			// the return value always comes from the original target function
			var result = target && target.apply(this, arguments);
			// snapshot the listener list so it is immutable while firing
			var snapshot = [].concat(registered), idx;
			// sparse-array for-in: indices that survived delete are visited;
			// the "in Array.prototype" guard protects against libraries that
			// augment built-in prototypes
			for(idx in snapshot){
				if(!(idx in protoArr)){
					snapshot[idx].apply(this, arguments);
				}
			}
			return result;
		};
	},
	// Register "listener" to fire whenever source[method] is invoked; the
	// listener runs with the same scope (this) as the call.
	add: function(/*Object*/ source, /*String*/ method, /*Function*/ listener){
		source = source || dojo.global;
		// source[method] is either absent, a plain function, or already a dispatcher
		var target = source[method];
		if(!target || !target._listeners){
			// not yet a dispatcher: wrap whatever was there (possibly nothing)
			var dispatcher = dojo._listener.getDispatcher();
			dispatcher.target = target;
			dispatcher._listeners = [];
			target = source[method] = dispatcher;
		}
		// the handle is the listener's index + 1, so 0 is never a valid
		// handle; its concrete type is private to this module
		return target._listeners.push(listener); /*Handle*/
	},
	// Unregister a listener previously added via add().
	remove: function(/*Object*/ source, /*String*/ method, /*Handle*/ handle){
		var target = (source || dojo.global)[method];
		// handle is index + 1: "handle--" is falsy for 0/undefined and
		// yields the real array index otherwise
		if(target && target._listeners && handle--){
			delete target._listeners[handle];
		}
	}
};

// Multiple delegation for arbitrary methods. This unit knows nothing about
// DOM; DOM-aware behavior (and the dontFix argument) is supplied by event.js.
dojo.connect = function(/*Object|null*/ obj, /*String*/ event, /*Object|null*/ context, /*String|Function*/ method, /*Boolean?*/ dontFix){
	// summary:
	//		Core event handling and delegation method: whenever
	//		obj[event] fires, invoke context[method] (or method bound to
	//		context) with the same arguments.
	// description:
	//		obj defaults to dojo.global when null; if obj is a DOM node the
	//		connection is delegated to the DOM event manager unless dontFix
	//		is true. Null arguments may simply be omitted, so fewer than 4
	//		arguments can set up a connection:
	//		|	dojo.connect(obj, "onchange", ui, "update");
	//		|	dojo.connect(ob, "onCustomEvent", customEventHandler); // same scope
	//		|	dojo.connect("globalEvent", globalHandler);            // obj omitted
	//		If obj[event] does not exist yet, a stub is created rather than
	//		failing. Returns a handle for dojo.disconnect.

	// normalize arguments into [obj, event, context, method, ...extra]
	var a = arguments, normalized = [], i = 0;
	// if a[0] is a String, obj was omitted
	normalized.push(dojo.isString(a[0]) ? null : a[i++], a[i++]);
	// if the arg-after-next is a String or Function, context was NOT omitted
	var probe = a[i + 1];
	normalized.push(dojo.isString(probe) || dojo.isFunction(probe) ? a[i++] : null, a[i++]);
	// absorb any additional arguments (e.g. dontFix)
	for(var n = a.length; i < n; i++){
		normalized.push(a[i]);
	}
	// do the actual work
	return dojo._connect.apply(this, normalized); /*Handle*/
}

// used by non-browser hostenvs.
always overriden by event.js dojo._connect = function(obj, event, context, method){ var l=dojo._listener, h=l.add(obj, event, dojo.hitch(context, method)); return [obj, event, h, l]; // Handle } dojo.disconnect = function(/*Handle*/ handle){ // summary: // Remove a link created by dojo.connect. // description: // Removes the connection between event and the method referenced by handle. // handle: // the return value of the dojo.connect call that created the connection. if(handle && handle[0] !== undefined){ dojo._disconnect.apply(this, handle); // let's not keep this reference delete handle[0]; } } dojo._disconnect = function(obj, event, handle, listener){ listener.remove(obj, event, handle); } // topic publish/subscribe dojo._topics = {}; dojo.subscribe = function(/*String*/ topic, /*Object|null*/ context, /*String|Function*/ method){ // summary: // Attach a listener to a named topic. The listener function is invoked whenever the // named topic is published (see: dojo.publish). // Returns a handle which is needed to unsubscribe this listener. // context: // Scope in which method will be invoked, or null for default scope. // method: // The name of a function in context, or a function reference. This is the function that // is invoked when topic is published. // example: // | dojo.subscribe("alerts", null, function(caption, message){ alert(caption + "\n" + message); }); // | dojo.publish("alerts", [ "read this", "hello world" ]); // support for 2 argument invocation (omitting context) depends on hitch return [topic, dojo._listener.add(dojo._topics, topic, dojo.hitch(context, method))]; /*Handle*/ } dojo.unsubscribe = function(/*Handle*/ handle){ // summary: // Remove a topic listener. // handle: // The handle returned from a call to subscribe. // example: // | var alerter = dojo.subscribe("alerts", null, function(caption, message){ alert(caption + "\n" + message); }; // | ... 
// | dojo.unsubscribe(alerter); if(handle){ dojo._listener.remove(dojo._topics, handle[0], handle[1]); } } dojo.publish = function(/*String*/ topic, /*Array*/ args){ // summary: // Invoke all listener method subscribed to topic. // topic: // The name of the topic to publish. // args: // An array of arguments. The arguments will be applied // to each topic subscriber (as first class parameters, via apply). // example: // | dojo.subscribe("alerts", null, function(caption, message){ alert(caption + "\n" + message); }; // | dojo.publish("alerts", [ "read this", "hello world" ]); // Note that args is an array, which is more efficient vs variable length // argument list. Ideally, var args would be implemented via Array // throughout the APIs. var f = dojo._topics[topic]; if(f){ f.apply(this, args||[]); } } dojo.connectPublisher = function( /*String*/ topic, /*Object|null*/ obj, /*String*/ event){ // summary: // Ensure that every time obj.event() is called, a message is published // on the topic. Returns a handle which can be passed to // dojo.disconnect() to disable subsequent automatic publication on // the topic. // topic: // The name of the topic to publish. // obj: // The source object for the event function. Defaults to dojo.global // if null. // event: // The name of the event function in obj. // I.e. identifies a property obj[event]. // example: // | dojo.connectPublisher("/ajax/start", dojo, "xhrGet"); var pf = function(){ dojo.publish(topic, arguments); } return event ? dojo.connect(obj, event, pf) : dojo.connect(obj, pf); //Handle }; } if(!dojo._hasResource["dojo._base.Deferred"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.Deferred"] = true; dojo.provide("dojo._base.Deferred"); (function(){ var mutator = function(){}; var freeze = Object.freeze || function(){}; // A deferred provides an API for creating and resolving a promise. 
dojo.Deferred = function(/*Function?*/canceller){ // summary: // Deferreds provide a generic means for encapsulating an asynchronous // operation and notifying users of the completion and result of the operation. // description: // The dojo.Deferred API is based on the concept of promises that provide a // generic interface into the eventual completion of an asynchronous action. // The motivation for promises fundamentally is about creating a // separation of concerns that allows one to achieve the same type of // call patterns and logical data flow in asynchronous code as can be // achieved in synchronous code. Promises allows one // to be able to call a function purely with arguments needed for // execution, without conflating the call with concerns of whether it is // sync or async. One shouldn't need to alter a call's arguments if the // implementation switches from sync to async (or vice versa). By having // async functions return promises, the concerns of making the call are // separated from the concerns of asynchronous interaction (which are // handled by the promise). // // The dojo.Deferred is a type of promise that provides methods for fulfilling the // promise with a successful result or an error. The most important method for // working with Dojo's promises is the then() method, which follows the // CommonJS proposed promise API. An example of using a Dojo promise: // // | var resultingPromise = someAsyncOperation.then(function(result){ // | ... handle result ... // | }, // | function(error){ // | ... handle error ... // | }); // // The .then() call returns a new promise that represents the result of the // execution of the callback. The callbacks will never affect the original promises value. 
	//
	//		The dojo.Deferred instances also provide the following functions for backwards compatibility:
	//
	//		* addCallback(handler)
	//		* addErrback(handler)
	//		* callback(result)
	//		* errback(result)
	//
	//		Callbacks are allowed to return promises themselves, so
	//		you can build complicated sequences of events with ease.
	//
	//		The creator of the Deferred may specify a canceller. The canceller
	//		is a function that will be called if Deferred.cancel is called
	//		before the Deferred fires. You can use this to implement clean
	//		aborting of an XMLHttpRequest, etc. Note that cancel will fire the
	//		deferred with a CancelledError (unless your canceller returns
	//		another kind of error), so the errbacks should be prepared to
	//		handle that error for cancellable Deferreds.
	//	example:
	//	|	var deferred = new dojo.Deferred();
	//	|	setTimeout(function(){ deferred.callback({success: true}); }, 1000);
	//	|	return deferred;
	//	example:
	//		Deferred objects are often used when making code asynchronous. It
	//		may be easiest to write functions in a synchronous manner and then
	//		split code using a deferred to trigger a response to a long-lived
	//		operation. For example, instead of registering a callback function to
	//		denote when a rendering operation completes, the function can
	//		simply return a deferred:
	//
	//	|	// callback style:
	//	|	function renderLotsOfData(data, callback){
	//	|		var success = false
	//	|		try{
	//	|			for(var x in data){
	//	|				renderDataitem(data[x]);
	//	|			}
	//	|			success = true;
	//	|		}catch(e){ }
	//	|		if(callback){
	//	|			callback(success);
	//	|		}
	//	|	}
	//
	//	|	// using callback style
	//	|	renderLotsOfData(someDataObj, function(success){
	//	|		// handles success or failure
	//	|		if(!success){
	//	|			promptUserToRecover();
	//	|		}
	//	|	});
	//	|	// NOTE: no way to add another callback here!!
// example: // Using a Deferred doesn't simplify the sending code any, but it // provides a standard interface for callers and senders alike, // providing both with a simple way to service multiple callbacks for // an operation and freeing both sides from worrying about details // such as "did this get called already?". With Deferreds, new // callbacks can be added at any time. // // | // Deferred style: // | function renderLotsOfData(data){ // | var d = new dojo.Deferred(); // | try{ // | for(var x in data){ // | renderDataitem(data[x]); // | } // | d.callback(true); // | }catch(e){ // | d.errback(new Error("rendering failed")); // | } // | return d; // | } // // | // using Deferred style // | renderLotsOfData(someDataObj).then(null, function(){ // | promptUserToRecover(); // | }); // | // NOTE: addErrback and addCallback both return the Deferred // | // again, so we could chain adding callbacks or save the // | // deferred for later should we need to be notified again. // example: // In this example, renderLotsOfData is syncrhonous and so both // versions are pretty artificial. Putting the data display on a // timeout helps show why Deferreds rock: // // | // Deferred style and async func // | function renderLotsOfData(data){ // | var d = new dojo.Deferred(); // | setTimeout(function(){ // | try{ // | for(var x in data){ // | renderDataitem(data[x]); // | } // | d.callback(true); // | }catch(e){ // | d.errback(new Error("rendering failed")); // | } // | }, 100); // | return d; // | } // // | // using Deferred style // | renderLotsOfData(someDataObj).then(null, function(){ // | promptUserToRecover(); // | }); // // Note that the caller doesn't have to change his code at all to // handle the asynchronous case. 
var result, finished, isError, head, nextListener; var promise = this.promise = {}; function complete(value){ if(finished){ throw new Error("This deferred has already been resolved"); } result = value; finished = true; notify(); } function notify(){ var mutated; while(!mutated && nextListener){ var listener = nextListener; nextListener = nextListener.next; if(mutated = (listener.progress == mutator)){ // assignment and check finished = false; } var func = (isError ? listener.error : listener.resolved); if (func) { try { var newResult = func(result); if (newResult && typeof newResult.then === "function") { newResult.then(dojo.hitch(listener.deferred, "resolve"), dojo.hitch(listener.deferred, "reject")); continue; } var unchanged = mutated && newResult === undefined; listener.deferred[unchanged && isError ? "reject" : "resolve"](unchanged ? result : newResult); } catch (e) { listener.deferred.reject(e); } }else { if(isError){ listener.deferred.reject(result); }else{ listener.deferred.resolve(result); } } } } // calling resolve will resolve the promise this.resolve = this.callback = function(value){ // summary: // Fulfills the Deferred instance successfully with the provide value this.fired = 0; this.results = [value, null]; complete(value); }; // calling error will indicate that the promise failed this.reject = this.errback = function(error){ // summary: // Fulfills the Deferred instance as an error with the provided error isError = true; this.fired = 1; complete(error); this.results = [null, error]; if(!error || error.log !== false){ (dojo.config.deferredOnError || function(x){ console.error(x); })(error); } }; // call progress to provide updates on the progress on the completion of the promise this.progress = function(update){ // summary // Send progress events to all listeners var listener = nextListener; while(listener){ var progress = listener.progress; progress && progress(update); listener = listener.next; } }; this.addCallbacks = 
function(/*Function?*/callback, /*Function?*/errback){ this.then(callback, errback, mutator); return this; }; // provide the implementation of the promise this.then = promise.then = function(/*Function?*/resolvedCallback, /*Function?*/errorCallback, /*Function?*/progressCallback){ // summary // Adds a fulfilledHandler, errorHandler, and progressHandler to be called for // completion of a promise. The fulfilledHandler is called when the promise // is fulfilled. The errorHandler is called when a promise fails. The // progressHandler is called for progress events. All arguments are optional // and non-function values are ignored. The progressHandler is not only an // optional argument, but progress events are purely optional. Promise // providers are not required to ever create progress events. // // This function will return a new promise that is fulfilled when the given // fulfilledHandler or errorHandler callback is finished. This allows promise // operations to be chained together. The value returned from the callback // handler is the fulfillment value for the returned promise. If the callback // throws an error, the returned promise will be moved to failed state. // // example: // An example of using a CommonJS compliant promise: // | asyncComputeTheAnswerToEverything(). // | then(addTwo). // | then(printResult, onError); // | >44 // var returnDeferred = progressCallback == mutator ? 
this : new dojo.Deferred(promise.cancel); var listener = { resolved: resolvedCallback, error: errorCallback, progress: progressCallback, deferred: returnDeferred }; if(nextListener){ head = head.next = listener; } else{ nextListener = head = listener; } if(finished){ notify(); } return returnDeferred.promise; }; var deferred = this; this.cancel = promise.cancel = function () { // summary: // Cancels the asynchronous operation if(!finished){ var error = canceller && canceller(deferred); if(!finished){ if (!(error instanceof Error)) { error = new Error(error); } error.log = false; deferred.reject(error); } } } freeze(promise); }; dojo.extend(dojo.Deferred, { addCallback: function (/*Function*/callback) { return this.addCallbacks(dojo.hitch.apply(dojo, arguments)); }, addErrback: function (/*Function*/errback) { return this.addCallbacks(null, dojo.hitch.apply(dojo, arguments)); }, addBoth: function (/*Function*/callback) { var enclosed = dojo.hitch.apply(dojo, arguments); return this.addCallbacks(enclosed, enclosed); }, fired: -1 }); })(); dojo.when = function(promiseOrValue, /*Function?*/callback, /*Function?*/errback, /*Function?*/progressHandler){ // summary: // This provides normalization between normal synchronous values and // asynchronous promises, so you can interact with them in a common way // example: // | function printFirstAndList(items){ // | dojo.when(findFirst(items), console.log); // | dojo.when(findLast(items), console.log); // | } // | function findFirst(items){ // | return dojo.when(items, function(items){ // | return items[0]; // | }); // | } // | function findLast(items){ // | return dojo.when(items, function(items){ // | return items[items.length]; // | }); // | } // And now all three of his functions can be used sync or async. 
// | printFirstAndLast([1,2,3,4]) will work just as well as // | printFirstAndLast(dojo.xhrGet(...)); if(promiseOrValue && typeof promiseOrValue.then === "function"){ return promiseOrValue.then(callback, errback, progressHandler); } return callback(promiseOrValue); }; } if(!dojo._hasResource["dojo._base.json"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.json"] = true; dojo.provide("dojo._base.json"); dojo.fromJson = function(/*String*/ json){ // summary: // Parses a [JSON](http://json.org) string to return a JavaScript object. // description: // Throws for invalid JSON strings, but it does not use a strict JSON parser. It // delegates to eval(). The content passed to this method must therefore come // from a trusted source. // json: // a string literal of a JSON item, for instance: // `'{ "foo": [ "bar", 1, { "baz": "thud" } ] }'` return eval("(" + json + ")"); // Object } dojo._escapeString = function(/*String*/str){ //summary: // Adds escape sequences for non-visual characters, double quote and // backslash and surrounds with double quotes to form a valid string // literal. return ('"' + str.replace(/(["\\])/g, '\\$1') + '"'). replace(/[\f]/g, "\\f").replace(/[\b]/g, "\\b").replace(/[\n]/g, "\\n"). replace(/[\t]/g, "\\t").replace(/[\r]/g, "\\r"); // string } dojo.toJsonIndentStr = "\t"; dojo.toJson = function(/*Object*/ it, /*Boolean?*/ prettyPrint, /*String?*/ _indentStr){ // summary: // Returns a [JSON](http://json.org) serialization of an object. // description: // Returns a [JSON](http://json.org) serialization of an object. // Note that this doesn't check for infinite recursion, so don't do that! // it: // an object to be serialized. Objects may define their own // serialization via a special "__json__" or "json" function // property. If a specialized serializer has been defined, it will // be used as a fallback. 
// prettyPrint: // if true, we indent objects and arrays to make the output prettier. // The variable `dojo.toJsonIndentStr` is used as the indent string -- // to use something other than the default (tab), change that variable // before calling dojo.toJson(). // _indentStr: // private variable for recursive calls when pretty printing, do not use. // example: // simple serialization of a trivial object // | var jsonStr = dojo.toJson({ howdy: "stranger!", isStrange: true }); // | doh.is('{"howdy":"stranger!","isStrange":true}', jsonStr); // example: // a custom serializer for an objects of a particular class: // | dojo.declare("Furby", null, { // | furbies: "are strange", // | furbyCount: 10, // | __json__: function(){ // | }, // | }); if(it === undefined){ return "undefined"; } var objtype = typeof it; if(objtype == "number" || objtype == "boolean"){ return it + ""; } if(it === null){ return "null"; } if(dojo.isString(it)){ return dojo._escapeString(it); } // recurse var recurse = arguments.callee; // short-circuit for objects that support "json" serialization // if they return "self" then just pass-through... var newObj; _indentStr = _indentStr || ""; var nextIndent = prettyPrint ? _indentStr + dojo.toJsonIndentStr : ""; var tf = it.__json__||it.json; if(dojo.isFunction(tf)){ newObj = tf.call(it); if(it !== newObj){ return recurse(newObj, prettyPrint, nextIndent); } } if(it.nodeType && it.cloneNode){ // isNode // we can't seriailize DOM nodes as regular objects because they have cycles // DOM nodes could be serialized with something like outerHTML, but // that can be provided by users in the form of .json or .__json__ function. throw new Error("Can't serialize DOM nodes"); } var sep = prettyPrint ? " " : ""; var newLine = prettyPrint ? 
"\n" : ""; // array if(dojo.isArray(it)){ var res = dojo.map(it, function(obj){ var val = recurse(obj, prettyPrint, nextIndent); if(typeof val != "string"){ val = "undefined"; } return newLine + nextIndent + val; }); return "[" + res.join("," + sep) + newLine + _indentStr + "]"; } /* // look in the registry try { window.o = it; newObj = dojo.json.jsonRegistry.match(it); return recurse(newObj, prettyPrint, nextIndent); }catch(e){ // console.log(e); } // it's a function with no adapter, skip it */ if(objtype == "function"){ return null; // null } // generic object code path var output = [], key; for(key in it){ var keyStr, val; if(typeof key == "number"){ keyStr = '"' + key + '"'; }else if(typeof key == "string"){ keyStr = dojo._escapeString(key); }else{ // skip non-string or number keys continue; } val = recurse(it[key], prettyPrint, nextIndent); if(typeof val != "string"){ // skip non-serializable values continue; } // FIXME: use += on Moz!! // MOW NOTE: using += is a pain because you have to account for the dangling comma... output.push(newLine + nextIndent + keyStr + ":" + sep + val); } return "{" + output.join("," + sep) + newLine + _indentStr + "}"; // String } } if(!dojo._hasResource["dojo._base.Color"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.Color"] = true; dojo.provide("dojo._base.Color"); (function(){ var d = dojo; dojo.Color = function(/*Array|String|Object*/ color){ // summary: // Takes a named string, hex string, array of rgb or rgba values, // an object with r, g, b, and a properties, or another `dojo.Color` object // and creates a new Color instance to work from. 
// // example: // Work with a Color instance: // | var c = new dojo.Color(); // | c.setColor([0,0,0]); // black // | var hex = c.toHex(); // #000000 // // example: // Work with a node's color: // | var color = dojo.style("someNode", "backgroundColor"); // | var n = new dojo.Color(color); // | // adjust the color some // | n.r *= .5; // | console.log(n.toString()); // rgb(128, 255, 255); if(color){ this.setColor(color); } }; // FIXME: // there's got to be a more space-efficient way to encode or discover // these!! Use hex? dojo.Color.named = { black: [0,0,0], silver: [192,192,192], gray: [128,128,128], white: [255,255,255], maroon: [128,0,0], red: [255,0,0], purple: [128,0,128], fuchsia: [255,0,255], green: [0,128,0], lime: [0,255,0], olive: [128,128,0], yellow: [255,255,0], navy: [0,0,128], blue: [0,0,255], teal: [0,128,128], aqua: [0,255,255], transparent: d.config.transparentColor || [255,255,255] }; dojo.extend(dojo.Color, { r: 255, g: 255, b: 255, a: 1, _set: function(r, g, b, a){ var t = this; t.r = r; t.g = g; t.b = b; t.a = a; }, setColor: function(/*Array|String|Object*/ color){ // summary: // Takes a named string, hex string, array of rgb or rgba values, // an object with r, g, b, and a properties, or another `dojo.Color` object // and sets this color instance to that value. 
// // example: // | var c = new dojo.Color(); // no color // | c.setColor("#ededed"); // greyish if(d.isString(color)){ d.colorFromString(color, this); }else if(d.isArray(color)){ d.colorFromArray(color, this); }else{ this._set(color.r, color.g, color.b, color.a); if(!(color instanceof d.Color)){ this.sanitize(); } } return this; // dojo.Color }, sanitize: function(){ // summary: // Ensures the object has correct attributes // description: // the default implementation does nothing, include dojo.colors to // augment it with real checks return this; // dojo.Color }, toRgb: function(){ // summary: // Returns 3 component array of rgb values // example: // | var c = new dojo.Color("#000000"); // | console.log(c.toRgb()); // [0,0,0] var t = this; return [t.r, t.g, t.b]; // Array }, toRgba: function(){ // summary: // Returns a 4 component array of rgba values from the color // represented by this object. var t = this; return [t.r, t.g, t.b, t.a]; // Array }, toHex: function(){ // summary: // Returns a CSS color string in hexadecimal representation // example: // | console.log(new dojo.Color([0,0,0]).toHex()); // #000000 var arr = d.map(["r", "g", "b"], function(x){ var s = this[x].toString(16); return s.length < 2 ? "0" + s : s; }, this); return "#" + arr.join(""); // String }, toCss: function(/*Boolean?*/ includeAlpha){ // summary: // Returns a css color string in rgb(a) representation // example: // | var c = new dojo.Color("#FFF").toCss(); // | console.log(c); // rgb('255','255','255') var t = this, rgb = t.r + ", " + t.g + ", " + t.b; return (includeAlpha ? 
"rgba(" + rgb + ", " + t.a : "rgb(" + rgb) + ")"; // String }, toString: function(){ // summary: // Returns a visual representation of the color return this.toCss(true); // String } }); dojo.blendColors = function( /*dojo.Color*/ start, /*dojo.Color*/ end, /*Number*/ weight, /*dojo.Color?*/ obj ){ // summary: // Blend colors end and start with weight from 0 to 1, 0.5 being a 50/50 blend, // can reuse a previously allocated dojo.Color object for the result var t = obj || new d.Color(); d.forEach(["r", "g", "b", "a"], function(x){ t[x] = start[x] + (end[x] - start[x]) * weight; if(x != "a"){ t[x] = Math.round(t[x]); } }); return t.sanitize(); // dojo.Color }; dojo.colorFromRgb = function(/*String*/ color, /*dojo.Color?*/ obj){ // summary: // Returns a `dojo.Color` instance from a string of the form // "rgb(...)" or "rgba(...)". Optionally accepts a `dojo.Color` // object to update with the parsed value and return instead of // creating a new object. // returns: // A dojo.Color object. If obj is passed, it will be the return value. var m = color.toLowerCase().match(/^rgba?\(([\s\.,0-9]+)\)/); return m && dojo.colorFromArray(m[1].split(/\s*,\s*/), obj); // dojo.Color }; dojo.colorFromHex = function(/*String*/ color, /*dojo.Color?*/ obj){ // summary: // Converts a hex string with a '#' prefix to a color object. // Supports 12-bit #rgb shorthand. Optionally accepts a // `dojo.Color` object to update with the parsed value. // // returns: // A dojo.Color object. If obj is passed, it will be the return value. // // example: // | var thing = dojo.colorFromHex("#ededed"); // grey, longhand // // example: // | var thing = dojo.colorFromHex("#000"); // black, shorthand var t = obj || new d.Color(), bits = (color.length == 4) ? 4 : 8, mask = (1 << bits) - 1; color = Number("0x" + color.substr(1)); if(isNaN(color)){ return null; // dojo.Color } d.forEach(["b", "g", "r"], function(x){ var c = color & mask; color >>= bits; t[x] = bits == 4 ? 
17 * c : c; }); t.a = 1; return t; // dojo.Color }; dojo.colorFromArray = function(/*Array*/ a, /*dojo.Color?*/ obj){ // summary: // Builds a `dojo.Color` from a 3 or 4 element array, mapping each // element in sequence to the rgb(a) values of the color. // example: // | var myColor = dojo.colorFromArray([237,237,237,0.5]); // grey, 50% alpha // returns: // A dojo.Color object. If obj is passed, it will be the return value. var t = obj || new d.Color(); t._set(Number(a[0]), Number(a[1]), Number(a[2]), Number(a[3])); if(isNaN(t.a)){ t.a = 1; } return t.sanitize(); // dojo.Color }; dojo.colorFromString = function(/*String*/ str, /*dojo.Color?*/ obj){ // summary: // Parses `str` for a color value. Accepts hex, rgb, and rgba // style color values. // description: // Acceptable input values for str may include arrays of any form // accepted by dojo.colorFromArray, hex strings such as "#aaaaaa", or // rgb or rgba strings such as "rgb(133, 200, 16)" or "rgba(10, 10, // 10, 50)" // returns: // A dojo.Color object. If obj is passed, it will be the return value. var a = d.Color.named[str]; return a && d.colorFromArray(a, obj) || d.colorFromRgb(str, obj) || d.colorFromHex(str, obj); }; })(); } if(!dojo._hasResource["dojo._base"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base"] = true; dojo.provide("dojo._base"); } if(!dojo._hasResource["dojo._base.window"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.window"] = true; dojo.provide("dojo._base.window"); /*===== dojo.doc = { // summary: // Alias for the current document. 'dojo.doc' can be modified // for temporary context shifting. Also see dojo.withDoc(). // description: // Refer to dojo.doc rather // than referring to 'window.document' to ensure your code runs // correctly in managed contexts. 
// example: // | n.appendChild(dojo.doc.createElement('div')); } =====*/ dojo.doc = window["document"] || null; dojo.body = function(){ // summary: // Return the body element of the document // return the body object associated with dojo.doc // example: // | dojo.body().appendChild(dojo.doc.createElement('div')); // Note: document.body is not defined for a strict xhtml document // Would like to memoize this, but dojo.doc can change vi dojo.withDoc(). return dojo.doc.body || dojo.doc.getElementsByTagName("body")[0]; // Node } dojo.setContext = function(/*Object*/globalObject, /*DocumentElement*/globalDocument){ // summary: // changes the behavior of many core Dojo functions that deal with // namespace and DOM lookup, changing them to work in a new global // context (e.g., an iframe). The varibles dojo.global and dojo.doc // are modified as a result of calling this function and the result of // `dojo.body()` likewise differs. dojo.global = globalObject; dojo.doc = globalDocument; }; dojo.withGlobal = function( /*Object*/globalObject, /*Function*/callback, /*Object?*/thisObject, /*Array?*/cbArguments){ // summary: // Invoke callback with globalObject as dojo.global and // globalObject.document as dojo.doc. // description: // Invoke callback with globalObject as dojo.global and // globalObject.document as dojo.doc. If provided, globalObject // will be executed in the context of object thisObject // When callback() returns or throws an error, the dojo.global // and dojo.doc will be restored to its previous state. var oldGlob = dojo.global; try{ dojo.global = globalObject; return dojo.withDoc.call(null, globalObject.document, callback, thisObject, cbArguments); }finally{ dojo.global = oldGlob; } } dojo.withDoc = function( /*DocumentElement*/documentObject, /*Function*/callback, /*Object?*/thisObject, /*Array?*/cbArguments){ // summary: // Invoke callback with documentObject as dojo.doc. // description: // Invoke callback with documentObject as dojo.doc. 
If provided, // callback will be executed in the context of object thisObject // When callback() returns or throws an error, the dojo.doc will // be restored to its previous state. var oldDoc = dojo.doc, oldLtr = dojo._bodyLtr, oldQ = dojo.isQuirks; try{ dojo.doc = documentObject; delete dojo._bodyLtr; // uncache dojo.isQuirks = dojo.doc.compatMode == "BackCompat"; // no need to check for QuirksMode which was Opera 7 only if(thisObject && typeof callback == "string"){ callback = thisObject[callback]; } return callback.apply(thisObject, cbArguments || []); }finally{ dojo.doc = oldDoc; delete dojo._bodyLtr; // in case it was undefined originally, and set to true/false by the alternate document if(oldLtr !== undefined){ dojo._bodyLtr = oldLtr; } dojo.isQuirks = oldQ; } }; } if(!dojo._hasResource["dojo._base.event"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.event"] = true; dojo.provide("dojo._base.event"); // this file courtesy of the TurboAjax Group, licensed under a Dojo CLA (function(){ // DOM event listener machinery var del = (dojo._event_listener = { add: function(/*DOMNode*/ node, /*String*/ name, /*Function*/ fp){ if(!node){return;} name = del._normalizeEventName(name); fp = del._fixCallback(name, fp); var oname = name; if( !dojo.isIE && (name == "mouseenter" || name == "mouseleave") ){ var ofp = fp; //oname = name; name = (name == "mouseenter") ? "mouseover" : "mouseout"; fp = function(e){ if(!dojo.isDescendant(e.relatedTarget, node)){ // e.type = oname; // FIXME: doesn't take? SJM: event.type is generally immutable. 
return ofp.call(this, e); } } } node.addEventListener(name, fp, false); return fp; /*Handle*/ }, remove: function(/*DOMNode*/ node, /*String*/ event, /*Handle*/ handle){ // summary: // clobbers the listener from the node // node: // DOM node to attach the event to // event: // the name of the handler to remove the function from // handle: // the handle returned from add if(node){ event = del._normalizeEventName(event); if(!dojo.isIE && (event == "mouseenter" || event == "mouseleave")){ event = (event == "mouseenter") ? "mouseover" : "mouseout"; } node.removeEventListener(event, handle, false); } }, _normalizeEventName: function(/*String*/ name){ // Generally, name should be lower case, unless it is special // somehow (e.g. a Mozilla DOM event). // Remove 'on'. return name.slice(0,2) =="on" ? name.slice(2) : name; }, _fixCallback: function(/*String*/ name, fp){ // By default, we only invoke _fixEvent for 'keypress' // If code is added to _fixEvent for other events, we have // to revisit this optimization. // This also applies to _fixEvent overrides for Safari and Opera // below. return name != "keypress" ? fp : function(e){ return fp.call(this, del._fixEvent(e, this)); }; }, _fixEvent: function(evt, sender){ // _fixCallback only attaches us to keypress. // Switch on evt.type anyway because we might // be called directly from dojo.fixEvent. switch(evt.type){ case "keypress": del._setKeyChar(evt); break; } return evt; }, _setKeyChar: function(evt){ evt.keyChar = evt.charCode ? 
String.fromCharCode(evt.charCode) : ''; evt.charOrCode = evt.keyChar || evt.keyCode; }, // For IE and Safari: some ctrl-key combinations (mostly w/punctuation) do not emit a char code in IE // we map those virtual key codes to ascii here // not valid for all (non-US) keyboards, so maybe we shouldn't bother _punctMap: { 106:42, 111:47, 186:59, 187:43, 188:44, 189:45, 190:46, 191:47, 192:96, 219:91, 220:92, 221:93, 222:39 } }); // DOM events dojo.fixEvent = function(/*Event*/ evt, /*DOMNode*/ sender){ // summary: // normalizes properties on the event object including event // bubbling methods, keystroke normalization, and x/y positions // evt: Event // native event object // sender: DOMNode // node to treat as "currentTarget" return del._fixEvent(evt, sender); } dojo.stopEvent = function(/*Event*/ evt){ // summary: // prevents propagation and clobbers the default action of the // passed event // evt: Event // The event object. If omitted, window.event is used on IE. evt.preventDefault(); evt.stopPropagation(); // NOTE: below, this method is overridden for IE } // the default listener to use on dontFix nodes, overriden for IE var node_listener = dojo._listener; // Unify connect and event listeners dojo._connect = function(obj, event, context, method, dontFix){ // FIXME: need a more strict test var isNode = obj && (obj.nodeType||obj.attachEvent||obj.addEventListener); // choose one of three listener options: raw (connect.js), DOM event on a Node, custom event on a Node // we need the third option to provide leak prevention on broken browsers (IE) var lid = isNode ? (dontFix ? 2 : 1) : 0, l = [dojo._listener, del, node_listener][lid]; // create a listener var h = l.add(obj, event, dojo.hitch(context, method)); // formerly, the disconnect package contained "l" directly, but if client code // leaks the disconnect package (by connecting it to a node), referencing "l" // compounds the problem. // instead we return a listener id, which requires custom _disconnect below. 
// return disconnect package return [ obj, event, h, lid ]; } dojo._disconnect = function(obj, event, handle, listener){ ([dojo._listener, del, node_listener][listener]).remove(obj, event, handle); } // Constants // Public: client code should test // keyCode against these named constants, as the // actual codes can vary by browser. dojo.keys = { // summary: // Definitions for common key values BACKSPACE: 8, TAB: 9, CLEAR: 12, ENTER: 13, SHIFT: 16, CTRL: 17, ALT: 18, META: dojo.isSafari ? 91 : 224, // the apple key on macs PAUSE: 19, CAPS_LOCK: 20, ESCAPE: 27, SPACE: 32, PAGE_UP: 33, PAGE_DOWN: 34, END: 35, HOME: 36, LEFT_ARROW: 37, UP_ARROW: 38, RIGHT_ARROW: 39, DOWN_ARROW: 40, INSERT: 45, DELETE: 46, HELP: 47, LEFT_WINDOW: 91, RIGHT_WINDOW: 92, SELECT: 93, NUMPAD_0: 96, NUMPAD_1: 97, NUMPAD_2: 98, NUMPAD_3: 99, NUMPAD_4: 100, NUMPAD_5: 101, NUMPAD_6: 102, NUMPAD_7: 103, NUMPAD_8: 104, NUMPAD_9: 105, NUMPAD_MULTIPLY: 106, NUMPAD_PLUS: 107, NUMPAD_ENTER: 108, NUMPAD_MINUS: 109, NUMPAD_PERIOD: 110, NUMPAD_DIVIDE: 111, F1: 112, F2: 113, F3: 114, F4: 115, F5: 116, F6: 117, F7: 118, F8: 119, F9: 120, F10: 121, F11: 122, F12: 123, F13: 124, F14: 125, F15: 126, NUM_LOCK: 144, SCROLL_LOCK: 145, // virtual key mapping copyKey: dojo.isMac && !dojo.isAIR ? (dojo.isSafari ? 91 : 224 ) : 17 }; var evtCopyKey = dojo.isMac ? "metaKey" : "ctrlKey"; dojo.isCopyKey = function(e){ // summary: // Checks an event for the copy key (meta on Mac, and ctrl anywhere else) // e: Event // Event object to examine return e[evtCopyKey]; // Boolean }; // Public: decoding mouse buttons from events /*===== dojo.mouseButtons = { // LEFT: Number // Numeric value of the left mouse button for the platform. LEFT: 0, // MIDDLE: Number // Numeric value of the middle mouse button for the platform. MIDDLE: 1, // RIGHT: Number // Numeric value of the right mouse button for the platform. 
RIGHT: 2, isButton: function(e, button){ // summary: // Checks an event object for a pressed button // e: Event // Event object to examine // button: Number // The button value (example: dojo.mouseButton.LEFT) return e.button == button; // Boolean }, isLeft: function(e){ // summary: // Checks an event object for the pressed left button // e: Event // Event object to examine return e.button == 0; // Boolean }, isMiddle: function(e){ // summary: // Checks an event object for the pressed middle button // e: Event // Event object to examine return e.button == 1; // Boolean }, isRight: function(e){ // summary: // Checks an event object for the pressed right button // e: Event // Event object to examine return e.button == 2; // Boolean } }; =====*/ if(dojo.isIE){ dojo.mouseButtons = { LEFT: 1, MIDDLE: 4, RIGHT: 2, // helper functions isButton: function(e, button){ return e.button & button; }, isLeft: function(e){ return e.button & 1; }, isMiddle: function(e){ return e.button & 4; }, isRight: function(e){ return e.button & 2; } }; }else{ dojo.mouseButtons = { LEFT: 0, MIDDLE: 1, RIGHT: 2, // helper functions isButton: function(e, button){ return e.button == button; }, isLeft: function(e){ return e.button == 0; }, isMiddle: function(e){ return e.button == 1; }, isRight: function(e){ return e.button == 2; } }; } // IE event normalization if(dojo.isIE){ var _trySetKeyCode = function(e, code){ try{ // squelch errors when keyCode is read-only // (e.g. if keyCode is ctrl or shift) return (e.keyCode = code); }catch(e){ return 0; } } // by default, use the standard listener var iel = dojo._listener; var listenersName = (dojo._ieListenersName = "_" + dojo._scopeName + "_listeners"); // dispatcher tracking property if(!dojo.config._allow_leaks){ // custom listener that handles leak protection for DOM events node_listener = iel = dojo._ie_listener = { // support handler indirection: event handler functions are // referenced here. Event dispatchers hold only indices. 
handlers: [],
	// add a listener to an object
	add: function(/*Object*/ source, /*String*/ method, /*Function*/ listener){
		// NOTE(review): listeners are stored as integer indices into the shared
		// handlers table (ieh) instead of as direct function references, so DOM
		// nodes never close over JS functions (IE circular-reference leak avoidance).
		source = source || dojo.global;
		var f = source[method];
		if(!f||!f[listenersName]){
			// no dispatcher installed on source[method] yet -- create one
			var d = dojo._getIeDispatcher();
			// original target function is special
			d.target = f && (ieh.push(f) - 1);
			// dispatcher holds a list of indices into handlers table
			d[listenersName] = [];
			// redirect source to dispatcher
			f = source[method] = d;
		}
		// Array.push returns the new length, so the handle is the 1-based
		// position of this listener in the dispatcher's listener list
		return f[listenersName].push(ieh.push(listener) - 1) ; /*Handle*/
	},
	// remove a listener from an object
	remove: function(/*Object*/ source, /*String*/ method, /*Handle*/ handle){
		var f = (source||dojo.global)[method], l = f && f[listenersName];
		// handle-- both rejects a falsy/0 handle and converts the 1-based
		// handle back into a 0-based index into the listener list
		if(f && l && handle--){
			delete ieh[l[handle]];
			delete l[handle];
		}
	}
};
// alias used above
var ieh = iel.handlers;
}

dojo.mixin(del, {
	add: function(/*DOMNode*/ node, /*String*/ event, /*Function*/ fp){
		// summary: IE add(); transparently installs a stealth onkeydown
		//		handler so "onkeypress" also fires for unprintable keys.
		if(!node){return;} // undefined
		event = del._normalizeEventName(event);
		if(event=="onkeypress"){
			// we need to listen to onkeydown to synthesize
			// keypress events that otherwise won't fire
			// on IE
			var kd = node.onkeydown;
			if(!kd || !kd[listenersName] || !kd._stealthKeydownHandle){
				var h = del.add(node, "onkeydown", del._stealthKeyDown);
				kd = node.onkeydown;
				kd._stealthKeydownHandle = h;
				// refcount so the stealth handler is removed only when the
				// last keypress listener goes away (see remove below)
				kd._stealthKeydownRefs = 1;
			}else{
				kd._stealthKeydownRefs++;
			}
		}
		return iel.add(node, event, del._fixCallback(fp));
	},
	remove: function(/*DOMNode*/ node, /*String*/ event, /*Handle*/ handle){
		event = del._normalizeEventName(event);
		iel.remove(node, event, handle);
		if(event=="onkeypress"){
			var kd = node.onkeydown;
			if(--kd._stealthKeydownRefs <= 0){
				iel.remove(node, "onkeydown", kd._stealthKeydownHandle);
				delete kd._stealthKeydownHandle;
			}
		}
	},
	_normalizeEventName: function(/*String*/ eventName){
		// Generally, eventName should be lower case, unless it is
		// special somehow (e.g. a Mozilla event)
		// ensure 'on'
		return eventName.slice(0,2) != "on" ?
			"on" + eventName : eventName;
	},
	_nop: function(){},
	_fixEvent: function(/*Event*/ evt, /*DOMNode*/ sender){
		// summary:
		//		normalizes properties on the event object including event
		//		bubbling methods, keystroke normalization, and x/y positions
		// evt:
		//		native event object
		// sender:
		//		node to treat as "currentTarget"
		if(!evt){
			// no event passed: pull it off the sender's window
			var w = sender && (sender.ownerDocument || sender.document || sender).parentWindow || window;
			evt = w.event;
		}
		if(!evt){return(evt);}
		evt.target = evt.srcElement;
		evt.currentTarget = (sender || evt.srcElement);
		evt.layerX = evt.offsetX;
		evt.layerY = evt.offsetY;
		// FIXME: scroll position query is duped from dojo.html to
		// avoid dependency on that entire module. Now that HTML is in
		// Base, we should convert back to something similar there.
		var se = evt.srcElement, doc = (se && se.ownerDocument) || document;
		// DO NOT replace the following to use dojo.body(), in IE, document.documentElement should be used
		// here rather than document.body
		var docBody = ((dojo.isIE < 6) || (doc["compatMode"] == "BackCompat")) ? doc.body : doc.documentElement;
		var offset = dojo._getIeDocumentElementOffset();
		// synthesize pageX/pageY (IE doesn't provide them natively)
		evt.pageX = evt.clientX + dojo._fixIeBiDiScrollLeft(docBody.scrollLeft || 0) - offset.x;
		evt.pageY = evt.clientY + (docBody.scrollTop || 0) - offset.y;
		if(evt.type == "mouseover"){
			evt.relatedTarget = evt.fromElement;
		}
		if(evt.type == "mouseout"){
			evt.relatedTarget = evt.toElement;
		}
		evt.stopPropagation = del._stopPropagation;
		evt.preventDefault = del._preventDefault;
		return del._fixKeys(evt);
	},
	_fixKeys: function(evt){
		// summary: maps IE key codes to Mozilla-like charCode semantics
		switch(evt.type){
			case "keypress":
				var c = ("charCode" in evt ? evt.charCode : evt.keyCode);
				if (c==10){
					// CTRL-ENTER is CTRL-ASCII(10) on IE, but CTRL-ENTER on Mozilla
					c=0;
					evt.keyCode = 13;
				}else if(c==13||c==27){
					c=0; // Mozilla considers ENTER and ESC non-printable
				}else if(c==3){
					c=99; // Mozilla maps CTRL-BREAK to CTRL-c
				}
				// Mozilla sets keyCode to 0 when there is a charCode
				// but that stops the event on IE.
				evt.charCode = c;
				del._setKeyChar(evt);
				break;
		}
		return evt;
	},
	_stealthKeyDown: function(evt){
		// IE doesn't fire keypress for most non-printable characters.
		// other browsers do, we simulate it here.
		var kp = evt.currentTarget.onkeypress;
		// only works if kp exists and is a dispatcher
		if(!kp || !kp[listenersName]){ return; }
		// munge key/charCode
		var k=evt.keyCode;
		// These are Windows Virtual Key Codes
		// http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/WinUI/WindowsUserInterface/UserInput/VirtualKeyCodes.asp
		var unprintable = k!=13 && k!=32 && k!=27 && (k<48||k>90) && (k<96||k>111) && (k<186||k>192) && (k<219||k>222);
		// synthesize keypress for most unprintables and CTRL-keys
		if(unprintable||evt.ctrlKey){
			var c = unprintable ? 0 : k;
			if(evt.ctrlKey){
				if(k==3 || k==13){
					return; // IE will post CTRL-BREAK, CTRL-ENTER as keypress natively
				}else if(c>95 && c<106){
					c -= 48; // map CTRL-[numpad 0-9] to ASCII
				}else if((!evt.shiftKey)&&(c>=65&&c<=90)){
					c += 32; // map CTRL-[A-Z] to lowercase
				}else{
					c = del._punctMap[c] || c; // map other problematic CTRL combinations to ASCII
				}
			}
			// simulate a keypress event
			var faux = del._synthesizeEvent(evt, {type: 'keypress', faux: true, charCode: c});
			kp.call(evt.currentTarget, faux);
			// copy the synthetic event's cancellation state back onto the
			// real keydown event so it is honored by IE
			evt.cancelBubble = faux.cancelBubble;
			evt.returnValue = faux.returnValue;
			_trySetKeyCode(evt, faux.keyCode);
		}
	},
	// Called in Event scope
	_stopPropagation: function(){
		this.cancelBubble = true;
	},
	_preventDefault: function(){
		// Setting keyCode to 0 is the only way to prevent certain keypresses (namely
		// ctrl-combinations that correspond to menu accelerator keys).
		// Otoh, it prevents upstream listeners from getting this information
		// Try to split the difference here by clobbering keyCode only for ctrl
		// combinations. If you still need to access the key upstream, bubbledKeyCode is
		// provided as a workaround.
		this.bubbledKeyCode = this.keyCode;
		if(this.ctrlKey){_trySetKeyCode(this, 0);}
		this.returnValue = false;
	}
});

// override stopEvent for IE
dojo.stopEvent = function(evt){
	evt = evt || window.event;
	del._stopPropagation.call(evt);
	del._preventDefault.call(evt);
}
}

del._synthesizeEvent = function(evt, props){
	// summary: shallow-copies evt, mixes in props, and wires up
	//		preventDefault/stopPropagation so they act on the original event.
	var faux = dojo.mixin({}, evt, props);
	del._setKeyChar(faux);
	// FIXME: would prefer to use dojo.hitch: dojo.hitch(evt, evt.preventDefault);
	// but it throws an error when preventDefault is invoked on Safari
	// does Event.preventDefault not support "apply" on Safari?
	faux.preventDefault = function(){ evt.preventDefault(); };
	faux.stopPropagation = function(){ evt.stopPropagation(); };
	return faux;
}

// Opera event normalization
if(dojo.isOpera){
	dojo.mixin(del, {
		_fixEvent: function(evt, sender){
			switch(evt.type){
				case "keypress":
					var c = evt.which;
					if(c==3){
						c=99; // Mozilla maps CTRL-BREAK to CTRL-c
					}
					// can't trap some keys at all, like INSERT and DELETE
					// there is no differentiating info between DELETE and ".", or INSERT and "-"
					c = c<41 && !evt.shiftKey ? 0 : c;
					if(evt.ctrlKey && !evt.shiftKey && c>=65 && c<=90){
						// lowercase CTRL-[A-Z] keys
						c += 32;
					}
					return del._synthesizeEvent(evt, { charCode: c });
			}
			return evt;
		}
	});
}

// Webkit event normalization
if(dojo.isWebKit){
	del._add = del.add;
	del._remove = del.remove;

	dojo.mixin(del, {
		add: function(/*DOMNode*/ node, /*String*/ event, /*Function*/ fp){
			if(!node){return;} // undefined
			var handle = del._add(node, event, fp);
			if(del._normalizeEventName(event) == "keypress"){
				// we need to listen to onkeydown to synthesize
				// keypress events that otherwise won't fire
				// in Safari 3.1+: https://lists.webkit.org/pipermail/webkit-dev/2007-December/002992.html
				handle._stealthKeyDownHandle = del._add(node, "keydown", function(evt){
					//A variation on the IE _stealthKeydown function
					//Synthesize an onkeypress event, but only for unprintable characters.
					var k=evt.keyCode;
					// These are Windows Virtual Key Codes
					// http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/WinUI/WindowsUserInterface/UserInput/VirtualKeyCodes.asp
					var unprintable = k!=13 && k!=32 && (k<48 || k>90) && (k<96 || k>111) && (k<186 || k>192) && (k<219 || k>222);
					// synthesize keypress for most unprintables and CTRL-keys
					if(unprintable || evt.ctrlKey){
						var c = unprintable ? 0 : k;
						if(evt.ctrlKey){
							if(k==3 || k==13){
								return; // IE will post CTRL-BREAK, CTRL-ENTER as keypress natively
							}else if(c>95 && c<106){
								c -= 48; // map CTRL-[numpad 0-9] to ASCII
							}else if(!evt.shiftKey && c>=65 && c<=90){
								c += 32; // map CTRL-[A-Z] to lowercase
							}else{
								c = del._punctMap[c] || c; // map other problematic CTRL combinations to ASCII
							}
						}
						// simulate a keypress event
						var faux = del._synthesizeEvent(evt, {type: 'keypress', faux: true, charCode: c});
						fp.call(evt.currentTarget, faux);
					}
				});
			}
			return handle; /*Handle*/
		},
		remove: function(/*DOMNode*/ node, /*String*/ event, /*Handle*/ handle){
			if(node){
				// tear down the companion stealth keydown handler, if any
				if(handle._stealthKeyDownHandle){
					del._remove(node, "keydown", handle._stealthKeyDownHandle);
				}
				del._remove(node, event, handle);
			}
		},
		_fixEvent: function(evt, sender){
			switch(evt.type){
				case "keypress":
					if(evt.faux){ return evt; }
					var c = evt.charCode;
					// charCodes below 32 are control characters; report 0
					// for them as other browsers do for non-printables
					c = c>=32 ?
						c : 0;
					return del._synthesizeEvent(evt, {charCode: c, faux: true});
			}
			return evt;
		}
	});
}
})();

if(dojo.isIE){
	// keep this out of the closure
	// closing over 'iel' or 'ieh' b0rks leak prevention
	// ls[i] is an index into the master handler array
	dojo._ieDispatcher = function(args, sender){
		var ap = Array.prototype,
			h = dojo._ie_listener.handlers,
			c = args.callee,
			ls = c[dojo._ieListenersName],
			t = h[c.target];
		// return value comes from original target function
		var r = t && t.apply(sender, args);
		// make local copy of listener array so it's immutable during processing
		var lls = [].concat(ls);
		// invoke listeners after target function
		for(var i in lls){
			var f = h[lls[i]];
			// the (i in ap) check skips keys inherited from an extended
			// Array.prototype; also skip listeners deleted via remove()
			if(!(i in ap) && f){
				f.apply(sender, args);
			}
		}
		return r;
	}
	dojo._getIeDispatcher = function(){
		// ensure the returned function closes over nothing ("new Function" apparently doesn't close)
		return new Function(dojo._scopeName + "._ieDispatcher(arguments, this)"); // function
	}
	// keep this out of the closure to reduce RAM allocation
	dojo._event_listener._fixCallback = function(fp){
		var f = dojo._event_listener._fixEvent;
		return function(e){
			return fp.call(this, f(e, this));
		};
	}
}

}

if(!dojo._hasResource["dojo._base.html"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code.
dojo._hasResource["dojo._base.html"] = true;
dojo.provide("dojo._base.html");

// FIXME: need to add unit tests for all the semi-public methods

try{
	// cache background images so IE doesn't re-request them on every hover
	document.execCommand("BackgroundImageCache", false, true);
}catch(e){
	// sane browsers don't have cache "issues"
}

// =============================
// DOM Functions
// =============================

/*=====
dojo.byId = function(id, doc){
	// summary:
	//		Returns DOM node with matching `id` attribute or `null`
	//		if not found. If `id` is a DomNode, this function is a no-op.
	//
	// id: String|DOMNode
	//		A string to match an HTML id attribute or a reference to a DOM Node
	//
	// doc: Document?
	//		Document to work in.
Defaults to the current value of
	//		dojo.doc.  Can be used to retrieve
	//		node references from other documents.
	//
	// example:
	//		Look up a node by ID:
	//	|	var n = dojo.byId("foo");
	//
	// example:
	//		Check if a node exists, and use it.
	//	|	var n = dojo.byId("bar");
	//	|	if(n){ doStuff() ... }
	//
	// example:
	//		Allow string or DomNode references to be passed to a custom function:
	//	|	var foo = function(nodeOrId){
	//	|		nodeOrId = dojo.byId(nodeOrId);
	//	|		// ... more stuff
	//	|	}
=====*/
if(dojo.isIE || dojo.isOpera){
	dojo.byId = function(id, doc){
		if(typeof id != "string"){
			return id;
		}
		var _d = doc || dojo.doc, te = _d.getElementById(id);
		// attributes.id.value is better than just id in case the
		// user has a name=id inside a form
		if(te && (te.attributes.id.value == id || te.id == id)){
			return te;
		}else{
			// IE's getElementById also matches name="..."; fall back to
			// document.all and scan for the element whose id really matches
			var eles = _d.all[id];
			if(!eles || eles.nodeName){
				eles = [eles];
			}
			// if more than 1, choose first with the correct id
			var i=0;
			while((te=eles[i++])){
				if((te.attributes && te.attributes.id && te.attributes.id.value == id)
					|| te.id == id){
					return te;
				}
			}
		}
	};
}else{
	dojo.byId = function(id, doc){
		// inline'd type check
		return (typeof id == "string") ?
			(doc || dojo.doc).getElementById(id) : id; // DomNode
	};
}
/*=====
};
=====*/

(function(){
	var d = dojo;
	var byId = d.byId;

	var _destroyContainer = null,
		_destroyDoc;
	d.addOnWindowUnload(function(){
		_destroyContainer = null; //prevent IE leak
	});

	/*=====
	dojo._destroyElement = function(node){
		// summary:
		//		Existing alias for `dojo.destroy`. Deprecated, will be removed
		//		in 2.0
	}
	=====*/

	dojo._destroyElement = dojo.destroy = function(/*String|DomNode*/node){
		// summary:
		//		Removes a node from its parent, clobbering it and all of its
		//		children.
		//
		// description:
		//		Removes a node from its parent, clobbering it and all of its
		//		children. Function only works with DomNodes, and returns nothing.
		//
		// node:
		//		A String ID or DomNode reference of the element to be destroyed
		//
		// example:
		//		Destroy a node byId:
		//	|	dojo.destroy("someId");
		//
		// example:
		//		Destroy all nodes in a list by reference:
		//	|	dojo.query(".someNode").forEach(dojo.destroy);

		node = byId(node);
		try{
			var doc = node.ownerDocument;
			// cannot use _destroyContainer.ownerDocument since this can throw an exception on IE
			if(!_destroyContainer || _destroyDoc != doc){
				_destroyContainer = doc.createElement("div");
				_destroyDoc = doc;
			}
			// detach the node (if attached) and park it in the scratch div,
			// then wipe the div so the node and its children are released
			_destroyContainer.appendChild(node.parentNode ? node.parentNode.removeChild(node) : node);
			// NOTE: see http://trac.dojotoolkit.org/ticket/2931. This may be a bug and not a feature
			_destroyContainer.innerHTML = "";
		}catch(e){
			/* squelch */
		}
	};

	dojo.isDescendant = function(/*DomNode|String*/node, /*DomNode|String*/ancestor){
		// summary:
		//		Returns true if node is a descendant of ancestor
		// node: string id or node reference to test
		// ancestor: string id or node reference of potential parent to test against
		//
		// example:
		//		Test is node id="bar" is a descendant of node id="foo"
		//	|	if(dojo.isDescendant("bar", "foo")){ ... }
		try{
			node = byId(node);
			ancestor = byId(ancestor);
			// walk up the parent chain; note a node counts as its own descendant
			while(node){
				if(node == ancestor){
					return true; // Boolean
				}
				node = node.parentNode;
			}
		}catch(e){ /* squelch, return false */ }
		return false; // Boolean
	};

	dojo.setSelectable = function(/*DomNode|String*/node, /*Boolean*/selectable){
		// summary:
		//		Enable or disable selection on a node
		// node:
		//		id or reference to node
		// selectable:
		//		state to put the node in. false indicates unselectable, true
		//		allows selection.
		// example:
		//		Make the node id="bar" unselectable
		//	|	dojo.setSelectable("bar");
		// example:
		//		Make the node id="bar" selectable
		//	|	dojo.setSelectable("bar", true);
		node = byId(node);
		if(d.isMozilla){
			node.style.MozUserSelect = selectable ? "" : "none";
		}else if(d.isKhtml || d.isWebKit){
			node.style.KhtmlUserSelect = selectable ?
			"auto" : "none";
		}else if(d.isIE){
			// IE has no CSS equivalent; set the unselectable attribute on
			// the node and on every descendant element
			var v = (node.unselectable = selectable ? "" : "on");
			d.query("*", node).forEach("item.unselectable = '"+v+"'");
		}
		//FIXME: else?  Opera?
	};

	var _insertBefore = function(/*DomNode*/node, /*DomNode*/ref){
		// insert node immediately before ref (no-op when ref is detached)
		var parent = ref.parentNode;
		if(parent){
			parent.insertBefore(node, ref);
		}
	};

	var _insertAfter = function(/*DomNode*/node, /*DomNode*/ref){
		// summary:
		//		Try to insert node after ref
		var parent = ref.parentNode;
		if(parent){
			if(parent.lastChild == ref){
				parent.appendChild(node);
			}else{
				parent.insertBefore(node, ref.nextSibling);
			}
		}
	};

	dojo.place = function(node, refNode, position){
		// summary:
		//		Attempt to insert node into the DOM, choosing from various positioning options.
		//		Returns the first argument resolved to a DOM node.
		//
		// node: String|DomNode
		//		id or node reference, or HTML fragment starting with "<" to place relative to refNode
		//
		// refNode: String|DomNode
		//		id or node reference to use as basis for placement
		//
		// position: String|Number?
		//		string noting the position of node relative to refNode or a
		//		number indicating the location in the childNodes collection of refNode.
		//		Accepted string values are:
		//	|	* before
		//	|	* after
		//	|	* replace
		//	|	* only
		//	|	* first
		//	|	* last
		//		"first" and "last" indicate positions as children of refNode, "replace" replaces refNode,
		//		"only" replaces all children.  position defaults to "last" if not specified
		//
		// returns: DomNode
		//		Returned values is the first argument resolved to a DOM node.
		//
		//		.place() is also a method of `dojo.NodeList`, allowing `dojo.query` node lookups.
		//
		// example:
		//		Place a node by string id as the last child of another node by string id:
		//	|	dojo.place("someNode", "anotherNode");
		//
		// example:
		//		Place a node by string id before another node by string id
		//	|	dojo.place("someNode", "anotherNode", "before");
		//
		// example:
		//		Create a Node, and place it in the body element (last child):
		//	|	dojo.place("<div></div>", dojo.body());
		//
		// example:
		//		Put a new LI as the first child of a list by id:
		//	|	dojo.place("<li></li>", "someUl", "first");

		refNode = byId(refNode);
		if(typeof node == "string"){ // inline'd type check
			// strings starting with "<" are parsed as HTML fragments,
			// anything else is treated as an element id
			node = node.charAt(0) == "<" ? d._toDom(node, refNode.ownerDocument) : byId(node);
		}
		if(typeof position == "number"){ // inline'd type check
			var cn = refNode.childNodes;
			if(!cn.length || cn.length <= position){
				refNode.appendChild(node);
			}else{
				// negative indices are clamped to 0 (prepend)
				_insertBefore(node, cn[position < 0 ? 0 : position]);
			}
		}else{
			switch(position){
				case "before":
					_insertBefore(node, refNode);
					break;
				case "after":
					_insertAfter(node, refNode);
					break;
				case "replace":
					refNode.parentNode.replaceChild(node, refNode);
					break;
				case "only":
					d.empty(refNode);
					refNode.appendChild(node);
					break;
				case "first":
					if(refNode.firstChild){
						_insertBefore(node, refNode.firstChild);
						break;
					}
					// else fallthrough...
				default: // aka: last
					refNode.appendChild(node);
			}
		}
		return node; // DomNode
	}

	// Box functions will assume this model.
	// On IE/Opera, BORDER_BOX will be set if the primary document is in quirks mode.
	// Can be set to change behavior of box setters.
	// can be either:
	//	"border-box"
	//	"content-box" (default)
	dojo.boxModel = "content-box";

	// We punt per-node box mode testing completely.
	// If anybody cares, we can provide an additional (optional) unit
	// that overrides existing code to include per-node box sensitivity.

	// Opera documentation claims that Opera 9 uses border-box in BackCompat mode.
	// but experiments (Opera 9.10.8679 on Windows Vista) indicate that it actually continues to use content-box.
	// IIRC, earlier versions of Opera did in fact use border-box.
	// Opera guys, this is really confusing. Opera being broken in quirks mode is not our fault.

	if(d.isIE /*|| dojo.isOpera*/){
		// client code may have to adjust if compatMode varies across iframes
		d.boxModel = document.compatMode == "BackCompat" ? "border-box" : "content-box";
	}

	// =============================
	// Style Functions
	// =============================

	// getComputedStyle drives most of the style code.
	// Wherever possible, reuse the returned object.
	//
	// API functions below that need to access computed styles accept an
	// optional computedStyle parameter.
	// If this parameter is omitted, the functions will call getComputedStyle themselves.
	// This way, calling code can access computedStyle once, and then pass the reference to
	// multiple API functions.

	/*=====
	dojo.getComputedStyle = function(node){
		// summary:
		//		Returns a "computed style" object.
		//
		// description:
		//		Gets a "computed style" object which can be used to gather
		//		information about the current state of the rendered node.
		//
		//		Note that this may behave differently on different browsers.
		//		Values may have different formats and value encodings across
		//		browsers.
		//
		//		Note also that this method is expensive.  Wherever possible,
		//		reuse the returned object.
		//
		//		Use the dojo.style() method for more consistent (pixelized)
		//		return values.
		//
		// node: DOMNode
		//		A reference to a DOM node. Does NOT support taking an
		//		ID string for speed reasons.
		// example:
		//	|	dojo.getComputedStyle(dojo.byId('foo')).borderWidth;
		//
		// example:
		//		Reusing the returned object, avoiding multiple lookups:
		//	|	var cs = dojo.getComputedStyle(dojo.byId("someNode"));
		//	|	var w = cs.width, h = cs.height;
		return; // CSS2Properties
	}
	=====*/

	// Although we normally eschew argument validation at this
	// level, here we test argument 'node' for (duck)type,
	// by testing nodeType, ecause 'document' is the 'parentNode' of 'body'
	// it is frequently sent to this function even
	// though it is not Element.
	var gcs;
	if(d.isWebKit){
		gcs = function(/*DomNode*/node){
			var s;
			if(node.nodeType == 1){
				var dv = node.ownerDocument.defaultView;
				s = dv.getComputedStyle(node, null);
				// WebKit can return null here; clear display and retry
				if(!s && node.style){
					node.style.display = "";
					s = dv.getComputedStyle(node, null);
				}
			}
			return s || {};
		};
	}else if(d.isIE){
		gcs = function(node){
			// IE (as of 7) doesn't expose Element like sane browsers
			return node.nodeType == 1 /* ELEMENT_NODE*/ ? node.currentStyle : {};
		};
	}else{
		gcs = function(node){
			return node.nodeType == 1 ?
				node.ownerDocument.defaultView.getComputedStyle(node, null) : {};
		};
	}
	dojo.getComputedStyle = gcs;

	if(!d.isIE){
		d._toPixelValue = function(element, value){
			// style values can be floats, client code may want
			// to round for integer pixels.
			return parseFloat(value) || 0;
		};
	}else{
		d._toPixelValue = function(element, avalue){
			if(!avalue){ return 0; }
			// on IE7, medium is usually 4 pixels
			if(avalue == "medium"){ return 4; }
			// style values can be floats, client code may
			// want to round this value for integer pixels.
			if(avalue.slice && avalue.slice(-2) == 'px'){ return parseFloat(avalue); }
			// non-px unit: let IE itself convert the value by assigning it
			// to style.left and reading back the pixelLeft equivalent,
			// restoring the original left values afterwards
			with(element){
				var sLeft = style.left;
				var rsLeft = runtimeStyle.left;
				runtimeStyle.left = currentStyle.left;
				try{
					// 'avalue' may be incompatible with style.left, which can cause IE to throw
					// this has been observed for border widths using "thin", "medium", "thick" constants
					// those particular constants could be trapped by a lookup
					// but perhaps there are more
					style.left = avalue;
					avalue = style.pixelLeft;
				}catch(e){
					avalue = 0;
				}
				style.left = sLeft;
				runtimeStyle.left = rsLeft;
			}
			return avalue;
		}
	}
	// local alias used heavily by the box/style code below
	var px = d._toPixelValue;

	// FIXME: there opacity quirks on FF that we haven't ported over. Hrm.
	/*=====
	dojo._getOpacity = function(node){
		// summary:
		//		Returns the current opacity of the passed node as a
		//		floating-point value between 0 and 1.
		// node: DomNode
		//		a reference to a DOM node. Does NOT support taking an
		//		ID string for speed reasons.
		// returns: Number between 0 and 1
		return; // Number
	}
	=====*/

	var astr = "DXImageTransform.Microsoft.Alpha";
	var af = function(n, f){
		// fetch the node's IE Alpha filter; when absent, returns {} if f is
		// truthy, else null
		try{
			return n.filters.item(astr);
		}catch(e){
			return f ? {} : null;
		}
	};

	dojo._getOpacity = d.isIE ? function(node){
		try{
			// IE stores opacity as a 0-100 filter value
			return af(node).Opacity / 100; // Number
		}catch(e){
			return 1; // Number
		}
	} : function(node){
		return gcs(node).opacity;
	};

	/*=====
	dojo._setOpacity = function(node, opacity){
		// summary:
		//		set the opacity of the passed node portably. Returns the
		//		new opacity of the node.
		// node: DOMNode
		//		a reference to a DOM node. Does NOT support taking an
		//		ID string for performance reasons.
		// opacity: Number
		//		A Number between 0 and 1. 0 specifies transparent.
		// returns: Number between 0 and 1
		return; // Number
	}
	=====*/

	dojo._setOpacity = d.isIE ? function(/*DomNode*/node, /*Number*/opacity){
		var ov = opacity * 100, opaque = opacity == 1;
		// setting zoom gives the node "layout" so filters apply
		node.style.zoom = opaque ?
			"" : 1;

		if(!af(node)){
			if(opaque){
				return opacity;
			}
			node.style.filter += " progid:" + astr + "(Opacity=" + ov + ")";
		}else{
			af(node, 1).Opacity = ov;
		}

		// on IE7 Alpha(Filter opacity=100) makes text look fuzzy so disable it altogether (bug #2661),
		//but still update the opacity value so we can get a correct reading if it is read later.
		af(node, 1).Enabled = !opaque;

		if(node.nodeName.toLowerCase() == "tr"){
			// filters don't inherit into table cells; apply to each TD
			d.query("> td", node).forEach(function(i){
				d._setOpacity(i, opacity);
			});
		}
		return opacity;
	} : function(node, opacity){
		return node.style.opacity = opacity;
	};

	var _pixelNamesCache = {
		left: true, top: true
	};
	var _pixelRegExp = /margin|padding|width|height|max|min|offset/;  // |border
	var _toStyleValue = function(node, type, value){
		// normalize a raw computed-style value for the dojo.style() getter
		type = type.toLowerCase(); // FIXME: should we really be doing string case conversion here? Should we cache it? Need to profile!
		if(d.isIE){
			if(value == "auto"){
				if(type == "height"){ return node.offsetHeight; }
				if(type == "width"){ return node.offsetWidth; }
			}
			if(type == "fontweight"){
				switch(value){
					case 700: return "bold";
					case 400:
					default: return "normal";
				}
			}
		}
		if(!(type in _pixelNamesCache)){
			// lazily record whether this property should be pixelized
			_pixelNamesCache[type] = _pixelRegExp.test(type);
		}
		return _pixelNamesCache[type] ? px(node, value) : value;
	};

	// "float" is reserved; IE uses styleFloat, others cssFloat
	var _floatStyle = d.isIE ? "styleFloat" : "cssFloat",
		_floatAliases = { "cssFloat": _floatStyle, "styleFloat": _floatStyle, "float": _floatStyle }
	;

	// public API

	dojo.style = function(	/*DomNode|String*/ node,
							/*String?|Object?*/ style,
							/*String?*/ value){
		// summary:
		//		Accesses styles on a node. If 2 arguments are
		//		passed, acts as a getter. If 3 arguments are passed, acts
		//		as a setter.
		// description:
		//		Getting the style value uses the computed style for the node, so the value
		//		will be a calculated value, not just the immediate node.style value.
		//		Also when getting values, use specific style names,
		//		like "borderBottomWidth" instead of "border" since compound values like
		//		"border" are not necessarily reflected as expected.
		//		If you want to get node dimensions, use `dojo.marginBox()`,
		//		`dojo.contentBox()` or `dojo.position()`.
		// node:
		//		id or reference to node to get/set style for
		// style:
		//		the style property to set in DOM-accessor format
		//		("borderWidth", not "border-width") or an object with key/value
		//		pairs suitable for setting each property.
		// value:
		//		If passed, sets value on the node for style, handling
		//		cross-browser concerns.  When setting a pixel value,
		//		be sure to include "px" in the value. For instance, top: "200px".
		//		Otherwise, in some cases, some browsers will not apply the style.
		// example:
		//		Passing only an ID or node returns the computed style object of
		//		the node:
		//	|	dojo.style("thinger");
		// example:
		//		Passing a node and a style property returns the current
		//		normalized, computed value for that property:
		//	|	dojo.style("thinger", "opacity"); // 1 by default
		//
		// example:
		//		Passing a node, a style property, and a value changes the
		//		current display of the node and returns the new computed value
		//	|	dojo.style("thinger", "opacity", 0.5); // == 0.5
		//
		// example:
		//		Passing a node, an object-style style property sets each of the values in turn and returns the computed style object of the node:
		//	|	dojo.style("thinger", {
		//	|		"opacity": 0.5,
		//	|		"border": "3px solid black",
		//	|		"height": "300px"
		//	|	});
		//
		// example:
		//		When the CSS style property is hyphenated, the JavaScript property is camelCased.
		//		font-size becomes fontSize, and so on.
		//	|	dojo.style("thinger",{
		//	|		fontSize:"14pt",
		//	|		letterSpacing:"1.2em"
		//	|	});
		//
		// example:
		//		dojo.NodeList implements .style() using the same syntax, omitting the "node" parameter, calling
		//		dojo.style() on every element of the list. See: `dojo.query()` and `dojo.NodeList()`
		//	|	dojo.query(".someClassName").style("visibility","hidden");
		//	|	// or
		//	|	dojo.query("#baz > div").style({
		//	|		opacity:0.75,
		//	|		fontSize:"13pt"
		//	|	});

		var n = byId(node), args = arguments.length, op = (style == "opacity");
		// normalize "float"/"cssFloat"/"styleFloat" to this browser's name
		style = _floatAliases[style] || style;
		if(args == 3){
			// setter; opacity goes through the portable helper
			return op ? d._setOpacity(n, value) : n.style[style] = value; /*Number*/
		}
		if(args == 2 && op){
			return d._getOpacity(n);
		}
		var s = gcs(n);
		if(args == 2 && typeof style != "string"){ // inline'd type check
			// object form: set each key/value pair in turn
			for(var x in style){
				d.style(node, x, style[x]);
			}
			return s;
		}
		return (args == 1) ? s : _toStyleValue(n, style, s[style] || n.style[style]); /* CSS2Properties||String||Number */
	}

	// =============================
	// Box Functions
	// =============================

	dojo._getPadExtents = function(/*DomNode*/n, /*Object*/computedStyle){
		// summary:
		// 		Returns object with special values specifically useful for node
		// 		fitting.
		// description:
		//		Returns an object with `w`, `h`, `l`, `t` properties:
		//	|		l/t = left/top padding (respectively)
		//	|		w = the total of the left and right padding
		//	|		h = the total of the top and bottom padding
		//		If 'node' has position, l/t forms the origin for child nodes.
		//		The w/h are used for calculating boxes.
		//		Normally application code will not need to invoke this
		//		directly, and will use the ...box... functions instead.
		var s = computedStyle||gcs(n),
			l = px(n, s.paddingLeft),
			t = px(n, s.paddingTop);
		return {
			l: l,
			t: t,
			w: l+px(n, s.paddingRight),
			h: t+px(n, s.paddingBottom)
		};
	}

	dojo._getBorderExtents = function(/*DomNode*/n, /*Object*/computedStyle){
		// summary:
		//		returns an object with properties useful for noting the border
		//		dimensions.
		// description:
		//		* l/t = the sum of left/top border (respectively)
		//		* w = the sum of the left and right border
		//		* h = the sum of the top and bottom border
		//
		//		The w/h are used for calculating boxes.
// Normally application code will not need to invoke this // directly, and will use the ...box... functions instead. var ne = "none", s = computedStyle||gcs(n), bl = (s.borderLeftStyle != ne ? px(n, s.borderLeftWidth) : 0), bt = (s.borderTopStyle != ne ? px(n, s.borderTopWidth) : 0); return { l: bl, t: bt, w: bl + (s.borderRightStyle!=ne ? px(n, s.borderRightWidth) : 0), h: bt + (s.borderBottomStyle!=ne ? px(n, s.borderBottomWidth) : 0) }; } dojo._getPadBorderExtents = function(/*DomNode*/n, /*Object*/computedStyle){ // summary: // Returns object with properties useful for box fitting with // regards to padding. // description: // * l/t = the sum of left/top padding and left/top border (respectively) // * w = the sum of the left and right padding and border // * h = the sum of the top and bottom padding and border // // The w/h are used for calculating boxes. // Normally application code will not need to invoke this // directly, and will use the ...box... functions instead. var s = computedStyle||gcs(n), p = d._getPadExtents(n, s), b = d._getBorderExtents(n, s); return { l: p.l + b.l, t: p.t + b.t, w: p.w + b.w, h: p.h + b.h }; } dojo._getMarginExtents = function(n, computedStyle){ // summary: // returns object with properties useful for box fitting with // regards to box margins (i.e., the outer-box). // // * l/t = marginLeft, marginTop, respectively // * w = total width, margin inclusive // * h = total height, margin inclusive // // The w/h are used for calculating boxes. // Normally application code will not need to invoke this // directly, and will use the ...box... functions instead. var s = computedStyle||gcs(n), l = px(n, s.marginLeft), t = px(n, s.marginTop), r = px(n, s.marginRight), b = px(n, s.marginBottom); if(d.isWebKit && (s.position != "absolute")){ // FIXME: Safari's version of the computed right margin // is the space between our right edge and the right edge // of our offsetParent. 
// What we are looking for is the actual margin value as // determined by CSS. // Hack solution is to assume left/right margins are the same. r = l; } return { l: l, t: t, w: l+r, h: t+b }; } // Box getters work in any box context because offsetWidth/clientWidth // are invariant wrt box context // // They do *not* work for display: inline objects that have padding styles // because the user agent ignores padding (it's bogus styling in any case) // // Be careful with IMGs because they are inline or block depending on // browser and browser mode. // Although it would be easier to read, there are not separate versions of // _getMarginBox for each browser because: // 1. the branching is not expensive // 2. factoring the shared code wastes cycles (function call overhead) // 3. duplicating the shared code wastes bytes dojo._getMarginBox = function(/*DomNode*/node, /*Object*/computedStyle){ // summary: // returns an object that encodes the width, height, left and top // positions of the node's margin box. var s = computedStyle || gcs(node), me = d._getMarginExtents(node, s); var l = node.offsetLeft - me.l, t = node.offsetTop - me.t, p = node.parentNode; if(d.isMoz){ // Mozilla: // If offsetParent has a computed overflow != visible, the offsetLeft is decreased // by the parent's border. // We don't want to compute the parent's style, so instead we examine node's // computed left/top which is more stable. var sl = parseFloat(s.left), st = parseFloat(s.top); if(!isNaN(sl) && !isNaN(st)){ l = sl, t = st; }else{ // If child's computed left/top are not parseable as a number (e.g. "auto"), we // have no choice but to examine the parent's computed style. 
if(p && p.style){ var pcs = gcs(p); if(pcs.overflow != "visible"){ var be = d._getBorderExtents(p, pcs); l += be.l, t += be.t; } } } }else if(d.isOpera || (d.isIE > 7 && !d.isQuirks)){ // On Opera and IE 8, offsetLeft/Top includes the parent's border if(p){ be = d._getBorderExtents(p); l -= be.l; t -= be.t; } } return { l: l, t: t, w: node.offsetWidth + me.w, h: node.offsetHeight + me.h }; } dojo._getContentBox = function(node, computedStyle){ // summary: // Returns an object that encodes the width, height, left and top // positions of the node's content box, irrespective of the // current box model. // clientWidth/Height are important since the automatically account for scrollbars // fallback to offsetWidth/Height for special cases (see #3378) var s = computedStyle || gcs(node), pe = d._getPadExtents(node, s), be = d._getBorderExtents(node, s), w = node.clientWidth, h ; if(!w){ w = node.offsetWidth, h = node.offsetHeight; }else{ h = node.clientHeight, be.w = be.h = 0; } // On Opera, offsetLeft includes the parent's border if(d.isOpera){ pe.l += be.l; pe.t += be.t; }; return { l: pe.l, t: pe.t, w: w - pe.w - be.w, h: h - pe.h - be.h }; } dojo._getBorderBox = function(node, computedStyle){ var s = computedStyle || gcs(node), pe = d._getPadExtents(node, s), cb = d._getContentBox(node, s) ; return { l: cb.l - pe.l, t: cb.t - pe.t, w: cb.w + pe.w, h: cb.h + pe.h }; } // Box setters depend on box context because interpretation of width/height styles // vary wrt box context. // // The value of dojo.boxModel is used to determine box context. // dojo.boxModel can be set directly to change behavior. // // Beware of display: inline objects that have padding styles // because the user agent ignores padding (it's a bogus setup anyway) // // Be careful with IMGs because they are inline or block depending on // browser and browser mode. // // Elements other than DIV may have special quirks, like built-in // margins or padding, or values not detectable via computedStyle. 
	// In particular, margins on TABLE do not seem to appear
	// at all in computedStyle on Mozilla.

	dojo._setBox = function(/*DomNode*/node, /*Number?*/l, /*Number?*/t, /*Number?*/w, /*Number?*/h, /*String?*/u){
		// summary:
		//		sets width/height/left/top in the current (native) box-model
		//		dimensions. Uses the unit passed in u.
		// node:
		//		DOM Node reference. Id string not supported for performance
		//		reasons.
		// l:
		//		left offset from parent.
		// t:
		//		top offset from parent.
		// w:
		//		width in current box model.
		// h:
		//		height in current box model.
		// u:
		//		unit measure to use for other measures. Defaults to "px".
		u = u || "px";
		var s = node.style;
		// NaN (or undefined) means "leave this dimension alone"
		if(!isNaN(l)){ s.left = l + u; }
		if(!isNaN(t)){ s.top = t + u; }
		if(w >= 0){ s.width = w + u; }
		if(h >= 0){ s.height = h + u; }
	}

	dojo._isButtonTag = function(/*DomNode*/node) {
		// summary:
		//		True if the node is BUTTON or INPUT.type="button".
		return node.tagName == "BUTTON"
			|| node.tagName=="INPUT" && (node.getAttribute("type")||'').toUpperCase() == "BUTTON"; // boolean
	}

	dojo._usesBorderBox = function(/*DomNode*/node){
		// summary:
		//		True if the node uses border-box layout.

		// We could test the computed style of node to see if a particular box
		// has been specified, but there are details and we choose not to bother.
		// TABLE and BUTTON (and INPUT type=button) are always border-box by default.
		// If you have assigned a different box to either one via CSS then
		// box functions will break.
		var n = node.tagName;
		return d.boxModel=="border-box" || n=="TABLE" || d._isButtonTag(node); // boolean
	}

	dojo._setContentSize = function(/*DomNode*/node, /*Number*/widthPx, /*Number*/heightPx, /*Object*/computedStyle){
		// summary:
		//		Sets the size of the node's contents, irrespective of margins,
		//		padding, or borders.
		if(d._usesBorderBox(node)){
			// border-box: the style width/height includes padding+border,
			// so grow the target size by those extents
			var pb = d._getPadBorderExtents(node, computedStyle);
			if(widthPx >= 0){ widthPx += pb.w; }
			if(heightPx >= 0){ heightPx += pb.h; }
		}
		// NaN for l/t leaves position untouched
		d._setBox(node, NaN, NaN, widthPx, heightPx);
	}

	dojo._setMarginBox = function(/*DomNode*/node, /*Number?*/leftPx, /*Number?*/topPx,
		/*Number?*/widthPx, /*Number?*/heightPx, /*Object*/computedStyle){
		// summary:
		//		sets the size of the node's margin box and placement
		//		(left/top), irrespective of box model. Think of it as a
		//		passthrough to dojo._setBox that handles box-model vagaries for
		//		you.

		var s = computedStyle || gcs(node),
			// Some elements have special padding, margin, and box-model settings.
			// To use box functions you may need to set padding, margin explicitly.
			// Controlling box-model is harder, in a pinch you might set dojo.boxModel.
			bb = d._usesBorderBox(node),
			pb = bb ? _nilExtents : d._getPadBorderExtents(node, s)
			;
		if(d.isWebKit){
			// on Safari (3.1.2), button nodes with no explicit size have a default margin
			// setting an explicit size eliminates the margin.
			// We have to swizzle the width to get correct margin reading.
			if(d._isButtonTag(node)){
				var ns = node.style;
				if(widthPx >= 0 && !ns.width) { ns.width = "4px"; }
				if(heightPx >= 0 && !ns.height) { ns.height = "4px"; }
			}
		}
		var mb = d._getMarginExtents(node, s);
		// shrink target size by pad/border/margin; clamp at 0 so we never set a negative size
		if(widthPx >= 0){ widthPx = Math.max(widthPx - pb.w - mb.w, 0); }
		if(heightPx >= 0){ heightPx = Math.max(heightPx - pb.h - mb.h, 0); }
		d._setBox(node, leftPx, topPx, widthPx, heightPx);
	}

	// zero extents shared by border-box elements (no pad/border correction needed)
	var _nilExtents = { l:0, t:0, w:0, h:0 };

	// public API

	dojo.marginBox = function(/*DomNode|String*/node, /*Object?*/box){
		// summary:
		//		Getter/setter for the margin-box of node.
		// description:
		//		Getter/setter for the margin-box of node.
		//		Returns an object in the expected format of box (regardless
		//		if box is passed).
		//		The object might look like:
		//		`{ l: 50, t: 200, w: 300: h: 150 }`
		//		for a node offset from its parent 50px to the left, 200px from
		//		the top with a margin width of 300px and a margin-height of
		//		150px.
		// node:
		//		id or reference to DOM Node to get/set box for
		// box:
		//		If passed, denotes that dojo.marginBox() should
		//		update/set the margin box for node. Box is an object in the
		//		above format. All properties are optional if passed.
		// example:
		//		Retrieve the marginbox of a passed node
		//	|	var box = dojo.marginBox("someNodeId");
		//	|	console.dir(box);
		//
		// example:
		//		Set a node's marginbox to the size of another node
		//	|	var box = dojo.marginBox("someNodeId");
		//	|	dojo.marginBox("someOtherNode", box);
		var n = byId(node), s = gcs(n), b = box;
		return !b ? d._getMarginBox(n, s) : d._setMarginBox(n, b.l, b.t, b.w, b.h, s);	// Object
	}

	dojo.contentBox = function(/*DomNode|String*/node, /*Object?*/box){
		// summary:
		//		Getter/setter for the content-box of node.
		// description:
		//		Returns an object in the expected format of box (regardless if box is passed).
		//		The object might look like:
		//		`{ l: 50, t: 200, w: 300: h: 150 }`
		//		for a node offset from its parent 50px to the left, 200px from
		//		the top with a content width of 300px and a content-height of
		//		150px. Note that the content box may have a much larger border
		//		or margin box, depending on the box model currently in use and
		//		CSS values set/inherited for node.
		//		While the getter will return top and left values, the
		//		setter only accepts setting the width and height.
		// node:
		//		id or reference to DOM Node to get/set box for
		// box:
		//		If passed, denotes that dojo.contentBox() should
		//		update/set the content box for node. Box is an object in the
		//		above format, but only w (width) and h (height) are supported.
		//		All properties are optional if passed.
		var n = byId(node), s = gcs(n), b = box;
		return !b ? d._getContentBox(n, s) : d._setContentSize(n, b.w, b.h, s);	// Object
	}

	// =============================
	// Positioning
	// =============================

	var _sumAncestorProperties = function(node, prop){
		// summary:
		//		Sums the given numeric property (e.g. scrollLeft/scrollTop) over
		//		node's ancestors, stopping at a fixed-position ancestor (returns 0).
		if(!(node = (node||0).parentNode)){return 0}
		var val, retVal = 0, _b = d.body();
		while(node && node.style){
			if(gcs(node).position == "fixed"){
				return 0;
			}
			val = node[prop];
			if(val){
				retVal += val - 0;	// "- 0" coerces to a number
				// opera and khtml #body & #html has the same values, we only
				// need one value
				if(node == _b){ break; }
			}
			node = node.parentNode;
		}
		return retVal; //	integer
	}

	dojo._docScroll = function(){
		// summary:
		//		Returns the document scroll offsets as {x, y}, trying
		//		pageXOffset, then documentElement, then body, in that order.
		var n = d.global;
		return "pageXOffset" in n? { x:n.pageXOffset, y:n.pageYOffset } :
			(n=d.doc.documentElement, n.clientHeight? { x:d._fixIeBiDiScrollLeft(n.scrollLeft), y:n.scrollTop } :
			(n=d.body(), { x:n.scrollLeft||0, y:n.scrollTop||0 }));
	};

	dojo._isBodyLtr = function(){
		// summary:
		//		True if the body's text direction is left-to-right; cached in d._bodyLtr.
		return "_bodyLtr" in d? d._bodyLtr :
			d._bodyLtr = (d.body().dir || d.doc.documentElement.dir || "ltr").toLowerCase() == "ltr"; // Boolean
	}

	dojo._getIeDocumentElementOffset = function(){
		// summary:
		//		returns the offset in x and y from the document body to the
		//		visual edge of the page
		// description:
		//		The following values in IE contain an offset:
		//	|		event.clientX
		//	|		event.clientY
		//	|		node.getBoundingClientRect().left
		//	|		node.getBoundingClientRect().top
		//		But other position related values do not contain this offset,
		//		such as node.offsetLeft, node.offsetTop, node.style.left and
		//		node.style.top. The offset is always (2, 2) in LTR direction.
		//		When the body is in RTL direction, the offset counts the width
		//		of left scroll bar's width. This function computes the actual
		//		offset.

		//NOTE: assumes we're being called in an IE browser

		var de = d.doc.documentElement; // only deal with HTML element here, _abs handles body/quirks

		if(d.isIE < 8){
			var r = de.getBoundingClientRect(); // works well for IE6+
			//console.debug('rect left,top = ' + r.left+','+r.top + ', html client left/top = ' + de.clientLeft+','+de.clientTop + ', rtl = ' + (!d._isBodyLtr()) + ', quirks = ' + d.isQuirks);
			var l = r.left,
				t = r.top;
			if(d.isIE < 7){
				l += de.clientLeft;	// scrollbar size in strict/RTL, or,
				t += de.clientTop;	// HTML border size in strict
			}
			return {
				x: l < 0? 0 : l, // FRAME element border size can lead to inaccurate negative values
				y: t < 0? 0 : t
			};
		}else{
			return {
				x: 0,
				y: 0
			};
		}
	};

	dojo._fixIeBiDiScrollLeft = function(/*Integer*/ scrollLeft){
		// In RTL direction, scrollLeft should be a negative value, but IE < 8
		// returns a positive one. All codes using documentElement.scrollLeft
		// must call this function to fix this error, otherwise the position
		// will offset to right when there is a horizontal scrollbar.
		var dd = d.doc;
		if(d.isIE < 8 && !d._isBodyLtr()){
			var de = d.isQuirks ? dd.body : dd.documentElement;
			return scrollLeft + de.clientWidth - de.scrollWidth; // Integer
		}
		return scrollLeft; // Integer
	}

	// FIXME: need a setter for coords or a moveTo!!
	dojo._abs = dojo.position = function(/*DomNode*/node, /*Boolean?*/includeScroll){
		// summary:
		//		Gets the position and size of the passed element relative to
		//		the viewport (if includeScroll==false), or relative to the
		//		document root (if includeScroll==true).
		//
		// description:
		//		Returns an object of the form:
		//			{ x: 100, y: 300, w: 20, h: 15 }
		//		If includeScroll==true, the x and y values will include any
		//		document offsets that may affect the position relative to the
		//		viewport.
		//		Uses the border-box model (inclusive of border and padding but
		//		not margin). Does not act as a setter.
		var db = d.body(), dh = db.parentNode, ret;
		node = byId(node);
		if(node["getBoundingClientRect"]){
			// IE6+, FF3+, super-modern WebKit, and Opera 9.6+ all take this branch
			ret = node.getBoundingClientRect();
			ret = { x: ret.left, y: ret.top, w: ret.right - ret.left, h: ret.bottom - ret.top };
			if(d.isIE){
				// On IE there's a 2px offset that we need to adjust for, see _getIeDocumentElementOffset()
				var offset = d._getIeDocumentElementOffset();
				// fixes the position in IE, quirks mode
				ret.x -= offset.x + (d.isQuirks ? db.clientLeft+db.offsetLeft : 0);
				ret.y -= offset.y + (d.isQuirks ? db.clientTop+db.offsetTop : 0);
			}else if(d.isFF == 3){
				// In FF3 you have to subtract the document element margins.
				// Fixed in FF3.5 though.
				var cs = gcs(dh);
				ret.x -= px(dh, cs.marginLeft) + px(dh, cs.borderLeftWidth);
				ret.y -= px(dh, cs.marginTop) + px(dh, cs.borderTopWidth);
			}
		}else{
			// FF2 and older WebKit: no getBoundingClientRect — walk the offsetParent chain
			ret = { x: 0, y: 0, w: node.offsetWidth, h: node.offsetHeight };
			if(node["offsetParent"]){
				ret.x -= _sumAncestorProperties(node, "scrollLeft");
				ret.y -= _sumAncestorProperties(node, "scrollTop");
				var curnode = node;
				do{
					var n = curnode.offsetLeft,
						t = curnode.offsetTop;
					ret.x += isNaN(n) ? 0 : n;
					ret.y += isNaN(t) ? 0 : t;

					cs = gcs(curnode);
					if(curnode != node){
						if(d.isMoz){
							// tried left+right with differently sized left/right borders
							// it really is 2xleft border in FF, not left+right, even in RTL!
							ret.x += 2 * px(curnode,cs.borderLeftWidth);
							ret.y += 2 * px(curnode,cs.borderTopWidth);
						}else{
							ret.x += px(curnode, cs.borderLeftWidth);
							ret.y += px(curnode, cs.borderTopWidth);
						}
					}
					// static children in a static div in FF2 are affected by the div's border as well
					// but offsetParent will skip this div!
					if(d.isMoz && cs.position=="static"){
						var parent=curnode.parentNode;
						while(parent!=curnode.offsetParent){
							var pcs=gcs(parent);
							if(pcs.position=="static"){
								ret.x += px(curnode,pcs.borderLeftWidth);
								ret.y += px(curnode,pcs.borderTopWidth);
							}
							parent=parent.parentNode;
						}
					}
					curnode = curnode.offsetParent;
				}while((curnode != dh) && curnode);
			}else if(node.x && node.y){
				ret.x += isNaN(node.x) ? 0 : node.x;
				ret.y += isNaN(node.y) ? 0 : node.y;
			}
		}
		// account for document scrolling
		// if offsetParent is used, ret value already includes scroll position
		// so we may have to actually remove that value if !includeScroll
		if(includeScroll){
			var scroll = d._docScroll();
			ret.x += scroll.x;
			ret.y += scroll.y;
		}

		return ret; // Object
	}

	dojo.coords = function(/*DomNode|String*/node, /*Boolean?*/includeScroll){
		// summary:
		//		Deprecated: Use position() for border-box x/y/w/h
		//		or marginBox() for margin-box w/h/l/t.
		//		Returns an object representing a node's size and position.
		//
		// description:
		//		Returns an object that measures margin-box (w)idth/(h)eight
		//		and absolute position x/y of the border-box. Also returned
		//		is computed (l)eft and (t)op values in pixels from the
		//		node's offsetParent as returned from marginBox().
		//		Return value will be in the form:
		//|			{ l: 50, t: 200, w: 300: h: 150, x: 100, y: 300 }
		//		Does not act as a setter. If includeScroll is passed, the x and
		//		y params are affected as one would expect in dojo.position().
		var n = byId(node), s = gcs(n), mb = d._getMarginBox(n, s);
		var abs = d.position(n, includeScroll);
		mb.x = abs.x;
		mb.y = abs.y;
		return mb;
	}

	// =============================
	// Element attribute Functions
	// =============================

	// dojo.attr() should conform to http://www.w3.org/TR/DOM-Level-2-Core/

	var _propNames = {
			// properties renamed to avoid clashes with reserved words
			"class":	"className",
			"for":		"htmlFor",
			// properties written as camelCase
			tabindex:	"tabIndex",
			readonly:	"readOnly",
			colspan:	"colSpan",
			frameborder:	"frameBorder",
			rowspan:	"rowSpan",
			valuetype:	"valueType"
		},
		_attrNames = {
			// original attribute names
			classname:	"class",
			htmlfor:	"for",
			// for IE
			tabindex:	"tabIndex",
			readonly:	"readOnly"
		},
		// attributes that must always be accessed as properties, not via get/setAttribute
		_forcePropNames = {
			innerHTML:	1,
			className:	1,
			htmlFor:	d.isIE,
			value:		1
		};

	var _fixAttrName = function(/*String*/ name){
		// summary:
		//		Maps a lowercased attribute name back to its canonical attribute name.
		return _attrNames[name.toLowerCase()] || name;
	};

	var _hasAttr = function(node, name){
		// summary:
		//		True only if the attribute was explicitly specified in markup/script
		//		(guards against IE reporting default attribute values).
		var attr = node.getAttributeNode && node.getAttributeNode(name);
		return attr && attr.specified; // Boolean
	};

	// There is a difference in the presence of certain properties and their default values
	// between browsers. For example, on IE "disabled" is present on all elements,
	// but its value is "false"; "tabIndex" of <div> returns 0 by default on IE, yet other browsers
	// can return -1.

	dojo.hasAttr = function(/*DomNode|String*/node, /*String*/name){
		// summary:
		//		Returns true if the requested attribute is specified on the
		//		given element, and false otherwise.
		// node:
		//		id or reference to the element to check
		// name:
		//		the name of the attribute
		// returns:
		//		true if the requested attribute is specified on the
		//		given element, and false otherwise
		var lc = name.toLowerCase();
		return _forcePropNames[_propNames[lc] || name] || _hasAttr(byId(node), _attrNames[lc] || name); // Boolean
	}

	var _evtHdlrMap = {}, _ctr = 0, _attrId = dojo._scopeName + "attrid",
		// the next dictionary lists elements with read-only innerHTML on IE
		_roInnerHtml = {col: 1, colgroup: 1, // frameset: 1, head: 1, html: 1, style: 1,
			table: 1, tbody: 1, tfoot: 1, thead: 1, tr: 1, title: 1};

	dojo.attr = function(/*DomNode|String*/node, /*String|Object*/name, /*String?*/value){
		// summary:
		//		Gets or sets an attribute on an HTML element.
		// description:
		//		Handles normalized getting and setting of attributes on DOM
		//		Nodes. If 2 arguments are passed, and the second argument is a
		//		string, acts as a getter.
		//
		//		If a third argument is passed, or if the second argument is a
		//		map of attributes, acts as a setter.
		//
		//		When passing functions as values, note that they will not be
		//		directly assigned to slots on the node, but rather the default
		//		behavior will be removed and the new behavior will be added
		//		using `dojo.connect()`, meaning that event handler properties
		//		will be normalized and that some caveats with regards to
		//		non-standard behaviors for onsubmit apply. Namely that you
		//		should cancel form submission using `dojo.stopEvent()` on the
		//		passed event object instead of returning a boolean value from
		//		the handler itself.
		// node:
		//		id or reference to the element to get or set the attribute on
		// name:
		//		the name of the attribute to get or set.
		// value:
		//		The value to set for the attribute
		// returns:
		//		when used as a getter, the value of the requested attribute
		//		or null if that attribute does not have a specified or
		//		default value;
		//
		//		when used as a setter, the DOM node
		//
		// example:
		//	|	// get the current value of the "foo" attribute on a node
		//	|	dojo.attr(dojo.byId("nodeId"), "foo");
		//	|	// or we can just pass the id:
		//	|	dojo.attr("nodeId", "foo");
		//
		// example:
		//	|	// use attr() to set the tab index
		//	|	dojo.attr("nodeId", "tabIndex", 3);
		//	|
		//
		// example:
		//	Set multiple values at once, including event handlers:
		//	|	dojo.attr("formId", {
		//	|		"foo": "bar",
		//	|		"tabIndex": -1,
		//	|		"method": "POST",
		//	|		"onsubmit": function(e){
		//	|			// stop submitting the form. Note that the IE behavior
		//	|			// of returning true or false will have no effect here
		//	|			// since our handler is connect()ed to the built-in
		//	|			// onsubmit behavior and so we need to use
		//	|			// dojo.stopEvent() to ensure that the submission
		//	|			// doesn't proceed.
		//	|			dojo.stopEvent(e);
		//	|
		//	|			// submit the form with Ajax
		//	|			dojo.xhrPost({ form: "formId" });
		//	|		}
		//	|	});
		//
		// example:
		//	Style is a special case: Only set with an object hash of styles
		//	|	dojo.attr("someNode",{
		//	|		id:"bar",
		//	|		style:{
		//	|			width:"200px", height:"100px", color:"#000"
		//	|		}
		//	|	});
		//
		// example:
		//	Again, only set style as an object hash of styles:
		//	|	var obj = { color:"#fff", backgroundColor:"#000" };
		//	|	dojo.attr("someNode", "style", obj);
		//	|
		//	|	// though shorter to use `dojo.style()` in this case:
		//	|	dojo.style("someNode", obj);

		node = byId(node);
		var args = arguments.length, prop;
		if(args == 2 && typeof name != "string"){ // inline'd type check
			// the object form of setter: the 2nd argument is a dictionary
			for(var x in name){
				d.attr(node, x, name[x]);
			}
			return node; // DomNode
		}
		var lc = name.toLowerCase(),
			propName = _propNames[lc] || name,
			forceProp = _forcePropNames[propName],
			attrName = _attrNames[lc] || name;
		if(args == 3){
			// setter
			// do{...}while(false) is used as a structured "goto end" so each
			// special case can break out after handling the assignment
			do{
				if(propName == "style" && typeof value != "string"){ // inline'd type check
					// special case: setting a style
					d.style(node, value);
					break;
				}
				if(propName == "innerHTML"){
					// special case: assigning HTML
					if(d.isIE && node.tagName.toLowerCase() in _roInnerHtml){
						d.empty(node);
						node.appendChild(d._toDom(value, node.ownerDocument));
					}else{
						node[propName] = value;
					}
					break;
				}
				if(d.isFunction(value)){
					// special case: assigning an event handler
					// clobber if we can
					var attrId = d.attr(node, _attrId);
					if(!attrId){
						attrId = _ctr++;
						d.attr(node, _attrId, attrId);
					}
					if(!_evtHdlrMap[attrId]){
						_evtHdlrMap[attrId] = {};
					}
					var h = _evtHdlrMap[attrId][propName];
					if(h){
						d.disconnect(h);
					}else{
						try{
							delete node[propName];
						}catch(e){}
					}
					// ensure that event objects are normalized, etc.
					_evtHdlrMap[attrId][propName] = d.connect(node, propName, value);
					break;
				}
				if(forceProp || typeof value == "boolean"){
					// special case: forcing assignment to the property
					// special case: setting boolean to a property instead of attribute
					node[propName] = value;
					break;
				}
				// node's attribute
				node.setAttribute(attrName, value);
			}while(false);
			return node; // DomNode
		}
		// getter
		// should we access this attribute via a property or
		// via getAttribute()?
		value = node[propName];
		if(forceProp && typeof value != "undefined"){
			// node's property
			return value;	// Anything
		}
		if(propName != "href" && (typeof value == "boolean" || d.isFunction(value))){
			// node's property
			return value;	// Anything
		}
		// node's attribute
		// we need _hasAttr() here to guard against IE returning a default value
		return _hasAttr(node, attrName) ? node.getAttribute(attrName) : null; // Anything
	}

	dojo.removeAttr = function(/*DomNode|String*/ node, /*String*/ name){
		// summary:
		//		Removes an attribute from an HTML element.
		// node:
		//		id or reference to the element to remove the attribute from
		// name:
		//		the name of the attribute to remove
		byId(node).removeAttribute(_fixAttrName(name));
	}

	dojo.getNodeProp = function(/*DomNode|String*/ node, /*String*/ name){
		// summary:
		//		Returns an effective value of a property or an attribute.
		// node:
		//		id or reference to the element to remove the attribute from
		// name:
		//		the name of the attribute
		node = byId(node);
		var lc = name.toLowerCase(),
			propName = _propNames[lc] || name;
		if((propName in node) && propName != "href"){
			// node's property
			return node[propName];	// Anything
		}
		// node's attribute
		var attrName = _attrNames[lc] || name;
		return _hasAttr(node, attrName) ? node.getAttribute(attrName) : null; // Anything
	}

	dojo.create = function(tag, attrs, refNode, pos){
		// summary:
		//		Create an element, allowing for optional attribute decoration
		//		and placement.
		//
		// description:
		//		A DOM Element creation function.
		//		A shorthand method for creating a node or
		//		a fragment, and allowing for a convenient optional attribute setting step,
		//		as well as an optional DOM placement reference.
		//|
		//		Attributes are set by passing the optional object through `dojo.attr`.
		//		See `dojo.attr` for noted caveats and nuances, and API if applicable.
		//|
		//		Placement is done via `dojo.place`, assuming the new node to be the action
		//		node, passing along the optional reference node and position.
		//
		// tag: String|DomNode
		//		A string of the element to create (eg: "div", "a", "p", "li", "script", "br"),
		//		or an existing DOM node to process.
		//
		// attrs: Object
		//		An object-hash of attributes to set on the newly created node.
		//		Can be null, if you don't want to set any attributes/styles.
		//		See: `dojo.attr` for a description of available attributes.
		//
		// refNode: String?|DomNode?
		//		Optional reference node. Used by `dojo.place` to place the newly created
		//		node somewhere in the dom relative to refNode. Can be a DomNode reference
		//		or String ID of a node.
		//
		// pos: String?
		//		Optional positional reference. Defaults to "last" by way of `dojo.place`,
		//		though can be set to "first","after","before","last", "replace" or "only"
		//		to further control the placement of the new node relative to the refNode.
		//		'refNode' is required if a 'pos' is specified.
		//
		// returns: DomNode
		//
		// example:
		//	Create a DIV:
		//	| var n = dojo.create("div");
		//
		// example:
		//	Create a DIV with content:
		//	| var n = dojo.create("div", { innerHTML:"<p>hi</p>" });
		//
		// example:
		//	Place a new DIV in the BODY, with no attributes set
		//	| var n = dojo.create("div", null, dojo.body());
		//
		// example:
		//	Create an UL, and populate it with LI's. Place the list as the first-child of a
		//	node with id="someId":
		//	| var ul = dojo.create("ul", null, "someId", "first");
		//	| var items = ["one", "two", "three", "four"];
		//	| dojo.forEach(items, function(data){
		//	|	dojo.create("li", { innerHTML: data }, ul);
		//	| });
		//
		// example:
		//	Create an anchor, with an href. Place in BODY:
		//	| dojo.create("a", { href:"foo.html", title:"Goto FOO!" }, dojo.body());
		//
		// example:
		//	Create a `dojo.NodeList()` from a new element (for syntactic sugar):
		//	| dojo.query(dojo.create('div'))
		//	|	.addClass("newDiv")
		//	|	.onclick(function(e){ console.log('clicked', e.target) })
		//	|	.place("#someNode"); // redundant, but cleaner.

		var doc = d.doc;
		if(refNode){
			// new node is created in the same document as the reference node
			refNode = byId(refNode);
			doc = refNode.ownerDocument;
		}
		if(typeof tag == "string"){ // inline'd type check
			tag = doc.createElement(tag);
		}
		if(attrs){ d.attr(tag, attrs); }
		if(refNode){ d.place(tag, refNode, pos); }
		return tag; // DomNode
	}

	/*=====
	dojo.empty = function(node){
			// summary:
			//		safely removes all children of the node.
			//	node: DOMNode|String
			//		a reference to a DOM node or an id.
			//	example:
			//	Destroy node's children byId:
			//	| dojo.empty("someId");
			//
			//	example:
			//	Destroy all nodes' children in a list by reference:
			//	| dojo.query(".someNode").forEach(dojo.empty);
	}
	=====*/

	// On IE children must be destroyed one by one (innerHTML="" leaks/fails on
	// some elements); elsewhere clearing innerHTML is sufficient.
	d.empty =
		d.isIE ?  function(node){
			node = byId(node);
			for(var c; c = node.lastChild;){ // intentional assignment
				d.destroy(c);
			}
		} :
		function(node){
			byId(node).innerHTML = "";
		};

	/*=====
	dojo._toDom = function(frag, doc){
			// summary:
			//		instantiates an HTML fragment returning the corresponding DOM.
			//	frag: String
			//		the HTML fragment
			//	doc: DocumentNode?
			//		optional document to use when creating DOM nodes, defaults to
			//		dojo.doc if not specified.
			//	returns: DocumentFragment
			//
			//	example:
			//	Create a table row:
			//	| var tr = dojo._toDom("<tr><td>First!</td></tr>");
	}
	=====*/

	// support stuff for dojo._toDom
	// tags that browsers refuse to create via bare innerHTML; each maps to the
	// chain of wrapper tags needed around the fragment
	var tagWrap = {
			option: ["select"],
			tbody: ["table"],
			thead: ["table"],
			tfoot: ["table"],
			tr: ["table", "tbody"],
			td: ["table", "tbody", "tr"],
			th: ["table", "thead", "tr"],
			legend: ["fieldset"],
			caption: ["table"],
			colgroup: ["table"],
			col: ["table", "colgroup"],
			li: ["ul"]
		},
		reTag = /<\s*([\w\:]+)/,
		masterNode = {}, masterNum = 0,
		masterName = "__" + d._scopeName + "ToDomId";

	// generate start/end tag strings to use
	// for the injection for each special tag wrap case.
	for(var param in tagWrap){
		var tw = tagWrap[param];
		tw.pre  = param == "option" ? '<select multiple="multiple">' : "<" + tw.join("><") + ">";
		tw.post = "</" + tw.reverse().join("></") + ">";
		// the last line is destructive: it reverses the array,
		// but we don't care at this point
	}

	d._toDom = function(frag, doc){
		// summary:
		// 		converts HTML string into DOM nodes.

		doc = doc || d.doc;
		var masterId = doc[masterName];
		if(!masterId){
			// one scratch <div> is cached per document
			doc[masterName] = masterId = ++masterNum + "";
			masterNode[masterId] = doc.createElement("div");
		}

		// make sure the frag is a string.
		frag += "";

		// find the starting tag, and get node wrapper
		var match = frag.match(reTag),
			tag = match ? match[1].toLowerCase() : "",
			master = masterNode[masterId],
			wrap, i, fc, df;
		if(match && tagWrap[tag]){
			wrap = tagWrap[tag];
			master.innerHTML = wrap.pre + frag + wrap.post;
			// descend through the wrapper tags to the real content
			for(i = wrap.length; i; --i){
				master = master.firstChild;
			}
		}else{
			master.innerHTML = frag;
		}

		// one node shortcut => return the node itself
		if(master.childNodes.length == 1){
			return master.removeChild(master.firstChild); // DOMNode
		}

		// return multiple nodes as a document fragment
		df = doc.createDocumentFragment();
		while(fc = master.firstChild){ // intentional assignment
			df.appendChild(fc);
		}
		return df; // DOMNode
	}

	// =============================
	// (CSS) Class Functions
	// =============================
	var _className = "className";

	dojo.hasClass = function(/*DomNode|String*/node, /*String*/classStr){
		// summary:
		//		Returns whether or not the specified classes are a portion of the
		//		class list currently applied to the node.
		//
		// node:
		//		String ID or DomNode reference to check the class for.
		//
		// classStr:
		//		A string class name to look for.
		//
		// example:
		//	Do something if a node with id="someNode" has class="aSillyClassName" present
		//	|	if(dojo.hasClass("someNode","aSillyClassName")){ ... }

		return ((" "+ byId(node)[_className] +" ").indexOf(" " + classStr + " ") >= 0);  // Boolean
	};

	var spaces = /\s+/, a1 = [""],
		// normalizes a class argument to an array: splits a space-separated
		// string, wraps a single name in the shared a1 array, passes arrays through
		str2array = function(s){
			if(typeof s == "string" || s instanceof String){
				if(s.indexOf(" ") < 0){
					a1[0] = s;
					return a1;
				}else{
					return s.split(spaces);
				}
			}
			// assumed to be an array
			return s || "";
		};

	dojo.addClass = function(/*DomNode|String*/node, /*String|Array*/classStr){
		// summary:
		//		Adds the specified classes to the end of the class list on the
		//		passed node. Will not re-apply duplicate classes.
		//
		// node:
		//		String ID or DomNode reference to add a class string too
		//
		// classStr:
		//		A String class name to add, or several space-separated class names,
		//		or an array of class names.
// // example: // Add a class to some node: // | dojo.addClass("someNode", "anewClass"); // // example: // Add two classes at once: // | dojo.addClass("someNode", "firstClass secondClass"); // // example: // Add two classes at once (using array): // | dojo.addClass("someNode", ["firstClass", "secondClass"]); // // example: // Available in `dojo.NodeList` for multiple additions // | dojo.query("ul > li").addClass("firstLevel"); node = byId(node); classStr = str2array(classStr); var cls = node[_className], oldLen; cls = cls ? " " + cls + " " : " "; oldLen = cls.length; for(var i = 0, len = classStr.length, c; i < len; ++i){ c = classStr[i]; if(c && cls.indexOf(" " + c + " ") < 0){ cls += c + " "; } } if(oldLen < cls.length){ node[_className] = cls.substr(1, cls.length - 2); } }; dojo.removeClass = function(/*DomNode|String*/node, /*String|Array?*/classStr){ // summary: // Removes the specified classes from node. No `dojo.hasClass` // check is required. // // node: // String ID or DomNode reference to remove the class from. // // classStr: // An optional String class name to remove, or several space-separated // class names, or an array of class names. If omitted, all class names // will be deleted. 
// // example: // Remove a class from some node: // | dojo.removeClass("someNode", "firstClass"); // // example: // Remove two classes from some node: // | dojo.removeClass("someNode", "firstClass secondClass"); // // example: // Remove two classes from some node (using array): // | dojo.removeClass("someNode", ["firstClass", "secondClass"]); // // example: // Remove all classes from some node: // | dojo.removeClass("someNode"); // // example: // Available in `dojo.NodeList()` for multiple removal // | dojo.query(".foo").removeClass("foo"); node = byId(node); var cls; if(classStr !== undefined){ classStr = str2array(classStr); cls = " " + node[_className] + " "; for(var i = 0, len = classStr.length; i < len; ++i){ cls = cls.replace(" " + classStr[i] + " ", " "); } cls = d.trim(cls); }else{ cls = ""; } if(node[_className] != cls){ node[_className] = cls; } }; dojo.toggleClass = function(/*DomNode|String*/node, /*String|Array*/classStr, /*Boolean?*/condition){ // summary: // Adds a class to node if not present, or removes if present. // Pass a boolean condition if you want to explicitly add or remove. // condition: // If passed, true means to add the class, false means to remove. // // example: // | dojo.toggleClass("someNode", "hovered"); // // example: // Forcefully add a class // | dojo.toggleClass("someNode", "hovered", true); // // example: // Available in `dojo.NodeList()` for multiple toggles // | dojo.query(".toggleMe").toggleClass("toggleMe"); if(condition === undefined){ condition = !d.hasClass(node, classStr); } d[condition ? "addClass" : "removeClass"](node, classStr); }; })(); } if(!dojo._hasResource["dojo._base.NodeList"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. 
dojo._hasResource["dojo._base.NodeList"] = true;
dojo.provide("dojo._base.NodeList");

(function(){

	var d = dojo;

	var ap = Array.prototype, aps = ap.slice, apc = ap.concat;

	var tnl = function(/*Array*/ a, /*dojo.NodeList?*/ parent, /*Function?*/ NodeListCtor){
		// summary:
		// 		decorate an array to make it look like a `dojo.NodeList`.
		// a:
		// 		Array of nodes to decorate.
		// parent:
		// 		An optional parent NodeList that generated the current
		// 		list of nodes. Used to call _stash() so the parent NodeList
		// 		can be accessed via end() later.
		// NodeListCtor:
		// 		An optional constructor function to use for any
		// 		new NodeList calls. This allows a certain chain of
		// 		NodeList calls to use a different object than dojo.NodeList.
		if(!a.sort){
			// make sure it's a real array before we pass it on to be wrapped
			a = aps.call(a, 0);
		}
		var ctor = NodeListCtor || this._NodeListCtor || d._NodeListCtor;
		a.constructor = ctor;
		dojo._mixin(a, ctor.prototype);
		a._NodeListCtor = ctor;
		return parent ? a._stash(parent) : a;
	};

	// builds a per-node callback: prepends the node as the first argument
	// to f and applies it in context o
	var loopBody = function(f, a, o){
		a = [0].concat(aps.call(a, 0));
		o = o || d.global;
		return function(node){
			a[0] = node;
			return f.apply(o, a);
		};
	};

	// adapters

	var adaptAsForEach = function(f, o){
		// summary:
		//		adapts a single node function to be used in the forEach-type
		//		actions. The initial object is returned from the specialized
		//		function.
		// f: Function
		//		a function to adapt
		// o: Object?
		//		an optional context for f
		return function(){
			this.forEach(loopBody(f, arguments, o));
			return this;	// Object
		};
	};

	var adaptAsMap = function(f, o){
		// summary:
		//		adapts a single node function to be used in the map-type
		//		actions. The return is a new array of values, as via `dojo.map`
		// f: Function
		//		a function to adapt
		// o: Object?
		//		an optional context for f
		return function(){
			return this.map(loopBody(f, arguments, o));
		};
	};

	var adaptAsFilter = function(f, o){
		// summary:
		//		adapts a single node function to be used in the filter-type actions
		// f: Function
		//		a function to adapt
		// o: Object?
		//		an optional context for f
		return function(){
			return this.filter(loopBody(f, arguments, o));
		};
	};

	var adaptWithCondition = function(f, g, o){
		// summary:
		//		adapts a single node function to be used in the map-type
		//		actions, behaves like forEach() or map() depending on arguments
		// f: Function
		//		a function to adapt
		// g: Function
		//		a condition function, if true runs as map(), otherwise runs as forEach()
		// o: Object?
		//		an optional context for f and g
		return function(){
			var a = arguments, body = loopBody(f, a, o);
			if(g.call(o || d.global, a)){
				return this.map(body);	// self
			}
			this.forEach(body);
			return this;	// self
		};
	};

	var magicGuard = function(a){
		// summary:
		//		the guard function for dojo.attr() and dojo.style():
		//		a single string argument means "getter"
		return a.length == 1 && (typeof a[0] == "string"); // inline'd type check
	};

	var orphan = function(node){
		// summary:
		//		function to orphan nodes (detach from the parent, if any)
		var p = node.parentNode;
		if(p){
			p.removeChild(node);
		}
	};
	// FIXME: should we move orphan() to dojo.html?

	dojo.NodeList = function(){
		// summary:
		//		dojo.NodeList is an Array subclass which adds syntactic
		//		sugar for chaining, common iteration operations, animation, and
		//		node manipulation. NodeLists are most often returned as the
		//		result of dojo.query() calls.
		// description:
		//		dojo.NodeList instances provide many utilities that reflect
		//		core Dojo APIs for Array iteration and manipulation, DOM
		//		manipulation, and event handling. Instead of needing to dig up
		//		functions in the dojo.* namespace, NodeLists generally make the
		//		full power of Dojo available for DOM manipulation tasks in a
		//		simple, chainable way.
		// example:
		//		create a node list from a node
		//		|	new dojo.NodeList(dojo.byId("foo"));
		// example:
		//		get a NodeList from a CSS query and iterate on it
		//		|	var l = dojo.query(".thinger");
		//		|	l.forEach(function(node, index, nodeList){
		//		|		console.log(index, node.innerHTML);
		//		|	});
		// example:
		//		use native and Dojo-provided array methods to manipulate a
		//		NodeList without needing to use dojo.* functions explicitly:
		//		|	var l = dojo.query(".thinger");
		//		|	// since NodeLists are real arrays, they have a length
		//		|	// property that is both readable and writable and
		//		|	// push/pop/shift/unshift methods
		//		|	console.log(l.length);
		//		|	l.push(dojo.create("span"));
		//		|
		//		|	// dojo's normalized array methods work too:
		//		|	console.log( l.indexOf(dojo.byId("foo")) );
		//		|	// ...including the special "function as string" shorthand
		//		|	console.log( l.every("item.nodeType == 1") );
		//		|
		//		|	// NodeLists can be [..] indexed, or you can use the at()
		//		|	// function to get specific items wrapped in a new NodeList:
		//		|	var node = l[3]; // the 4th element
		//		|	var newList = l.at(1, 3); // the 2nd and 4th elements
		// example:
		//		the style functions you expect are all there too:
		//		|	// style() as a getter...
		//		|	var borders = dojo.query(".thinger").style("border");
		//		|	// ...and as a setter:
		//		|	dojo.query(".thinger").style("border", "1px solid black");
		//		|	// class manipulation
		//		|	dojo.query("li:nth-child(even)").addClass("even");
		//		|	// even getting the coordinates of all the items
		//		|	var coords = dojo.query(".thinger").coords();
		// example:
		//		DOM manipulation functions from the dojo.* namespace area also
		//		available:
		//		|	// remove all of the elements in the list from their
		//		|	// parents (akin to "deleting" them from the document)
		//		|	dojo.query(".thinger").orphan();
		//		|	// place all elements in the list at the front of #foo
		//		|	dojo.query(".thinger").place("foo", "first");
		// example:
		//		Event handling couldn't be easier. `dojo.connect` is mapped in,
		//		and shortcut handlers are provided for most DOM events:
		//		|	// like dojo.connect(), but with implicit scope
		//		|	dojo.query("li").connect("onclick", console, "log");
		//		|
		//		|	// many common event handlers are already available directly:
		//		|	dojo.query("li").onclick(console, "log");
		//		|	var toggleHovered = dojo.hitch(dojo, "toggleClass", "hovered");
		//		|	dojo.query("p")
		//		|		.onmouseenter(toggleHovered)
		//		|		.onmouseleave(toggleHovered);
		// example:
		//		chainability is a key advantage of NodeLists:
		//		|	dojo.query(".thinger")
		//		|		.onclick(function(e){ /* ... */ })
		//		|		.at(1, 3, 8) // get a subset
		//		|		.style("padding", "5px")
		//		|		.forEach(console.log);
		return tnl(Array.apply(null, arguments));
	};

	//Allow things that new up a NodeList to use a delegated or alternate NodeList implementation.
	d._NodeListCtor = d.NodeList;

	var nl = d.NodeList, nlp = nl.prototype;

	// expose adapters and the wrapper as private functions
	nl._wrap = nlp._wrap = tnl;
	nl._adaptAsMap = adaptAsMap;
	nl._adaptAsForEach = adaptAsForEach;
	nl._adaptAsFilter = adaptAsFilter;
	nl._adaptWithCondition = adaptWithCondition;

	// mass assignment

	// add array redirectors
	d.forEach(["slice", "splice"], function(name){
		var f = ap[name];
		//Use a copy of the this array via this.slice() to allow .end() to work right in the splice case.
		// CANNOT apply ._stash()/end() to splice since it currently modifies
		// the existing this array -- it would break backward compatibility if we copy the array before
		// the splice so that we can use .end(). So only doing the stash option to this._wrap for slice.
		// NOTE(review): this statement continues beyond this chunk — kept verbatim.
		nlp[name] = function(){ return this._wrap(f.apply(this, arguments), name == "slice" ?
				this : null);
		};
	});
	// concat should be here but some browsers with native NodeList have problems with it

	// add array.js redirectors: each delegates to the dojo.* array helper,
	// passing this NodeList as the acted-on array.
	d.forEach(["indexOf", "lastIndexOf", "every", "some"], function(name){
		var f = d[name];
		nlp[name] = function(){
			return f.apply(d, [this].concat(aps.call(arguments, 0)));
		};
	});

	// add conditional methods: attr()/style() act as getters (map) when called
	// with a single string, setters (forEach) otherwise — see magicGuard.
	d.forEach(["attr", "style"], function(name){
		nlp[name] = adaptWithCondition(d[name], magicGuard);
	});

	// add forEach actions: these always iterate and return this for chaining.
	d.forEach(["connect", "addClass", "removeClass", "toggleClass", "empty", "removeAttr"], function(name){
		nlp[name] = adaptAsForEach(d[name]);
	});

	dojo.extend(dojo.NodeList, {
		_normalize: function(/*String||Element||Object||NodeList*/content, /*DOMNode?*/refNode){
			// summary:
			//		normalizes data to an array of items to insert.
			// description:
			//		If content is an object, it can have special properties "template" and
			//		"parse". If "template" is defined, then the template value is run through
			//		dojo.string.substitute (if dojo.string.substitute has been dojo.required elsewhere),
			//		or if templateFunc is a function on the content, that function will be used to
			//		transform the template into a final string to be used for passing to dojo._toDom.
			//		If content.parse is true, then it is remembered for later, for when the content
			//		nodes are inserted into the DOM. At that point, the nodes will be parsed for widgets
			//		(if dojo.parser has been dojo.required elsewhere).

			//Wanted to just use a DocumentFragment, but for the array/NodeList
			//case that meant using cloneNode, but we may not want that.
			//Cloning should only happen if the node operations span
			//multiple refNodes. Also, need a real array, not a NodeList from the
			//DOM since the node movements could change those NodeLists.

			// Coerce to a strict boolean; only parse === true opts in.
			var parse = content.parse === true ? true : false;

			//Do we have an object that needs to be run through a template?
if(typeof content.template == "string"){ var templateFunc = content.templateFunc || (dojo.string && dojo.string.substitute); content = templateFunc ? templateFunc(content.template, content) : content; } var type = (typeof content); if(type == "string" || type == "number"){ content = dojo._toDom(content, (refNode && refNode.ownerDocument)); if(content.nodeType == 11){ //DocumentFragment. It cannot handle cloneNode calls, so pull out the children. content = dojo._toArray(content.childNodes); }else{ content = [content]; } }else if(!dojo.isArrayLike(content)){ content = [content]; }else if(!dojo.isArray(content)){ //To get to this point, content is array-like, but //not an array, which likely means a DOM NodeList. Convert it now. content = dojo._toArray(content); } //Pass around the parse info if(parse){ content._runParse = true; } return content; //Array }, _cloneNode: function(/*DOMNode*/ node){ // summary: // private utiltity to clone a node. Not very interesting in the vanilla // dojo.NodeList case, but delegates could do interesting things like // clone event handlers if that is derivable from the node. return node.cloneNode(true); }, _place: function(/*Array*/ary, /*DOMNode*/refNode, /*String*/position, /*Boolean*/useClone){ // summary: // private utility to handle placing an array of nodes relative to another node. // description: // Allows for cloning the nodes in the array, and for // optionally parsing widgets, if ary._runParse is true. //Avoid a disallowed operation if trying to do an innerHTML on a non-element node. if(refNode.nodeType != 1 && position == "only"){ return; } var rNode = refNode, tempNode; //Always cycle backwards in case the array is really a //DOM NodeList and the DOM operations take it out of the live collection. var length = ary.length; for(var i = length - 1; i >= 0; i--){ var node = (useClone ? 
this._cloneNode(ary[i]) : ary[i]); //If need widget parsing, use a temp node, instead of waiting after inserting into //real DOM because we need to start widget parsing at one node up from current node, //which could cause some already parsed widgets to be parsed again. if(ary._runParse && dojo.parser && dojo.parser.parse){ if(!tempNode){ tempNode = rNode.ownerDocument.createElement("div"); } tempNode.appendChild(node); dojo.parser.parse(tempNode); node = tempNode.firstChild; while(tempNode.firstChild){ tempNode.removeChild(tempNode.firstChild); } } if(i == length - 1){ dojo.place(node, rNode, position); }else{ rNode.parentNode.insertBefore(node, rNode); } rNode = node; } }, _stash: function(parent){ // summary: // private function to hold to a parent NodeList. end() to return the parent NodeList. // // example: // How to make a `dojo.NodeList` method that only returns the third node in // the dojo.NodeList but allows access to the original NodeList by using this._stash: // | dojo.extend(dojo.NodeList, { // | third: function(){ // | var newNodeList = dojo.NodeList(this[2]); // | return newNodeList._stash(this); // | } // | }); // | // then see how _stash applies a sub-list, to be .end()'ed out of // | dojo.query(".foo") // | .third() // | .addClass("thirdFoo") // | .end() // | // access to the orig .foo list // | .removeClass("foo") // | // this._parent = parent; return this; //dojo.NodeList }, end: function(){ // summary: // Ends use of the current `dojo.NodeList` by returning the previous dojo.NodeList // that generated the current dojo.NodeList. // description: // Returns the `dojo.NodeList` that generated the current `dojo.NodeList`. If there // is no parent dojo.NodeList, an empty dojo.NodeList is returned. // example: // | dojo.query("a") // | .filter(".disabled") // | // operate on the anchors that only have a disabled class // | .style("color", "grey") // | .end() // | // jump back to the list of anchors // | .style(...) 
// if(this._parent){ return this._parent; }else{ //Just return empy list. return new this._NodeListCtor(); } }, // http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Global_Objects:Array#Methods // FIXME: handle return values for #3244 // http://trac.dojotoolkit.org/ticket/3244 // FIXME: // need to wrap or implement: // join (perhaps w/ innerHTML/outerHTML overload for toString() of items?) // reduce // reduceRight /*===== slice: function(begin, end){ // summary: // Returns a new NodeList, maintaining this one in place // description: // This method behaves exactly like the Array.slice method // with the caveat that it returns a dojo.NodeList and not a // raw Array. For more details, see Mozilla's (slice // documentation)[http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Global_Objects:Array:slice] // begin: Integer // Can be a positive or negative integer, with positive // integers noting the offset to begin at, and negative // integers denoting an offset from the end (i.e., to the left // of the end) // end: Integer? // Optional parameter to describe what position relative to // the NodeList's zero index to end the slice at. Like begin, // can be positive or negative. return this._wrap(a.slice.apply(this, arguments)); }, splice: function(index, howmany, item){ // summary: // Returns a new NodeList, manipulating this NodeList based on // the arguments passed, potentially splicing in new elements // at an offset, optionally deleting elements // description: // This method behaves exactly like the Array.splice method // with the caveat that it returns a dojo.NodeList and not a // raw Array. For more details, see Mozilla's (splice // documentation)[http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Global_Objects:Array:splice] // For backwards compatibility, calling .end() on the spliced NodeList // does not return the original NodeList -- splice alters the NodeList in place. 
// index: Integer // begin can be a positive or negative integer, with positive // integers noting the offset to begin at, and negative // integers denoting an offset from the end (i.e., to the left // of the end) // howmany: Integer? // Optional parameter to describe what position relative to // the NodeList's zero index to end the slice at. Like begin, // can be positive or negative. // item: Object...? // Any number of optional parameters may be passed in to be // spliced into the NodeList // returns: // dojo.NodeList return this._wrap(a.splice.apply(this, arguments)); }, indexOf: function(value, fromIndex){ // summary: // see dojo.indexOf(). The primary difference is that the acted-on // array is implicitly this NodeList // value: Object: // The value to search for. // fromIndex: Integer?: // The loction to start searching from. Optional. Defaults to 0. // description: // For more details on the behavior of indexOf, see Mozilla's // (indexOf // docs)[http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Global_Objects:Array:indexOf] // returns: // Positive Integer or 0 for a match, -1 of not found. return d.indexOf(this, value, fromIndex); // Integer }, lastIndexOf: function(value, fromIndex){ // summary: // see dojo.lastIndexOf(). The primary difference is that the // acted-on array is implicitly this NodeList // description: // For more details on the behavior of lastIndexOf, see // Mozilla's (lastIndexOf // docs)[http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Global_Objects:Array:lastIndexOf] // value: Object // The value to search for. // fromIndex: Integer? // The loction to start searching from. Optional. Defaults to 0. // returns: // Positive Integer or 0 for a match, -1 of not found. 
return d.lastIndexOf(this, value, fromIndex); // Integer }, every: function(callback, thisObject){ // summary: // see `dojo.every()` and the (Array.every // docs)[http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Global_Objects:Array:every]. // Takes the same structure of arguments and returns as // dojo.every() with the caveat that the passed array is // implicitly this NodeList // callback: Function: the callback // thisObject: Object?: the context return d.every(this, callback, thisObject); // Boolean }, some: function(callback, thisObject){ // summary: // Takes the same structure of arguments and returns as // `dojo.some()` with the caveat that the passed array is // implicitly this NodeList. See `dojo.some()` and Mozilla's // (Array.some // documentation)[http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Global_Objects:Array:some]. // callback: Function: the callback // thisObject: Object?: the context return d.some(this, callback, thisObject); // Boolean }, =====*/ concat: function(item){ // summary: // Returns a new NodeList comprised of items in this NodeList // as well as items passed in as parameters // description: // This method behaves exactly like the Array.concat method // with the caveat that it returns a `dojo.NodeList` and not a // raw Array. For more details, see the (Array.concat // docs)[http://developer.mozilla.org/en/docs/Core_JavaScript_1.5_Reference:Global_Objects:Array:concat] // item: Object? // Any number of optional parameters may be passed in to be // spliced into the NodeList // returns: // dojo.NodeList //return this._wrap(apc.apply(this, arguments)); // the line above won't work for the native NodeList :-( // implementation notes: // 1) Native NodeList is not an array, and cannot be used directly // in concat() --- the latter doesn't recognize it as an array, and // does not inline it, but append as a single entity. 
// 2) On some browsers (e.g., Safari) the "constructor" property is // read-only and cannot be changed. So we have to test for both // native NodeList and dojo.NodeList in this property to recognize // the node list. var t = d.isArray(this) ? this : aps.call(this, 0), m = d.map(arguments, function(a){ return a && !d.isArray(a) && (typeof NodeList != "undefined" && a.constructor === NodeList || a.constructor === this._NodeListCtor) ? aps.call(a, 0) : a; }); return this._wrap(apc.apply(t, m), this); // dojo.NodeList }, map: function(/*Function*/ func, /*Function?*/ obj){ // summary: // see dojo.map(). The primary difference is that the acted-on // array is implicitly this NodeList and the return is a // dojo.NodeList (a subclass of Array) ///return d.map(this, func, obj, d.NodeList); // dojo.NodeList return this._wrap(d.map(this, func, obj), this); // dojo.NodeList }, forEach: function(callback, thisObj){ // summary: // see `dojo.forEach()`. The primary difference is that the acted-on // array is implicitly this NodeList. If you want the option to break out // of the forEach loop, use every() or some() instead. d.forEach(this, callback, thisObj); // non-standard return to allow easier chaining return this; // dojo.NodeList }, /*===== coords: function(){ // summary: // Returns the box objects of all elements in a node list as // an Array (*not* a NodeList). Acts like `dojo.coords`, though assumes // the node passed is each node in this list. return d.map(this, d.coords); // Array }, position: function(){ // summary: // Returns border-box objects (x/y/w/h) of all elements in a node list // as an Array (*not* a NodeList). Acts like `dojo.position`, though // assumes the node passed is each node in this list. return d.map(this, d.position); // Array }, attr: function(property, value){ // summary: // gets or sets the DOM attribute for every element in the // NodeList. See also `dojo.attr` // property: String // the attribute to get/set // value: String? // optional. 
The value to set the property to // returns: // if no value is passed, the result is an array of attribute values // If a value is passed, the return is this NodeList // example: // Make all nodes with a particular class focusable: // | dojo.query(".focusable").attr("tabIndex", -1); // example: // Disable a group of buttons: // | dojo.query("button.group").attr("disabled", true); // example: // innerHTML can be assigned or retreived as well: // | // get the innerHTML (as an array) for each list item // | var ih = dojo.query("li.replaceable").attr("innerHTML"); return; // dojo.NodeList return; // Array }, style: function(property, value){ // summary: // gets or sets the CSS property for every element in the NodeList // property: String // the CSS property to get/set, in JavaScript notation // ("lineHieght" instead of "line-height") // value: String? // optional. The value to set the property to // returns: // if no value is passed, the result is an array of strings. // If a value is passed, the return is this NodeList return; // dojo.NodeList return; // Array }, addClass: function(className){ // summary: // adds the specified class to every node in the list // className: String|Array // A String class name to add, or several space-separated class names, // or an array of class names. return; // dojo.NodeList }, removeClass: function(className){ // summary: // removes the specified class from every node in the list // className: String|Array? // An optional String class name to remove, or several space-separated // class names, or an array of class names. If omitted, all class names // will be deleted. // returns: // dojo.NodeList, this list return; // dojo.NodeList }, toggleClass: function(className, condition){ // summary: // Adds a class to node if not present, or removes if present. // Pass a boolean condition if you want to explicitly add or remove. // condition: Boolean? // If passed, true means to add the class, false means to remove. 
// className: String // the CSS class to add return; // dojo.NodeList }, connect: function(methodName, objOrFunc, funcName){ // summary: // attach event handlers to every item of the NodeList. Uses dojo.connect() // so event properties are normalized // methodName: String // the name of the method to attach to. For DOM events, this should be // the lower-case name of the event // objOrFunc: Object|Function|String // if 2 arguments are passed (methodName, objOrFunc), objOrFunc should // reference a function or be the name of the function in the global // namespace to attach. If 3 arguments are provided // (methodName, objOrFunc, funcName), objOrFunc must be the scope to // locate the bound function in // funcName: String? // optional. A string naming the function in objOrFunc to bind to the // event. May also be a function reference. // example: // add an onclick handler to every button on the page // | dojo.query("div:nth-child(odd)").connect("onclick", function(e){ // | console.log("clicked!"); // | }); // example: // attach foo.bar() to every odd div's onmouseover // | dojo.query("div:nth-child(odd)").connect("onmouseover", foo, "bar"); }, empty: function(){ // summary: // clears all content from each node in the list. Effectively // equivalent to removing all child nodes from every item in // the list. return this.forEach("item.innerHTML='';"); // dojo.NodeList // FIXME: should we be checking for and/or disposing of widgets below these nodes? }, =====*/ // useful html methods coords: adaptAsMap(d.coords), position: adaptAsMap(d.position), // FIXME: connectPublisher()? connectRunOnce()? /* destroy: function(){ // summary: // destroys every item in the list. this.forEach(d.destroy); // FIXME: should we be checking for and/or disposing of widgets below these nodes? }, */ place: function(/*String||Node*/ queryOrNode, /*String*/ position){ // summary: // places elements of this node list relative to the first element matched // by queryOrNode. 
Returns the original NodeList. See: `dojo.place` // queryOrNode: // may be a string representing any valid CSS3 selector or a DOM node. // In the selector case, only the first matching element will be used // for relative positioning. // position: // can be one of: // | "last" (default) // | "first" // | "before" // | "after" // | "only" // | "replace" // or an offset in the childNodes property var item = d.query(queryOrNode)[0]; return this.forEach(function(node){ d.place(node, item, position); }); // dojo.NodeList }, orphan: function(/*String?*/ simpleFilter){ // summary: // removes elements in this list that match the simple filter // from their parents and returns them as a new NodeList. // simpleFilter: // single-expression CSS rule. For example, ".thinger" or // "#someId[attrName='value']" but not "div > span". In short, // anything which does not invoke a descent to evaluate but // can instead be used to test a single node is acceptable. // returns: // `dojo.NodeList` containing the orpahned elements return (simpleFilter ? d._filterQueryResult(this, simpleFilter) : this).forEach(orphan); // dojo.NodeList }, adopt: function(/*String||Array||DomNode*/ queryOrListOrNode, /*String?*/ position){ // summary: // places any/all elements in queryOrListOrNode at a // position relative to the first element in this list. // Returns a dojo.NodeList of the adopted elements. // queryOrListOrNode: // a DOM node or a query string or a query result. // Represents the nodes to be adopted relative to the // first element of this NodeList. // position: // can be one of: // | "last" (default) // | "first" // | "before" // | "after" // | "only" // | "replace" // or an offset in the childNodes property return d.query(queryOrListOrNode).place(this[0], position)._stash(this); // dojo.NodeList }, // FIXME: do we need this? 
query: function(/*String*/ queryStr){ // summary: // Returns a new list whose memebers match the passed query, // assuming elements of the current NodeList as the root for // each search. // example: // assume a DOM created by this markup: // | <div id="foo"> // | <p> // | bacon is tasty, <span>dontcha think?</span> // | </p> // | </div> // | <div id="bar"> // | <p>great commedians may not be funny <span>in person</span></p> // | </div> // If we are presented with the following defintion for a NodeList: // | var l = new dojo.NodeList(dojo.byId("foo"), dojo.byId("bar")); // it's possible to find all span elements under paragraphs // contained by these elements with this sub-query: // | var spans = l.query("p span"); // FIXME: probably slow if(!queryStr){ return this; } var ret = this.map(function(node){ // FIXME: why would we ever get undefined here? return d.query(queryStr, node).filter(function(subNode){ return subNode !== undefined; }); }); return this._wrap(apc.apply([], ret), this); // dojo.NodeList }, filter: function(/*String|Function*/ simpleFilter){ // summary: // "masks" the built-in javascript filter() method (supported // in Dojo via `dojo.filter`) to support passing a simple // string filter in addition to supporting filtering function // objects. // simpleFilter: // If a string, a single-expression CSS rule. For example, // ".thinger" or "#someId[attrName='value']" but not "div > // span". In short, anything which does not invoke a descent // to evaluate but can instead be used to test a single node // is acceptable. 
// example: // "regular" JS filter syntax as exposed in dojo.filter: // | dojo.query("*").filter(function(item){ // | // highlight every paragraph // | return (item.nodeName == "p"); // | }).style("backgroundColor", "yellow"); // example: // the same filtering using a CSS selector // | dojo.query("*").filter("p").styles("backgroundColor", "yellow"); var a = arguments, items = this, start = 0; if(typeof simpleFilter == "string"){ // inline'd type check items = d._filterQueryResult(this, a[0]); if(a.length == 1){ // if we only got a string query, pass back the filtered results return items._stash(this); // dojo.NodeList } // if we got a callback, run it over the filtered items start = 1; } return this._wrap(d.filter(items, a[start], a[start + 1]), this); // dojo.NodeList }, /* // FIXME: should this be "copyTo" and include parenting info? clone: function(){ // summary: // creates node clones of each element of this list // and returns a new list containing the clones }, */ addContent: function(/*String||DomNode||Object||dojo.NodeList*/ content, /*String||Integer?*/ position){ // summary: // add a node, NodeList or some HTML as a string to every item in the // list. Returns the original list. // description: // a copy of the HTML content is added to each item in the // list, with an optional position argument. If no position // argument is provided, the content is appended to the end of // each item. // content: // DOM node, HTML in string format, a NodeList or an Object. If a DOM node or // NodeList, the content will be cloned if the current NodeList has more than one // element. Only the DOM nodes are cloned, no event handlers. If it is an Object, // it should be an object with at "template" String property that has the HTML string // to insert. If dojo.string has already been dojo.required, then dojo.string.substitute // will be used on the "template" to generate the final HTML string. 
Other allowed // properties on the object are: "parse" if the HTML // string should be parsed for widgets (dojo.require("dojo.parser") to get that // option to work), and "templateFunc" if a template function besides dojo.string.substitute // should be used to transform the "template". // position: // can be one of: // | "last"||"end" (default) // | "first||"start" // | "before" // | "after" // | "replace" (replaces nodes in this NodeList with new content) // | "only" (removes other children of the nodes so new content is hte only child) // or an offset in the childNodes property // example: // appends content to the end if the position is ommitted // | dojo.query("h3 > p").addContent("hey there!"); // example: // add something to the front of each element that has a // "thinger" property: // | dojo.query("[thinger]").addContent("...", "first"); // example: // adds a header before each element of the list // | dojo.query(".note").addContent("<h4>NOTE:</h4>", "before"); // example: // add a clone of a DOM node to the end of every element in // the list, removing it from its existing parent. // | dojo.query(".note").addContent(dojo.byId("foo")); // example: // Append nodes from a templatized string. // dojo.require("dojo.string"); // dojo.query(".note").addContent({ // template: '<b>${id}: </b><span>${name}</span>', // id: "user332", // name: "Mr. Anderson" // }); // example: // Append nodes from a templatized string that also has widgets parsed. 
// dojo.require("dojo.string"); // dojo.require("dojo.parser"); // var notes = dojo.query(".note").addContent({ // template: '<button dojoType="dijit.form.Button">${text}</button>', // parse: true, // text: "Send" // }); content = this._normalize(content, this[0]); for(var i = 0, node; node = this[i]; i++){ this._place(content, node, position, i > 0); } return this; //dojo.NodeList }, instantiate: function(/*String|Object*/ declaredClass, /*Object?*/ properties){ // summary: // Create a new instance of a specified class, using the // specified properties and each node in the nodeList as a // srcNodeRef. // example: // Grabs all buttons in the page and converts them to diji.form.Buttons. // | var buttons = dojo.query("button").instantiate("dijit.form.Button", {showLabel: true}); var c = d.isFunction(declaredClass) ? declaredClass : d.getObject(declaredClass); properties = properties || {}; return this.forEach(function(node){ new c(properties, node); }); // dojo.NodeList }, at: function(/*===== index =====*/){ // summary: // Returns a new NodeList comprised of items in this NodeList // at the given index or indices. // // index: Integer... // One or more 0-based indices of items in the current // NodeList. A negative index will start at the end of the // list and go backwards. // // example: // Shorten the list to the first, second, and third elements // | dojo.query("a").at(0, 1, 2).forEach(fn); // // example: // Retrieve the first and last elements of a unordered list: // | dojo.query("ul > li").at(0, -1).forEach(cb); // // example: // Do something for the first element only, but end() out back to // the original list and continue chaining: // | dojo.query("a").at(0).onclick(fn).end().forEach(function(n){ // | console.log(n); // all anchors on the page. 
// | }) // // returns: // dojo.NodeList var t = new this._NodeListCtor(); d.forEach(arguments, function(i){ if(i < 0){ i = this.length + i } if(this[i]){ t.push(this[i]); } }, this); return t._stash(this); // dojo.NodeList } }); nl.events = [ // summary: list of all DOM events used in NodeList "blur", "focus", "change", "click", "error", "keydown", "keypress", "keyup", "load", "mousedown", "mouseenter", "mouseleave", "mousemove", "mouseout", "mouseover", "mouseup", "submit" ]; // FIXME: pseudo-doc the above automatically generated on-event functions // syntactic sugar for DOM events d.forEach(nl.events, function(evt){ var _oe = "on" + evt; nlp[_oe] = function(a, b){ return this.connect(_oe, a, b); } // FIXME: should these events trigger publishes? /* return (a ? this.connect(_oe, a, b) : this.forEach(function(n){ // FIXME: // listeners get buried by // addEventListener and can't be dug back // out to be triggered externally. // see: // http://developer.mozilla.org/en/docs/DOM:element console.log(n, evt, _oe); // FIXME: need synthetic event support! var _e = { target: n, faux: true, type: evt }; // dojo._event_listener._synthesizeEvent({}, { target: n, faux: true, type: evt }); try{ n[evt](_e); }catch(e){ console.log(e); } try{ n[_oe](_e); }catch(e){ console.log(e); } }) ); */ } ); })(); } if(!dojo._hasResource["dojo._base.query"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.query"] = true; if(typeof dojo != "undefined"){ dojo.provide("dojo._base.query"); } /* dojo.query() architectural overview: dojo.query is a relatively full-featured CSS3 query library. It is designed to take any valid CSS3 selector and return the nodes matching the selector. To do this quickly, it processes queries in several steps, applying caching where profitable. The steps (roughly in reverse order of the way they appear in the code): 1.) 
				check to see if we already have a "query dispatcher"
					- if so, use that with the given parameterization. Skip
					  to step 4.
			2.) attempt to determine which branch to dispatch the query to:
					- JS (optimized DOM iteration)
					- native (FF3.1+, Safari 3.1+, IE 8+)
			3.) tokenize and convert to executable "query dispatcher"
					- this is where the lion's share of the complexity in
					  the system lies. In the DOM version, the query
					  dispatcher is assembled as a chain of "yes/no" test
					  functions pertaining to a section of a simple query
					  statement (".blah:nth-child(odd)" but not "div div",
					  which is 2 simple statements). Individual statement
					  dispatchers are cached (to prevent re-definition) as
					  are entire dispatch chains (to make re-execution of
					  the same query fast)
			4.) the resulting query dispatcher is called in the passed scope
				(by default the top-level document)
					- for DOM queries, this results in a recursive, top-down
					  evaluation of nodes based on each simple query section
					- for native implementations, this may mean working
					  around spec bugs. So be it.
			5.) matched nodes are pruned to ensure they are unique (if
				necessary)
*/
;(function(d){
	// define everything in a closure for compressability reasons. "d" is an
	// alias to "dojo" (or the toolkit alias object, e.g., "acme").

	////////////////////////////////////////////////////////////////////////
	// Toolkit aliases
	////////////////////////////////////////////////////////////////////////

	// if you are extracting dojo.query for use in your own system, you will
	// need to provide these methods and properties. No other porting should
	// be necessary, save for configuring the system to use a class other
	// than dojo.NodeList as the return instance instantiator.
	var trim 			= d.trim;
	var each 			= d.forEach;
	//		d.isIE;		// float
	//		d.isSafari;	// float
	//		d.isOpera;	// float
	//		d.isWebKit;	// float
	//		d.doc;		// document element
	var qlc = d._NodeListCtor = d.NodeList;

	var getDoc = function(){ return d.doc; };

	// true when we must treat class matching case-insensitively (WebKit or
	// Mozilla rendering a quirks-mode document).
	// NOTE(alex): the spec is idiotic. CSS queries should ALWAYS be
	// case-sensitive, but nooooooo
	var cssCaseBug = ((d.isWebKit||d.isMozilla) && ((getDoc().compatMode) == "BackCompat"));

	////////////////////////////////////////////////////////////////////////
	// Global utilities
	////////////////////////////////////////////////////////////////////////

	// on browsers that support the "children" collection we can avoid a lot
	// of iteration on chaff (non-element) nodes.
	var childNodesName = !!getDoc().firstChild["children"] ? "children" : "childNodes";

	// the CSS infix combinator characters
	var specials = ">~+";

	// global thunk to determine whether we should treat the current query as
	// case sensitive or not. This switch is flipped by the query evaluator
	// based on the document passed as the context to search.
	var caseSensitive = false;

	// a filter that accepts anything
	var yesman = function(){ return true; };

	////////////////////////////////////////////////////////////////////////
	// Tokenizer
	////////////////////////////////////////////////////////////////////////

	var getQueryParts = function(query){
		// summary:
		//		state machine for query tokenization
		// description:
		//		instead of using a brittle and slow regex-based CSS parser,
		//		dojo.query implements an AST-style query representation. This
		//		representation is only generated once per query. For example,
		//		the same query run multiple times or under different root
		//		nodes does not re-parse the selector expression but instead
		//		uses the cached data structure. The state machine implemented
		//		here terminates on the last " " (space) character and returns
		//		an ordered array of query component structures (or "parts").
		//		Each part represents an operator or a simple CSS filtering
		//		expression. The structure for parts is documented in the code
		//		below.
		// NOTE:
		//		this code is designed to run fast and compress well.
		//		Sacrifices to readability and maintainability have been made.

		if(specials.indexOf(query.slice(-1)) >= 0){
			// if we end with a ">", "+", or "~", that means we're implicitly
			// searching all children, so make it explicit
			query += " * "
		}else{
			// if you have not provided a terminator, one will be provided
			// for you...
			query += " ";
		}

		var ts = function(/*Integer*/ s, /*Integer*/ e){
			// trim and slice: take a start index and an end index into the
			// query string and return a trimmed copy of that sub-string
			return trim(query.slice(s, e));
		}

		// the overall data graph of the full query, as represented by
		// queryPart objects
		var queryParts = [];

		// state keeping vars: each in* var is the index where the
		// corresponding token began, or -1 when not inside such a token
		var inBrackets = -1, inParens = -1, inMatchFor = -1,
			inPseudo = -1, inClass = -1, inId = -1, inTag = -1,
			lc = "", cc = "", pStart;

		// iteration vars
		var x = 0, // index in the query
			ql = query.length,
			currentPart = null, // data structure representing the entire clause
			_cp = null; // the current pseudo or attr matcher

		// several temporary variables are assigned to _cp during a potential
		// sub-expression match:
		//		attr:
		//			a string representing the current full attribute match in
		//			a bracket expression
		//		type:
		//			if there's an operator in a bracket expression, this is
		//			used to keep track of it
		//		value:
		//			the internals of a parenthetical expression for a pseudo.
		//			for :nth-child(2n+1), value might be "2n+1"

		var endTag = function(){
			// called when the tokenizer hits the end of a particular tag
			// name. Re-sets state variables for tag matching and sets up
			// the matcher to handle the next type of token (tag or operator).
			if(inTag >= 0){
				var tv = (inTag == x) ? null : ts(inTag, x); // .toLowerCase();
				// a combinator character lands in "oper", a name in "tag"
				currentPart[ (specials.indexOf(tv) < 0) ? "tag" : "oper" ] = tv;
				inTag = -1;
			}
		}

		var endId = function(){
			// called when the tokenizer might be at the end of an ID portion
			// of a match
			if(inId >= 0){
				currentPart.id = ts(inId, x).replace(/\\/g, "");
				inId = -1;
			}
		}

		var endClass = function(){
			// called when the tokenizer might be at the end of a class name
			// match. CSS allows for multiple classes, so we augment the
			// current item with another class in its list
			if(inClass >= 0){
				currentPart.classes.push(ts(inClass+1, x).replace(/\\/g, ""));
				inClass = -1;
			}
		}

		var endAll = function(){
			// at the end of a simple fragment, so wall off the matches
			endId(); endTag(); endClass();
		}

		var endPart = function(){
			endAll();
			if(inPseudo >= 0){
				currentPart.pseudos.push({ name: ts(inPseudo+1, x) });
			}
			// hint to the selector engine to tell it whether or not it
			// needs to do any iteration. Many simple selectors don't, and
			// we can avoid significant construction-time work by advising
			// the system to skip them
			currentPart.loops = (
					currentPart.pseudos.length ||
					currentPart.attrs.length ||
					currentPart.classes.length	);

			currentPart.oquery = currentPart.query = ts(pStart, x); // save the full expression as a string

			// otag/tag are hints to suggest to the system whether or not
			// it's an operator or a tag. We save a copy of otag since the
			// tag name is cast to upper-case in regular HTML matches. The
			// system has a global switch to figure out if the current
			// expression needs to be case sensitive or not and it will use
			// otag or tag accordingly
			currentPart.otag = currentPart.tag = (currentPart["oper"]) ?
				null :
				(currentPart.tag || "*");

			if(currentPart.tag){
				// if we're in a case-insensitive HTML doc, we likely want
				// the toUpperCase when matching on element.tagName. If we
				// do it here, we can skip the string op per node comparison
				currentPart.tag = currentPart.tag.toUpperCase();
			}

			// add the part to the list
			if(queryParts.length && (queryParts[queryParts.length-1].oper)){
				// operators are always infix, so we remove them from the
				// list and attach them to the next match. The evaluator is
				// responsible for sorting out how to handle them.
				currentPart.infixOper = queryParts.pop();
				currentPart.query = currentPart.infixOper.query + " " + currentPart.query;
			}
			queryParts.push(currentPart);

			currentPart = null;
		}

		// iterate over the query, character by character, building up a
		// list of query part objects
		for(; lc=cc, cc=query.charAt(x), x < ql; x++){
			//		cc: the current character in the match
			//		lc: the last character (if any)

			// someone is trying to escape something, so don't try to match
			// any fragments. We assume we're inside a literal.
			if(lc == "\\"){ continue; }
			if(!currentPart){ // a part was just ended or none has yet been created
				// NOTE: I hate all this alloc, but it's shorter than writing tons of if's
				pStart = x;
				// rules describe full CSS sub-expressions, like:
				//		#someId
				//		.className:first-child
				// but not:
				//		thinger > div.howdy[type=thinger]
				// the individual components of the previous query would be
				// split into 3 parts, represented by structures like:
				//		[
				//			{ query: "thinger", tag: "thinger" },
				//			{
				//				query: "div.howdy[type=thinger]",
				//				classes: ["howdy"],
				//				infixOper: { query: ">", oper: ">" }
				//			}
				//		]
				currentPart = {
					query: null, // the full text of the part's rule
					pseudos: [], // CSS supports multiple pseudo-class matches in a single rule
					attrs: [], 	// CSS supports multi-attribute match, so we need an array
					classes: [], // class matches may be additive, e.g.: .thinger.blah.howdy
					tag: null, 	// only one tag...
					oper: null, // ...or operator per component. Note that these wind up being exclusive.
					id: null, 	// the id component of a rule
					getTag: function(){
						// resolve the tag-name hint per the global
						// case-sensitivity switch
						return (caseSensitive) ? this.otag : this.tag;
					}
				};

				// if we don't have a part, we assume we're going to start at
				// the beginning of a match, which should be a tag name. This
				// might fault a little later on, but we detect that and this
				// iteration will still be fine.
				inTag = x;
			}

			if(inBrackets >= 0){
				// look for the close first
				if(cc == "]"){ // if we're in a [...] clause and we end, do assignment
					if(!_cp.attr){
						// no attribute match was previously begun, so we
						// assume this is an attribute existence match in the
						// form of [someAttributeName]
						_cp.attr = ts(inBrackets+1, x);
					}else{
						// we had an attribute already, so we know that we're
						// matching some sort of value, as in [attrName=howdy]
						_cp.matchFor = ts((inMatchFor||inBrackets+1), x);
					}
					var cmf = _cp.matchFor;
					if(cmf){
						// try to strip quotes from the matchFor value. We
						// want [attrName=howdy] to match the same as
						// [attrName = 'howdy' ]
						if(	(cmf.charAt(0) == '"') || (cmf.charAt(0)  == "'") ){
							_cp.matchFor = cmf.slice(1, -1);
						}
					}
					// end the attribute by adding it to the list of attributes.
					currentPart.attrs.push(_cp);
					_cp = null; // necessary?
					inBrackets = inMatchFor = -1;
				}else if(cc == "="){
					// if the last char was an operator prefix, make sure we
					// record it along with the "=" operator.
					var addToCc = ("|~^$*".indexOf(lc) >=0 ) ? lc : "";
					_cp.type = addToCc+cc;
					_cp.attr = ts(inBrackets+1, x-addToCc.length);
					inMatchFor = x+1;
				}
				// now look for other clause parts
			}else if(inParens >= 0){
				// if we're in a parenthetical expression, we need to figure
				// out if it's attached to a pseudo-selector rule like
				// :nth-child(1)
				if(cc == ")"){
					if(inPseudo >= 0){
						_cp.value = ts(inParens+1, x);
					}
					inPseudo = inParens = -1;
				}
			}else if(cc == "#"){
				// start of an ID match
				endAll();
				inId = x+1;
			}else if(cc == "."){
				// start of a class match
				endAll();
				inClass = x;
			}else if(cc == ":"){
				// start of a pseudo-selector match
				endAll();
				inPseudo = x;
			}else if(cc == "["){
				// start of an attribute match.
				endAll();
				inBrackets = x;
				// provide a new structure for the attribute match to fill-in
				_cp = {
					/*=====
					attr: null, type: null, matchFor: null
					=====*/
				};
			}else if(cc == "("){
				// we really only care if we've entered a parenthetical
				// expression if we're already inside a pseudo-selector match
				if(inPseudo >= 0){
					// provide a new structure for the pseudo match to fill-in
					_cp = {
						name: ts(inPseudo+1, x),
						value: null
					}
					currentPart.pseudos.push(_cp);
				}
				inParens = x;
			}else if(
				(cc == " ") &&
				// if it's a space char and the last char is too, consume the
				// current one without doing more work
				(lc != cc)
			){
				endPart();
			}
		}
		return queryParts;
	};

	////////////////////////////////////////////////////////////////////////
	// DOM query infrastructure
	////////////////////////////////////////////////////////////////////////

	var agree = function(first, second){
		// the basic building block of the yes/no chaining system. agree(f1,
		// f2) generates a new function which logical-ANDs the boolean
		// results of both of the passed functions. If either is not passed,
		// the other is used exclusively.
if(!first){ return second; } if(!second){ return first; } return function(){ return first.apply(window, arguments) && second.apply(window, arguments); } }; var getArr = function(i, arr){ // helps us avoid array alloc when we don't need it var r = arr||[]; // FIXME: should this be 'new d._NodeListCtor()' ? if(i){ r.push(i); } return r; }; var _isElement = function(n){ return (1 == n.nodeType); }; // FIXME: need to coalesce _getAttr with defaultGetter var blank = ""; var _getAttr = function(elem, attr){ if(!elem){ return blank; } if(attr == "class"){ return elem.className || blank; } if(attr == "for"){ return elem.htmlFor || blank; } if(attr == "style"){ return elem.style.cssText || blank; } return (caseSensitive ? elem.getAttribute(attr) : elem.getAttribute(attr, 2)) || blank; }; var attrs = { "*=": function(attr, value){ return function(elem){ // E[foo*="bar"] // an E element whose "foo" attribute value contains // the substring "bar" return (_getAttr(elem, attr).indexOf(value)>=0); } }, "^=": function(attr, value){ // E[foo^="bar"] // an E element whose "foo" attribute value begins exactly // with the string "bar" return function(elem){ return (_getAttr(elem, attr).indexOf(value)==0); } }, "$=": function(attr, value){ // E[foo$="bar"] // an E element whose "foo" attribute value ends exactly // with the string "bar" var tval = " "+value; return function(elem){ var ea = " "+_getAttr(elem, attr); return (ea.lastIndexOf(value)==(ea.length-value.length)); } }, "~=": function(attr, value){ // E[foo~="bar"] // an E element whose "foo" attribute value is a list of // space-separated values, one of which is exactly equal // to "bar" // return "[contains(concat(' ',@"+attr+",' '), ' "+ value +" ')]"; var tval = " "+value+" "; return function(elem){ var ea = " "+_getAttr(elem, attr)+" "; return (ea.indexOf(tval)>=0); } }, "|=": function(attr, value){ // E[hreflang|="en"] // an E element whose "hreflang" attribute has a // hyphen-separated list of values beginning (from the 
// left) with "en" var valueDash = " "+value+"-"; return function(elem){ var ea = " "+_getAttr(elem, attr); return ( (ea == value) || (ea.indexOf(valueDash)==0) ); } }, "=": function(attr, value){ return function(elem){ return (_getAttr(elem, attr) == value); } } }; // avoid testing for node type if we can. Defining this in the negative // here to avoid negation in the fast path. var _noNES = (typeof getDoc().firstChild.nextElementSibling == "undefined"); var _ns = !_noNES ? "nextElementSibling" : "nextSibling"; var _ps = !_noNES ? "previousElementSibling" : "previousSibling"; var _simpleNodeTest = (_noNES ? _isElement : yesman); var _lookLeft = function(node){ // look left while(node = node[_ps]){ if(_simpleNodeTest(node)){ return false; } } return true; }; var _lookRight = function(node){ // look right while(node = node[_ns]){ if(_simpleNodeTest(node)){ return false; } } return true; }; var getNodeIndex = function(node){ var root = node.parentNode; var i = 0, tret = root[childNodesName], ci = (node["_i"]||-1), cl = (root["_l"]||-1); if(!tret){ return -1; } var l = tret.length; // we calcuate the parent length as a cheap way to invalidate the // cache. It's not 100% accurate, but it's much more honest than what // other libraries do if( cl == l && ci >= 0 && cl >= 0 ){ // if it's legit, tag and release return ci; } // else re-key things root["_l"] = l; ci = -1; for(var te = root["firstElementChild"]||root["firstChild"]; te; te = te[_ns]){ if(_simpleNodeTest(te)){ te["_i"] = ++i; if(node === te){ // NOTE: // shortcuting the return at this step in indexing works // very well for benchmarking but we avoid it here since // it leads to potential O(n^2) behavior in sequential // getNodexIndex operations on a previously un-indexed // parent. We may revisit this at a later time, but for // now we just want to get the right answer more often // than not. 
					ci = i;
				}
			}
		}
		return ci;
	};

	// parity tests against the (1-based) element index
	var isEven = function(elem){
		return !((getNodeIndex(elem)) % 2);
	};

	var isOdd = function(elem){
		return ((getNodeIndex(elem)) % 2);
	};

	// pseudo-class matcher factories: each takes (name, condition) and
	// returns a per-element test function for that pseudo-selector.
	var pseudos = {
		"checked": function(name, condition){
			return function(elem){
				// "checked" for checkable inputs, "selected" for options
				return !!("checked" in elem ? elem.checked : elem.selected);
			}
		},
		"first-child": function(){ return _lookLeft; },
		"last-child": function(){ return _lookRight; },
		"only-child": function(name, condition){
			return function(node){
				// no element sibling on either side
				if(!_lookLeft(node)){ return false; }
				if(!_lookRight(node)){ return false; }
				return true;
			};
		},
		"empty": function(name, condition){
			return function(elem){
				// DomQuery and jQuery get this wrong, oddly enough.
				// The CSS 3 selectors spec is pretty explicit about it, too.
				var cn = elem.childNodes;
				var cnl = elem.childNodes.length;
				// empty means no element (1) or text (3) child nodes
				for(var x=cnl-1; x >= 0; x--){
					var nt = cn[x].nodeType;
					if((nt === 1)||(nt == 3)){
						return false;
					}
				}
				return true;
			}
		},
		"contains": function(name, condition){
			var cz = condition.charAt(0);
			if( cz == '"' || cz == "'" ){ //remove quote
				condition = condition.slice(1, -1);
			}
			return function(elem){
				return (elem.innerHTML.indexOf(condition) >= 0);
			}
		},
		"not": function(name, condition){
			// parse the negated selector and build an inverted filter,
			// skipping tests the sub-selector doesn't actually use
			var p = getQueryParts(condition)[0];
			var ignores = { el: 1 };
			if(p.tag != "*"){
				ignores.tag = 1;
			}
			if(!p.classes.length){
				ignores.classes = 1;
			}
			var ntf = getSimpleFilterFunc(p, ignores);
			return function(elem){
				return (!ntf(elem));
			}
		},
		"nth-child": function(name, condition){
			var pi = parseInt;
			// avoid re-defining function objects if we can
			if(condition == "odd"){
				return isOdd;
			}else if(condition == "even"){
				return isEven;
			}
			// FIXME: can we shorten this?
			if(condition.indexOf("n") != -1){
				// an+b form: split into the multiplier (pred) and offset (idx)
				var tparts = condition.split("n", 2);
				var pred = tparts[0] ? ((tparts[0] == '-') ? -1 : pi(tparts[0])) : 1;
				var idx = tparts[1] ?
					pi(tparts[1]) : 0;
				// lb/ub are the inclusive lower/upper index bounds implied
				// by the an+b expression (ub < 0 means unbounded above)
				var lb = 0, ub = -1;
				if(pred > 0){
					if(idx < 0){
						idx = (idx % pred) && (pred + (idx % pred));
					}else if(idx>0){
						if(idx >= pred){
							lb = idx - idx % pred;
						}
						idx = idx % pred;
					}
				}else if(pred<0){
					pred *= -1;
					// idx has to be greater than 0 when pred is negative;
					// shall we throw an error here?
					if(idx > 0){
						ub = idx;
						idx = idx % pred;
					}
				}
				if(pred > 0){
					return function(elem){
						var i = getNodeIndex(elem);
						return (i>=lb) && (ub<0 || i<=ub) && ((i % pred) == idx);
					}
				}else{
					// degenerate multiplier: falls through to the exact
					// index match below
					condition = idx;
				}
			}
			// plain integer: match that exact (1-based) child index
			var ncount = pi(condition);
			return function(elem){
				return (getNodeIndex(elem) == ncount);
			}
		}
	};

	// fallback attribute getter used for bare [attr] existence tests; the IE
	// variant reads DOM properties since getAttribute is unreliable there.
	var defaultGetter = (d.isIE) ? function(cond){
		var clc = cond.toLowerCase();
		if(clc == "class"){ cond = "className"; }
		return function(elem){
			return (caseSensitive ? elem.getAttribute(cond) : elem[cond]||elem[clc]);
		}
	} : function(cond){
		return function(elem){
			return (elem && elem.getAttribute && elem.hasAttribute(cond));
		}
	};

	var getSimpleFilterFunc = function(query, ignores){
		// generates a node tester function based on the passed query part.
		// The query part is one of the structures generated by the query
		// parser when it creates the query AST. The "ignores" object
		// specifies which (if any) tests to skip, allowing the system to
		// avoid duplicating work where it may have already been taken into
		// account by other factors such as how the nodes to test were
		// fetched in the first place.
		if(!query){ return yesman; }

		ignores = ignores||{};

		var ff = null;

		if(!("el" in ignores)){
			ff = agree(ff, _isElement);
		}

		if(!("tag" in ignores)){
			if(query.tag != "*"){
				ff = agree(ff, function(elem){
					return (elem && (elem.tagName == query.getTag()));
				});
			}
		}

		if(!("classes" in ignores)){
			each(query.classes, function(cname, idx, arr){
				// one whole-word regex test per class name
				var re = new RegExp("(?:^|\\s)" + cname + "(?:\\s|$)");
				ff = agree(ff, function(elem){
					return re.test(elem.className);
				});
				ff.count = idx;
			});
		}

		if(!("pseudos" in ignores)){
			each(query.pseudos, function(pseudo){
				var pn = pseudo.name;
				if(pseudos[pn]){
					ff = agree(ff, pseudos[pn](pn, pseudo.value));
				}
			});
		}

		if(!("attrs" in ignores)){
			each(query.attrs, function(attr){
				var matcher;
				var a = attr.attr;
				// type, attr, matchFor
				if(attr.type && attrs[attr.type]){
					matcher = attrs[attr.type](a, attr.matchFor);
				}else if(a.length){
					// bare [attr] existence test
					matcher = defaultGetter(a);
				}
				if(matcher){
					ff = agree(ff, matcher);
				}
			});
		}

		if(!("id" in ignores)){
			if(query.id){
				ff = agree(ff, function(elem){
					return (!!elem && (elem.id == query.id));
				});
			}
		}

		if(!ff){
			if(!("default" in ignores)){
				ff = yesman;
			}
		}

		return ff;
	};

	var _nextSibling = function(filterFunc){
		// "+" combinator: test only the immediately following element sibling
		return function(node, ret, bag){
			while(node = node[_ns]){
				if(_noNES && (!_isElement(node))){ continue; }
				if(
					(!bag || _isUnique(node, bag)) &&
					filterFunc(node)
				){
					ret.push(node);
				}
				break;
			}
			return ret;
		}
	};

	var _nextSiblings = function(filterFunc){
		// "~" combinator: test every following element sibling
		return function(root, ret, bag){
			var
				te = root[_ns];
			while(te){
				if(_simpleNodeTest(te)){
					if(bag && !_isUnique(te, bag)){
						break;
					}
					if(filterFunc(te)){
						ret.push(te);
					}
				}
				te = te[_ns];
			}
			return ret;
		}
	};

	// get an array of child *elements*, skipping text and comment nodes
	var _childElements = function(filterFunc){
		filterFunc = filterFunc||yesman;
		return function(root, ret, bag){
			// get an array of child elements, skipping text and comment nodes
			var te, x = 0, tret = root[childNodesName];
			while(te = tret[x++]){
				if(
					_simpleNodeTest(te) &&
					(!bag || _isUnique(te, bag)) &&
					(filterFunc(te, x))
				){
					ret.push(te);
				}
			}
			return ret;
		};
	};

	/*
	// thanks, Dean!
	var itemIsAfterRoot = d.isIE ? function(item, root){
		return (item.sourceIndex > root.sourceIndex);
	} : function(item, root){
		return (item.compareDocumentPosition(root) == 2);
	};
	*/

	// test to see if node is below root
	var _isDescendant = function(node, root){
		var pn = node.parentNode;
		while(pn){
			if(pn == root){
				break;
			}
			pn = pn.parentNode;
		}
		return !!pn;
	};

	// cache of compiled per-part element fetchers, keyed by part query text
	var _getElementsFuncCache = {};

	var getElementsFunc = function(query){
		var retFunc = _getElementsFuncCache[query.query];
		// if we've got a cached dispatcher, just use that
		if(retFunc){ return retFunc; }
		// else, generate a new one

		// NOTE:
		//		this function returns a function that searches for nodes and
		//		filters them. The search may be specialized by infix
		//		operators (">", "~", or "+") else it will default to
		//		searching all descendants (the " " selector). Once a group
		//		of children is found, a test function is applied to weed out
		//		the ones we don't want. Many common cases can be fast-pathed.
		//		We spend a lot of cycles to create a dispatcher that doesn't
		//		do more work than necessary at any point since, unlike this
		//		function, the dispatchers will be called every time. The
		//		logic of generating efficient dispatchers looks like this in
		//		pseudo code:
		//
		//		# if it's a purely descendant query (no ">", "+", or "~" modifiers)
		//		if infixOperator == " ":
		//			if only(id):
		//				return def(root):
		//					return d.byId(id, root);
		//			elif id:
		//				return def(root):
		//					return filter(d.byId(id, root));
		//			elif cssClass && getElementsByClassName:
		//				return def(root):
		//					return filter(root.getElementsByClassName(cssClass));
		//			elif only(tag):
		//				return def(root):
		//					return root.getElementsByTagName(tagName);
		//			else:
		//				# search by tag name, then filter
		//				return def(root):
		//					return filter(root.getElementsByTagName(tagName||"*"));
		//		elif infixOperator == ">":
		//			# search direct children
		//			return def(root):
		//				return filter(root.children);
		//		elif infixOperator == "+":
		//			# search next sibling
		//			return def(root):
		//				return filter(root.nextElementSibling);
		//		elif infixOperator == "~":
		//			# search rightward siblings
		//			return def(root):
		//				return filter(nextSiblings(root));

		var io = query.infixOper;
		var oper = (io ? io.oper : "");

		// the default filter func which tests for all conditions in the
		// query part. This is potentially inefficient, so some optimized
		// paths may re-define it to test fewer things.
		var filterFunc = getSimpleFilterFunc(query, { el: 1 });
		var qt = query.tag;
		var wildcardTag = ("*" == qt);
		var ecs = getDoc()["getElementsByClassName"];

		if(!oper){
			// if there's no infix operator, then it's a descendant query. ID
			// and "elements by class name" variants can be accelerated so we
			// call them out explicitly:
			if(query.id){
				// testing shows that the overhead of yesman() is acceptable
				// and can save us some bytes vs. re-defining the function
				// everywhere.
				filterFunc = (!query.loops && wildcardTag) ?
					yesman :
					getSimpleFilterFunc(query, { el: 1, id: 1 });

				retFunc = function(root, arr){
					var te = d.byId(query.id, (root.ownerDocument||root));
					if(!te || !filterFunc(te)){ return; }
					if(9 == root.nodeType){ // if root's a doc, we just return directly
						return getArr(te, arr);
					}else{ // otherwise check ancestry
						if(_isDescendant(te, root)){
							return getArr(te, arr);
						}
					}
				}
			}else if(
				ecs &&
				// isAlien check. Workaround for Prototype.js being totally evil/dumb.
				/\{\s*\[native code\]\s*\}/.test(String(ecs)) &&
				query.classes.length &&
				!cssCaseBug
			){
				// it's a class-based query and we've got a fast way to run it.

				// ignore class and ID filters since we will have handled both
				filterFunc = getSimpleFilterFunc(query, { el: 1, classes: 1, id: 1 });
				var classesString = query.classes.join(" ");
				retFunc = function(root, arr, bag){
					var ret = getArr(0, arr), te, x=0;
					var tret = root.getElementsByClassName(classesString);
					while((te = tret[x++])){
						if(filterFunc(te, root) && _isUnique(te, bag)){
							ret.push(te);
						}
					}
					return ret;
				};
			}else if(!wildcardTag && !query.loops){
				// it's tag only. Fast-path it.
				retFunc = function(root, arr, bag){
					var ret = getArr(0, arr), te, x=0;
					var tret = root.getElementsByTagName(query.getTag());
					while((te = tret[x++])){
						if(_isUnique(te, bag)){
							ret.push(te);
						}
					}
					return ret;
				};
			}else{
				// the common case:
				//		a descendant selector without a fast path. By now it's
				//		got to have a tag selector, even if it's just "*" so
				//		we query by that and filter
				filterFunc = getSimpleFilterFunc(query, { el: 1, tag: 1, id: 1 });
				retFunc = function(root, arr, bag){
					var ret = getArr(0, arr), te, x=0;
					// we use getTag() to avoid case sensitivity issues
					var tret = root.getElementsByTagName(query.getTag());
					while((te = tret[x++])){
						if(filterFunc(te, root) && _isUnique(te, bag)){
							ret.push(te);
						}
					}
					return ret;
				};
			}
		}else{
			// the query is scoped in some way. Instead of querying by tag we
			// use some other collection to find candidate nodes
			var skipFilters = { el: 1 };
			if(wildcardTag){
				skipFilters.tag = 1;
			}
			filterFunc = getSimpleFilterFunc(query, skipFilters);
			if("+" == oper){
				retFunc = _nextSibling(filterFunc);
			}else if("~" == oper){
				retFunc = _nextSiblings(filterFunc);
			}else if(">" == oper){
				retFunc = _childElements(filterFunc);
			}
		}
		// cache it and return
		return _getElementsFuncCache[query.query] = retFunc;
	};

	var filterDown = function(root, queryParts){
		// NOTE:
		//		this is the guts of the DOM query system. It takes a list of
		//		parsed query parts and a root and finds children which match
		//		the selector represented by the parts
		var candidates = getArr(root), qp, x, te, qpl = queryParts.length, bag, ret;

		for(var i = 0; i < qpl; i++){
			ret = [];
			qp = queryParts[i];
			x = candidates.length - 1;
			if(x > 0){
				// if we have more than one root at this level, provide a new
				// hash to use for checking group membership but tell the
				// system not to post-filter us since we will already have
				// been guaranteed to be unique
				bag = {};
				ret.nozip = true;
			}
			var gef = getElementsFunc(qp);
			for(var j = 0; (te = candidates[j]); j++){
				// for every root, get the elements that match the descendant
				// selector, adding them to the "ret" array and filtering them
				// via membership in this level's bag. If there are more query
				// parts, then this level's return will be used as the next
				// level's candidates
				gef(te, ret, bag);
			}
			if(!ret.length){ break; }
			candidates = ret;
		}
		return ret;
	};

	////////////////////////////////////////////////////////////////////////
	// the query runner
	////////////////////////////////////////////////////////////////////////

	// these are the primary caches for full-query results. The query
	// dispatcher functions are generated then stored here for hash lookup in
	// the future
	var _queryFuncCacheDOM = {},
		_queryFuncCacheQSA = {};

	// this is the second level of splitting, from full-length queries (e.g.,
	// "div.foo .bar") into simple query expressions (e.g., ["div.foo",
	// ".bar"])
	var getStepQueryFunc = function(query){
		var qparts = getQueryParts(trim(query));

		// if it's trivial, avoid iteration and zipping costs
		if(qparts.length == 1){
			// we optimize this case here to prevent dispatch further down the
			// chain, potentially slowing things down. We could more elegantly
			// handle this in filterDown(), but it's slower for simple things
			// that need to be fast (e.g., "#someId").
			var tef = getElementsFunc(qparts[0]);
			return function(root){
				var r = tef(root, new qlc());
				if(r){ r.nozip = true; }
				return r;
			}
		}

		// otherwise, break it up and return a runner that iterates over the
		// parts recursively
		return function(root){
			return filterDown(root, qparts);
		}
	};

	// NOTES:
	//	* we can't trust QSA for anything but document-rooted queries, so
	//	  caching is split into DOM query evaluators and QSA query evaluators
	//	* caching query results is dirty and leak-prone (or, at a minimum,
	//	  prone to unbounded growth). Other toolkits may go this route, but
	//	  they totally destroy their own ability to manage their memory
	//	  footprint. If we implement it, it should only ever be with a fixed
	//	  total element reference # limit and an LRU-style algorithm since JS
	//	  has no weakref support. Caching compiled query evaluators is also
	//	  potentially problematic, but even on large documents the size of
	//	  the query evaluators is often < 100 function objects per evaluator
	//	  (and LRU can be applied if it's ever shown to be an issue).
	//	* since IE's QSA support is currently only for HTML documents and
	//	  even then only in IE 8's "standards mode", we have to detect our
	//	  dispatch route at query time and keep 2 separate caches. Ugg.
// we need to determine if we think we can run a given query via // querySelectorAll or if we'll need to fall back on DOM queries to get // there. We need a lot of information about the environment and the query // to make the determiniation (e.g. does it support QSA, does the query in // question work in the native QSA impl, etc.). var nua = navigator.userAgent; // some versions of Safari provided QSA, but it was buggy and crash-prone. // We need te detect the right "internal" webkit version to make this work. var wk = "WebKit/"; var is525 = ( d.isWebKit && (nua.indexOf(wk) > 0) && (parseFloat(nua.split(wk)[1]) > 528) ); // IE QSA queries may incorrectly include comment nodes, so we throw the // zipping function into "remove" comments mode instead of the normal "skip // it" which every other QSA-clued browser enjoys var noZip = d.isIE ? "commentStrip" : "nozip"; var qsa = "querySelectorAll"; var qsaAvail = ( !!getDoc()[qsa] && // see #5832 (!d.isSafari || (d.isSafari > 3.1) || is525 ) ); //Don't bother with n+3 type of matches, IE complains if we modify those. var infixSpaceRe = /n\+\d|([^ ])?([>~+])([^ =])?/g; var infixSpaceFunc = function(match, pre, ch, post) { return ch ? (pre ? pre + " " : "") + ch + (post ? " " + post : "") : /*n+3*/ match; }; var getQueryFunc = function(query, forceDOM){ //Normalize query. The CSS3 selectors spec allows for omitting spaces around //infix operators, >, ~ and + //Do the work here since detection for spaces is used as a simple "not use QSA" //test below. query = query.replace(infixSpaceRe, infixSpaceFunc); if(qsaAvail){ // if we've got a cached variant and we think we can do it, run it! 
var qsaCached = _queryFuncCacheQSA[query]; if(qsaCached && !forceDOM){ return qsaCached; } } // else if we've got a DOM cached variant, assume that we already know // all we need to and use it var domCached = _queryFuncCacheDOM[query]; if(domCached){ return domCached; } // TODO: // today we're caching DOM and QSA branches separately so we // recalc useQSA every time. If we had a way to tag root+query // efficiently, we'd be in good shape to do a global cache. var qcz = query.charAt(0); var nospace = (-1 == query.indexOf(" ")); // byId searches are wicked fast compared to QSA, even when filtering // is required if( (query.indexOf("#") >= 0) && (nospace) ){ forceDOM = true; } var useQSA = ( qsaAvail && (!forceDOM) && // as per CSS 3, we can't currently start w/ combinator: // http://www.w3.org/TR/css3-selectors/#w3cselgrammar (specials.indexOf(qcz) == -1) && // IE's QSA impl sucks on pseudos (!d.isIE || (query.indexOf(":") == -1)) && (!(cssCaseBug && (query.indexOf(".") >= 0))) && // FIXME: // need to tighten up browser rules on ":contains" and "|=" to // figure out which aren't good // Latest webkit (around 531.21.8) does not seem to do well with :checked on option // elements, even though according to spec, selected options should // match :checked. So go nonQSA for it: // http://bugs.dojotoolkit.org/ticket/5179 (query.indexOf(":contains") == -1) && (query.indexOf(":checked") == -1) && (query.indexOf("|=") == -1) // some browsers don't grok it ); // TODO: // if we've got a descendant query (e.g., "> .thinger" instead of // just ".thinger") in a QSA-able doc, but are passed a child as a // root, it should be possible to give the item a synthetic ID and // trivially rewrite the query to the form "#synid > .thinger" to // use the QSA branch if(useQSA){ var tq = (specials.indexOf(query.charAt(query.length-1)) >= 0) ? 
(query + " *") : query; return _queryFuncCacheQSA[query] = function(root){ try{ // the QSA system contains an egregious spec bug which // limits us, effectively, to only running QSA queries over // entire documents. See: // http://ejohn.org/blog/thoughts-on-queryselectorall/ // despite this, we can also handle QSA runs on simple // selectors, but we don't want detection to be expensive // so we're just checking for the presence of a space char // right now. Not elegant, but it's cheaper than running // the query parser when we might not need to if(!((9 == root.nodeType) || nospace)){ throw ""; } var r = root[qsa](tq); // skip expensive duplication checks and just wrap in a NodeList r[noZip] = true; return r; }catch(e){ // else run the DOM branch on this query, ensuring that we // default that way in the future return getQueryFunc(query, true)(root); } } }else{ // DOM branch var parts = query.split(/\s*,\s*/); return _queryFuncCacheDOM[query] = ((parts.length < 2) ? // if not a compound query (e.g., ".foo, .bar"), cache and return a dispatcher getStepQueryFunc(query) : // if it *is* a complex query, break it up into its // constituent parts and return a dispatcher that will // merge the parts when run function(root){ var pindex = 0, // avoid array alloc for every invocation ret = [], tp; while((tp = parts[pindex++])){ ret = ret.concat(getStepQueryFunc(tp)(root)); } return ret; } ); } }; var _zipIdx = 0; // NOTE: // this function is Moo inspired, but our own impl to deal correctly // with XML in IE var _nodeUID = d.isIE ? function(node){ if(caseSensitive){ // XML docs don't have uniqueID on their nodes return (node.getAttribute("_uid") || node.setAttribute("_uid", ++_zipIdx) || _zipIdx); }else{ return node.uniqueID; } } : function(node){ return (node._uid || (node._uid = ++_zipIdx)); }; // determine if a node in is unique in a "bag". In this case we don't want // to flatten a list of unique items, but rather just tell if the item in // question is already in the bag. 
Normally we'd just use hash lookup to do // this for us but IE's DOM is busted so we can't really count on that. On // the upside, it gives us a built in unique ID function. var _isUnique = function(node, bag){ if(!bag){ return 1; } var id = _nodeUID(node); if(!bag[id]){ return bag[id] = 1; } return 0; }; // attempt to efficiently determine if an item in a list is a dupe, // returning a list of "uniques", hopefully in doucment order var _zipIdxName = "_zipIdx"; var _zip = function(arr){ if(arr && arr.nozip){ return (qlc._wrap) ? qlc._wrap(arr) : arr; } // var ret = new d._NodeListCtor(); var ret = new qlc(); if(!arr || !arr.length){ return ret; } if(arr[0]){ ret.push(arr[0]); } if(arr.length < 2){ return ret; } _zipIdx++; // we have to fork here for IE and XML docs because we can't set // expandos on their nodes (apparently). *sigh* if(d.isIE && caseSensitive){ var szidx = _zipIdx+""; arr[0].setAttribute(_zipIdxName, szidx); for(var x = 1, te; te = arr[x]; x++){ if(arr[x].getAttribute(_zipIdxName) != szidx){ ret.push(te); } te.setAttribute(_zipIdxName, szidx); } }else if(d.isIE && arr.commentStrip){ try{ for(var x = 1, te; te = arr[x]; x++){ if(_isElement(te)){ ret.push(te); } } }catch(e){ /* squelch */ } }else{ if(arr[0]){ arr[0][_zipIdxName] = _zipIdx; } for(var x = 1, te; te = arr[x]; x++){ if(arr[x][_zipIdxName] != _zipIdx){ ret.push(te); } te[_zipIdxName] = _zipIdx; } } return ret; }; // the main executor d.query = function(/*String*/ query, /*String|DOMNode?*/ root){ // summary: // Returns nodes which match the given CSS3 selector, searching the // entire document by default but optionally taking a node to scope // the search by. Returns an instance of dojo.NodeList. // description: // dojo.query() is the swiss army knife of DOM node manipulation in // Dojo. 
Much like Prototype's "$$" (bling-bling) function or JQuery's // "$" function, dojo.query provides robust, high-performance // CSS-based node selector support with the option of scoping searches // to a particular sub-tree of a document. // // Supported Selectors: // -------------------- // // dojo.query() supports a rich set of CSS3 selectors, including: // // * class selectors (e.g., `.foo`) // * node type selectors like `span` // * ` ` descendant selectors // * `>` child element selectors // * `#foo` style ID selectors // * `*` universal selector // * `~`, the immediately preceeded-by sibling selector // * `+`, the preceeded-by sibling selector // * attribute queries: // | * `[foo]` attribute presence selector // | * `[foo='bar']` attribute value exact match // | * `[foo~='bar']` attribute value list item match // | * `[foo^='bar']` attribute start match // | * `[foo$='bar']` attribute end match // | * `[foo*='bar']` attribute substring match // * `:first-child`, `:last-child`, and `:only-child` positional selectors // * `:empty` content emtpy selector // * `:checked` pseudo selector // * `:nth-child(n)`, `:nth-child(2n+1)` style positional calculations // * `:nth-child(even)`, `:nth-child(odd)` positional selectors // * `:not(...)` negation pseudo selectors // // Any legal combination of these selectors will work with // `dojo.query()`, including compound selectors ("," delimited). // Very complex and useful searches can be constructed with this // palette of selectors and when combined with functions for // manipulation presented by dojo.NodeList, many types of DOM // manipulation operations become very straightforward. // // Unsupported Selectors: // ---------------------- // // While dojo.query handles many CSS3 selectors, some fall outside of // what's resaonable for a programmatic node querying engine to // handle. 
Currently unsupported selectors include: // // * namespace-differentiated selectors of any form // * all `::` pseduo-element selectors // * certain pseduo-selectors which don't get a lot of day-to-day use: // | * `:root`, `:lang()`, `:target`, `:focus` // * all visual and state selectors: // | * `:root`, `:active`, `:hover`, `:visisted`, `:link`, // `:enabled`, `:disabled` // * `:*-of-type` pseudo selectors // // dojo.query and XML Documents: // ----------------------------- // // `dojo.query` (as of dojo 1.2) supports searching XML documents // in a case-sensitive manner. If an HTML document is served with // a doctype that forces case-sensitivity (e.g., XHTML 1.1 // Strict), dojo.query() will detect this and "do the right // thing". Case sensitivity is dependent upon the document being // searched and not the query used. It is therefore possible to // use case-sensitive queries on strict sub-documents (iframes, // etc.) or XML documents while still assuming case-insensitivity // for a host/root document. // // Non-selector Queries: // --------------------- // // If something other than a String is passed for the query, // `dojo.query` will return a new `dojo.NodeList` instance // constructed from that parameter alone and all further // processing will stop. This means that if you have a reference // to a node or NodeList, you can quickly construct a new NodeList // from the original by calling `dojo.query(node)` or // `dojo.query(list)`. // // query: // The CSS3 expression to match against. For details on the syntax of // CSS3 selectors, see <http://www.w3.org/TR/css3-selectors/#selectors> // root: // A DOMNode (or node id) to scope the search from. Optional. // returns: dojo.NodeList // An instance of `dojo.NodeList`. Many methods are available on // NodeLists for searching, iterating, manipulating, and handling // events on the matched nodes in the returned list. 
// example: // search the entire document for elements with the class "foo": // | dojo.query(".foo"); // these elements will match: // | <span class="foo"></span> // | <span class="foo bar"></span> // | <p class="thud foo"></p> // example: // search the entire document for elements with the classes "foo" *and* "bar": // | dojo.query(".foo.bar"); // these elements will match: // | <span class="foo bar"></span> // while these will not: // | <span class="foo"></span> // | <p class="thud foo"></p> // example: // find `<span>` elements which are descendants of paragraphs and // which have a "highlighted" class: // | dojo.query("p span.highlighted"); // the innermost span in this fragment matches: // | <p class="foo"> // | <span>... // | <span class="highlighted foo bar">...</span> // | </span> // | </p> // example: // set an "odd" class on all odd table rows inside of the table // `#tabular_data`, using the `>` (direct child) selector to avoid // affecting any nested tables: // | dojo.query("#tabular_data > tbody > tr:nth-child(odd)").addClass("odd"); // example: // remove all elements with the class "error" from the document // and store them in a list: // | var errors = dojo.query(".error").orphan(); // example: // add an onclick handler to every submit button in the document // which causes the form to be sent via Ajax instead: // | dojo.query("input[type='submit']").onclick(function(e){ // | dojo.stopEvent(e); // prevent sending the form // | var btn = e.target; // | dojo.xhrPost({ // | form: btn.form, // | load: function(data){ // | // replace the form with the response // | var div = dojo.doc.createElement("div"); // | dojo.place(div, btn.form, "after"); // | div.innerHTML = data; // | dojo.style(btn.form, "display", "none"); // | } // | }); // | }); //Set list constructor to desired value. This can change //between calls, so always re-assign here. 
qlc = d._NodeListCtor;

		if(!query){
			return new qlc();
		}

		// pass-through: already a NodeList
		if(query.constructor == qlc){
			return query;
		}
		if(typeof query != "string"){ // inline'd type check
			// non-string (node, array, NodeList-ish) -> wrap and return, no parsing
			return new qlc(query); // dojo.NodeList
		}
		if(typeof root == "string"){ // inline'd type check
			root = d.byId(root);
			// unknown root id: empty result rather than searching the document
			if(!root){ return new qlc(); }
		}

		root = root||getDoc();
		var od = root.ownerDocument||root.documentElement;

		// throw the big case sensitivity switch

		// NOTE:
		// 		Opera in XHTML mode doesn't detect case-sensitivity correctly
		// 		and it's not clear that there's any way to test for it
		caseSensitive = (root.contentType && root.contentType=="application/xml") ||
						(d.isOpera && (root.doctype || od.toString() == "[object XMLDocument]")) ||
						(!!od) && (d.isIE ? od.xml : (root.xmlVersion||od.xmlVersion));

		// NOTE:
		//		adding "true" as the 2nd argument to getQueryFunc is useful for
		//		testing the DOM branch without worrying about the
		//		behavior/performance of the QSA branch.
		var r = getQueryFunc(query)(root);

		// FIXME:
		//		need to investigate this branch WRT #8074 and #8075
		// nozip results are already unique; skip the de-dup pass unless the
		// NodeList ctor requires wrapping
		if(r && r.nozip && !qlc._wrap){
			return r;
		}
		return _zip(r); // dojo.NodeList
	}

	// FIXME: need to add infrastructure for post-filtering pseudos, ala :last
	d.query.pseudos = pseudos;

	// one-off function for filtering a NodeList based on a simple selector;
	// only the first parsed part of simpleFilter is applied (no combinators)
	d._filterQueryResult = function(nodeList, simpleFilter){
		var tmpNodeList = new d._NodeListCtor();
		var filterFunc = getSimpleFilterFunc(getQueryParts(simpleFilter)[0]);
		for(var x = 0, te; te = nodeList[x]; x++){
			if(filterFunc(te)){ tmpNodeList.push(te); }
		}
		return tmpNodeList;
	}
})(this["queryPortability"]||this["acme"]||dojo);
/*
*/
}

if(!dojo._hasResource["dojo._base.xhr"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code.
dojo._hasResource["dojo._base.xhr"] = true;
dojo.provide("dojo._base.xhr");

(function(){
	var _d = dojo, cfg = _d.config;

	function setValue(/*Object*/obj, /*String*/name, /*String*/value){
		//summary:
		//	For the named property in object, set the value. If a value
		//	already exists and it is a string, convert the value to be an
		//	array of values.

		//Skip it if there is no value
		// NOTE(review): only null is skipped — an undefined value would still
		// be assigned; callers (formToObject) never pass undefined here.
		if(value === null){
			return;
		}

		var val = obj[name];
		if(typeof val == "string"){ // inline'd type check
			// second value for the same name: promote to a two-element array
			obj[name] = [val, value];
		}else if(_d.isArray(val)){
			// third and later values: append
			val.push(value);
		}else{
			// first value for this name
			obj[name] = value;
		}
	}

	dojo.fieldToObject = function(/*DOMNode||String*/ inputNode){
		// summary:
		//		Serialize a form field to a JavaScript object.
		//
		// description:
		//		Returns the value encoded in a form field as
		//		as a string or an array of strings. Disabled form elements
		//		and unchecked radio and checkboxes are skipped. Multi-select
		//		elements are returned as an array of string values.
		//
		// returns: null when the field is disabled, unnamed, unchecked, or
		//		otherwise contributes no value.
		var ret = null;
		var item = _d.byId(inputNode);
		if(item){
			var _in = item.name;
			var type = (item.type||"").toLowerCase();
			if(_in && type && !item.disabled){
				if(type == "radio" || type == "checkbox"){
					// only checked toggles contribute a value
					if(item.checked){ ret = item.value }
				}else if(item.multiple){
					// multi-select: collect every selected option value
					ret = [];
					_d.query("option", item).forEach(function(opt){
						if(opt.selected){
							ret.push(opt.value);
						}
					});
				}else{
					ret = item.value;
				}
			}
		}
		return ret; // Object
	}

	dojo.formToObject = function(/*DOMNode||String*/ formNode){
		// summary:
		//		Serialize a form node to a JavaScript object.
		// description:
		//		Returns the values encoded in an HTML form as
		//		string properties in an object which it then returns. Disabled form
		//		elements, buttons, and other non-value form elements are skipped.
		//		Multi-select elements are returned as an array of string values.
// // example: // This form: // | <form id="test_form"> // | <input type="text" name="blah" value="blah"> // | <input type="text" name="no_value" value="blah" disabled> // | <input type="button" name="no_value2" value="blah"> // | <select type="select" multiple name="multi" size="5"> // | <option value="blah">blah</option> // | <option value="thud" selected>thud</option> // | <option value="thonk" selected>thonk</option> // | </select> // | </form> // // yields this object structure as the result of a call to // formToObject(): // // | { // | blah: "blah", // | multi: [ // | "thud", // | "thonk" // | ] // | }; var ret = {}; var exclude = "file|submit|image|reset|button|"; _d.forEach(dojo.byId(formNode).elements, function(item){ var _in = item.name; var type = (item.type||"").toLowerCase(); if(_in && type && exclude.indexOf(type) == -1 && !item.disabled){ setValue(ret, _in, _d.fieldToObject(item)); if(type == "image"){ ret[_in+".x"] = ret[_in+".y"] = ret[_in].x = ret[_in].y = 0; } } }); return ret; // Object } dojo.objectToQuery = function(/*Object*/ map){ // summary: // takes a name/value mapping object and returns a string representing // a URL-encoded version of that object. // example: // this object: // // | { // | blah: "blah", // | multi: [ // | "thud", // | "thonk" // | ] // | }; // // yields the following query string: // // | "blah=blah&multi=thud&multi=thonk" // FIXME: need to implement encodeAscii!! 
		var enc = encodeURIComponent;
		var pairs = [];
		// backstop filters out properties inherited from Object.prototype:
		// a key whose value equals the empty object's value is skipped
		var backstop = {};
		for(var name in map){
			var value = map[name];
			if(value != backstop[name]){
				var assign = enc(name) + "=";
				if(_d.isArray(value)){
					// array value: repeat the name once per element
					for(var i=0; i < value.length; i++){
						pairs.push(assign + enc(value[i]));
					}
				}else{
					pairs.push(assign + enc(value));
				}
			}
		}
		return pairs.join("&"); // String
	}

	dojo.formToQuery = function(/*DOMNode||String*/ formNode){
		// summary:
		//		Returns a URL-encoded string representing the form passed as either a
		//		node or string ID identifying the form to serialize
		return _d.objectToQuery(_d.formToObject(formNode)); // String
	}

	dojo.formToJson = function(/*DOMNode||String*/ formNode, /*Boolean?*/prettyPrint){
		// summary:
		//		Create a serialized JSON string from a form node or string
		//		ID identifying the form to serialize
		return _d.toJson(_d.formToObject(formNode), prettyPrint); // String
	}

	dojo.queryToObject = function(/*String*/ str){
		// summary:
		//		Create an object representing a de-serialized query section of a
		//		URL. Query keys with multiple values are returned in an array.
		//
		// example:
		//		This string:
		//
		//	|		"foo=bar&foo=baz&thinger=%20spaces%20=blah&zonk=blarg&"
		//
		//		results in this object structure:
		//
		//	|		{
		//	|			foo: [ "bar", "baz" ],
		//	|			thinger: " spaces =blah",
		//	|			zonk: "blarg"
		//	|		}
		//
		//		Note that spaces and other urlencoded entities are correctly
		//		handled.
		// FIXME: should we grab the URL string if we're not passed one?
		var ret = {};
		var qp = str.split("&");
		var dec = decodeURIComponent;
		_d.forEach(qp, function(item){
			if(item.length){
				var parts = item.split("=");
				var name = dec(parts.shift());
				// re-join so a value containing "=" survives the split
				var val = dec(parts.join("="));
				if(typeof ret[name] == "string"){ // inline'd type check
					// second occurrence of this key: promote to array
					ret[name] = [ret[name]];
				}

				if(_d.isArray(ret[name])){
					ret[name].push(val);
				}else{
					ret[name] = val;
				}
			}
		});
		return ret; // Object
	}

	// need to block async callbacks from snatching this thread as the result
	// of an async callback might call another sync XHR, this hangs khtml forever
	// must be checked by watchInFlight()

	dojo._blockAsync = false;

	// MOW: remove dojo._contentHandlers alias in 2.0
	var handlers = _d._contentHandlers = dojo.contentHandlers = {
		// summary:
		//		A map of available XHR transport handle types. Name matches the
		//		`handleAs` attribute passed to XHR calls.
		//
		// description:
		//		A map of available XHR transport handle types. Name matches the
		//		`handleAs` attribute passed to XHR calls. Each contentHandler is
		//		called, passing the xhr object for manipulation. The return value
		//		from the contentHandler will be passed to the `load` or `handle`
		//		functions defined in the original xhr call.
		//
		// example:
		//		Creating a custom content-handler:
		//	|	dojo.contentHandlers.makeCaps = function(xhr){
		//	|		return xhr.responseText.toUpperCase();
		//	|	}
		//	|	// and later:
		//	|	dojo.xhrGet({
		//	|		url:"foo.txt",
		//	|		handleAs:"makeCaps",
		//	|		load: function(data){ /* data is a toUpper version of foo.txt */ }
		//	|	});

		text: function(xhr){
			// summary: A contentHandler which simply returns the plaintext response data
			return xhr.responseText;
		},
		json: function(xhr){
			// summary: A contentHandler which returns a JavaScript object created from the response data
			return _d.fromJson(xhr.responseText || null);
		},
		"json-comment-filtered": function(xhr){
			// summary: A contentHandler which expects comment-filtered JSON.
			// description:
			//		A contentHandler which expects comment-filtered JSON.
// the json-comment-filtered option was implemented to prevent // "JavaScript Hijacking", but it is less secure than standard JSON. Use // standard JSON instead. JSON prefixing can be used to subvert hijacking. // // Will throw a notice suggesting to use application/json mimetype, as // json-commenting can introduce security issues. To decrease the chances of hijacking, // use the standard `json` contentHandler, and prefix your "JSON" with: {}&& // // use djConfig.useCommentedJson = true to turn off the notice if(!dojo.config.useCommentedJson){ console.warn("Consider using the standard mimetype:application/json." + " json-commenting can introduce security issues. To" + " decrease the chances of hijacking, use the standard the 'json' handler and" + " prefix your json with: {}&&\n" + "Use djConfig.useCommentedJson=true to turn off this message."); } var value = xhr.responseText; var cStartIdx = value.indexOf("\/*"); var cEndIdx = value.lastIndexOf("*\/"); if(cStartIdx == -1 || cEndIdx == -1){ throw new Error("JSON was not comment filtered"); } return _d.fromJson(value.substring(cStartIdx+2, cEndIdx)); }, javascript: function(xhr){ // summary: A contentHandler which evaluates the response data, expecting it to be valid JavaScript // FIXME: try Moz and IE specific eval variants? return _d.eval(xhr.responseText); }, xml: function(xhr){ // summary: A contentHandler returning an XML Document parsed from the response data var result = xhr.responseXML; if(_d.isIE && (!result || !result.documentElement)){ //WARNING: this branch used by the xml handling in dojo.io.iframe, //so be sure to test dojo.io.iframe if making changes below. 
				// try progressively older MSXML ProgIDs until one parses
				var ms = function(n){
					return "MSXML" + n + ".DOMDocument";
				}
				var dp = ["Microsoft.XMLDOM", ms(6), ms(4), ms(3), ms(2)];
				_d.some(dp, function(p){
					try{
						var dom = new ActiveXObject(p);
						dom.async = false;
						dom.loadXML(xhr.responseText);
						result = dom;
					}catch(e){ return false; } // ProgID unavailable; try the next one
					return true;
				});
			}
			return result; // DOMDocument
		},
		"json-comment-optional": function(xhr){
			// summary: A contentHandler which checks the presence of comment-filtered JSON and
			//		alternates between the `json` and `json-comment-filtered` contentHandlers.
			// a leading /* (before any { or [) signals comment-filtered content
			if(xhr.responseText && /^[^{\[]*\/\*/.test(xhr.responseText)){
				return handlers["json-comment-filtered"](xhr);
			}else{
				return handlers["json"](xhr);
			}
		}
	};

	/*=====
	dojo.__IoArgs = function(){
		//	url: String
		//		URL to server endpoint.
		//	content: Object?
		//		Contains properties with string values. These
		//		properties will be serialized as name1=value2 and
		//		passed in the request.
		//	timeout: Integer?
		//		Milliseconds to wait for the response. If this time
		//		passes, then the error callbacks are called.
		//	form: DOMNode?
		//		DOM node for a form. Used to extract the form values
		//		and send to the server.
		//	preventCache: Boolean?
		//		Default is false. If true, then a
		//		"dojo.preventCache" parameter is sent in the request
		//		with a value that changes with each request
		//		(timestamp). Useful only with GET-type requests.
		//	handleAs: String?
		//		Acceptable values depend on the type of IO
		//		transport (see specific IO calls for more information).
		// 	rawBody: String?
		// 		Sets the raw body for an HTTP request. If this is used, then the content
		// 		property is ignored. This is mostly useful for HTTP methods that have
		// 		a body to their requests, like PUT or POST. This property can be used instead
		// 		of postData and putData for dojo.rawXhrPost and dojo.rawXhrPut respectively.
		//	ioPublish: Boolean?
		//		Set this explicitly to false to prevent publishing of topics related to
		// 		IO operations. Otherwise, if djConfig.ioPublish is set to true, topics
		// 		will be published via dojo.publish for different phases of an IO operation.
		// 		See dojo.__IoPublish for a list of topics that are published.
		//	load: Function?
		//		This function will be
		//		called on a successful HTTP response code.
		//	error: Function?
		//		This function will
		//		be called when the request fails due to a network or server error, the url
		//		is invalid, etc. It will also be called if the load or handle callback throws an
		//		exception, unless djConfig.debugAtAllCosts is true.  This allows deployed applications
		//		to continue to run even when a logic error happens in the callback, while making
		//		it easier to troubleshoot while in debug mode.
		//	handle: Function?
		//		This function will
		//		be called at the end of every request, whether or not an error occurs.
		this.url = url;
		this.content = content;
		this.timeout = timeout;
		this.form = form;
		this.preventCache = preventCache;
		this.handleAs = handleAs;
		this.ioPublish = ioPublish;
		this.load = function(response, ioArgs){
			//	ioArgs: dojo.__IoCallbackArgs
			//		Provides additional information about the request.
			//	response: Object
			//		The response in the format as defined with handleAs.
		}
		this.error = function(response, ioArgs){
			//	ioArgs: dojo.__IoCallbackArgs
			//		Provides additional information about the request.
			//	response: Object
			//		The response in the format as defined with handleAs.
		}
		this.handle = function(loadOrError, response, ioArgs){
			//	loadOrError: String
			//		Provides a string that tells you whether this function
			//		was called because of success (load) or failure (error).
			//	response: Object
			//		The response in the format as defined with handleAs.
			//	ioArgs: dojo.__IoCallbackArgs
			//		Provides additional information about the request.
		}
	}
	=====*/

	/*=====
	dojo.__IoCallbackArgs = function(args, xhr, url, query, handleAs, id, canDelete, json){
		//	args: Object
		//		the original object argument to the IO call.
		//	xhr: XMLHttpRequest
		//		For XMLHttpRequest calls only, the
		//		XMLHttpRequest object that was used for the
		//		request.
		//	url: String
		//		The final URL used for the call. Many times it
		//		will be different than the original args.url
		//		value.
		//	query: String
		//		For non-GET requests, the
		//		name1=value1&name2=value2 parameters sent up in
		//		the request.
		//	handleAs: String
		//		The final indicator on how the response will be
		//		handled.
		//	id: String
		//		For dojo.io.script calls only, the internal
		//		script ID used for the request.
		//	canDelete: Boolean
		//		For dojo.io.script calls only, indicates
		//		whether the script tag that represents the
		//		request can be deleted after callbacks have
		//		been called. Used internally to know when
		//		cleanup can happen on JSONP-type requests.
		//	json: Object
		//		For dojo.io.script calls only: holds the JSON
		//		response for JSONP-type requests. Used
		//		internally to hold on to the JSON responses.
		//		You should not need to access it directly --
		//		the same object should be passed to the success
		//		callbacks directly.
		this.args = args;
		this.xhr = xhr;
		this.url = url;
		this.query = query;
		this.handleAs = handleAs;
		this.id = id;
		this.canDelete = canDelete;
		this.json = json;
	}
	=====*/

	/*=====
	dojo.__IoPublish = function(){
		// 	summary:
		// 		This is a list of IO topics that can be published
		// 		if djConfig.ioPublish is set to true. IO topics can be
		// 		published for any Input/Output, network operation. So,
		// 		dojo.xhr, dojo.io.script and dojo.io.iframe can all
		// 		trigger these topics to be published.
		//	start: String
		//		"/dojo/io/start" is sent when there are no outstanding IO
		// 		requests, and a new IO request is started. No arguments
		// 		are passed with this topic.
		//	send: String
		// 		"/dojo/io/send" is sent whenever a new IO request is started.
		// 		It passes the dojo.Deferred for the request with the topic.
		//	load: String
		// 		"/dojo/io/load" is sent whenever an IO request has loaded
		// 		successfully. It passes the response and the dojo.Deferred
		// 		for the request with the topic.
		//	error: String
		// 		"/dojo/io/error" is sent whenever an IO request has errored.
		// 		It passes the error and the dojo.Deferred
		// 		for the request with the topic.
		//	done: String
		// 		"/dojo/io/done" is sent whenever an IO request has completed,
		// 		either by loading or by erroring. It passes the error and
		// 		the dojo.Deferred for the request with the topic.
		//	stop: String
		// 		"/dojo/io/stop" is sent when all outstanding IO requests have
		// 		finished. No arguments are passed with this topic.
		this.start = "/dojo/io/start";
		this.send = "/dojo/io/send";
		this.load = "/dojo/io/load";
		this.error = "/dojo/io/error";
		this.done = "/dojo/io/done";
		this.stop = "/dojo/io/stop";
	}
	=====*/

	dojo._ioSetArgs = function(/*dojo.__IoArgs*/args,
			/*Function*/canceller,
			/*Function*/okHandler,
			/*Function*/errHandler){
		//	summary:
		//		sets up the Deferred and ioArgs property on the Deferred so it
		//		can be used in an io call.
		//	args:
		//		The args object passed into the public io call. Recognized properties on
		//		the args object are:
		//	canceller:
		//		The canceller function used for the Deferred object. The function
		//		will receive one argument, the Deferred object that is related to the
		//		canceller.
		//	okHandler:
		//		The first OK callback to be registered with Deferred. It has the opportunity
		//		to transform the OK response. It will receive one argument -- the Deferred
		//		object returned from this function.
		//	errHandler:
		//		The first error callback to be registered with Deferred. It has the opportunity
		//		to do cleanup on an error. It will receive two arguments: error (the
		//		Error object) and dfd, the Deferred object returned from this function.

		var ioArgs = {args: args, url: args.url};

		//Get values from form if requested.
		var formObject = null;
		if(args.form){
			var form = _d.byId(args.form);
			//IE requires going through getAttributeNode instead of just getAttribute in some form cases,
			//so use it for all. See #2844
			var actnNode = form.getAttributeNode("action");
			// args.url wins; otherwise fall back to the form's action attribute
			ioArgs.url = ioArgs.url || (actnNode ? actnNode.value : null);
			formObject = _d.formToObject(form);
		}

		// set up the query params; later entries in miArgs override earlier
		// ones when mixed together below
		var miArgs = [{}];

		if(formObject){
			// potentially over-ride url-provided params w/ form values
			miArgs.push(formObject);
		}
		if(args.content){
			// stuff in content over-rides what's set by form
			miArgs.push(args.content);
		}
		if(args.preventCache){
			// cache-buster parameter: changes on every request
			miArgs.push({"dojo.preventCache": new Date().valueOf()});
		}
		ioArgs.query = _d.objectToQuery(_d.mixin.apply(null, miArgs));

		// .. and the real work of getting the deferred in order, etc.
		ioArgs.handleAs = args.handleAs || "text";
		var d = new _d.Deferred(canceller);
		d.addCallbacks(okHandler, function(error){
			return errHandler(error, d);
		});

		//Support specifying load, error and handle callback functions from the args.
		//For those callbacks, the "this" object will be the args object.
		//The callbacks will get the deferred result value as the
		//first argument and the ioArgs object as the second argument.
		var ld = args.load;
		if(ld && _d.isFunction(ld)){
			d.addCallback(function(value){
				return ld.call(args, value, ioArgs);
			});
		}
		var err = args.error;
		if(err && _d.isFunction(err)){
			d.addErrback(function(value){
				return err.call(args, value, ioArgs);
			});
		}
		var handle = args.handle;
		if(handle && _d.isFunction(handle)){
			d.addBoth(function(value){
				return handle.call(args, value, ioArgs);
			});
		}

		//Plug in topic publishing, if dojo.publish is loaded.
if(cfg.ioPublish && _d.publish && ioArgs.args.ioPublish !== false){ d.addCallbacks( function(res){ _d.publish("/dojo/io/load", [d, res]); return res; }, function(res){ _d.publish("/dojo/io/error", [d, res]); return res; } ); d.addBoth(function(res){ _d.publish("/dojo/io/done", [d, res]); return res; }); } d.ioArgs = ioArgs; // FIXME: need to wire up the xhr object's abort method to something // analagous in the Deferred return d; } var _deferredCancel = function(/*Deferred*/dfd){ // summary: canceller function for dojo._ioSetArgs call. dfd.canceled = true; var xhr = dfd.ioArgs.xhr; var _at = typeof xhr.abort; if(_at == "function" || _at == "object" || _at == "unknown"){ xhr.abort(); } var err = dfd.ioArgs.error; if(!err){ err = new Error("xhr cancelled"); err.dojoType="cancel"; } return err; } var _deferredOk = function(/*Deferred*/dfd){ // summary: okHandler function for dojo._ioSetArgs call. var ret = handlers[dfd.ioArgs.handleAs](dfd.ioArgs.xhr); return ret === undefined ? null : ret; } var _deferError = function(/*Error*/error, /*Deferred*/dfd){ // summary: errHandler function for dojo._ioSetArgs call. if(!dfd.ioArgs.args.failOk){ console.error(error); } return error; } // avoid setting a timer per request. It degrades performance on IE // something fierece if we don't use unified loops. var _inFlightIntvl = null; var _inFlight = []; //Use a separate count for knowing if we are starting/stopping io calls. //Cannot use _inFlight.length since it can change at a different time than //when we want to do this kind of test. We only want to decrement the count //after a callback/errback has finished, since the callback/errback should be //considered as part of finishing a request. 
// Number of requests considered "in progress" for topic-publishing purposes.
var _pubCount = 0;

var _checkPubCount = function(dfd){
	// summary:
	//		Publishes "/dojo/io/stop" once the outstanding-request count
	//		drains to zero (and clamps the counter at 0). Respects the
	//		per-request ioPublish === false opt-out on the passed Deferred.
	if(_pubCount <= 0){
		_pubCount = 0;
		if(cfg.ioPublish && _d.publish && (!dfd || dfd && dfd.ioArgs.args.ioPublish !== false)){
			_d.publish("/dojo/io/stop");
		}
	}
};

var _watchInFlight = function(){
	//summary:
	//		internal method that checks each inflight XMLHttpRequest to see
	//		if it has completed or if the timeout situation applies.

	var now = (new Date()).getTime();

	// make sure sync calls stay thread safe, if this callback is called
	// during a sync call and this results in another sync call before the
	// first sync call ends the browser hangs
	if(!_d._blockAsync){
		// we need manual loop because we often modify _inFlight (and therefore 'i') while iterating
		// note: the second clause is an assignment on purpose, lint may complain
		for(var i = 0, tif; i < _inFlight.length && (tif = _inFlight[i]); i++){
			var dfd = tif.dfd;
			// func is built fresh each iteration and invoked synchronously
			// below, so the dfd/tif/i it closes over are always current;
			// the wrapper exists only so the error handling can be applied
			// uniformly in the non-debug path.
			var func = function(){
				if(!dfd || dfd.canceled || !tif.validCheck(dfd)){
					// cancelled or no longer valid: drop the record.
					// splice(i--, 1) removes the current entry and steps the
					// index back so the next loop pass does not skip an item.
					_inFlight.splice(i--, 1);
					_pubCount -= 1;
				}else if(tif.ioCheck(dfd)){
					// transport reports completion: remove, then dispatch.
					_inFlight.splice(i--, 1);
					tif.resHandle(dfd);
					_pubCount -= 1;
				}else if(dfd.startTime){
					//did we timeout?
					if(dfd.startTime + (dfd.ioArgs.args.timeout || 0) < now){
						_inFlight.splice(i--, 1);
						var err = new Error("timeout exceeded");
						err.dojoType = "timeout";
						dfd.errback(err);
						//Cancel the request so the io module can do appropriate cleanup.
						dfd.cancel();
						_pubCount -= 1;
					}
				}
			};
			if(dojo.config.debugAtAllCosts){
				// debug mode: let exceptions surface to the debugger.
				func.call(this);
			}else{
				try{
					func.call(this);
				}catch(e){
					dfd.errback(e);
				}
			}
		}
	}

	// NOTE: dfd is the loop variable above — thanks to var hoisting it is
	// visible here and holds the last Deferred examined (or undefined when
	// the loop did not run); _checkPubCount only reads its ioPublish flag.
	_checkPubCount(dfd);

	// Nothing left in flight: stop the shared polling interval.
	if(!_inFlight.length){
		clearInterval(_inFlightIntvl);
		_inFlightIntvl = null;
		return;
	}
}

dojo._ioCancelAll = function(){
	//summary: Cancels all pending IO requests, regardless of IO type
	//(xhr, script, iframe).
	// Best-effort: each cancel is individually squelched so one failing
	// transport cannot prevent the others from being cancelled.
	try{
		_d.forEach(_inFlight, function(i){
			try{
				i.dfd.cancel();
			}catch(e){/*squelch*/}
		});
	}catch(e){/*squelch*/}
}

//Automatically call cancel all io calls on unload
//in IE for trac issue #2357.
// IE leaks/hangs on pending requests across page unload (trac #2357),
// so cancel everything when the window goes away.
if(_d.isIE){
	_d.addOnWindowUnload(_d._ioCancelAll);
}

_d._ioNotifyStart = function(/*Deferred*/dfd){
	// summary:
	//		If dojo.publish is available, publish topics
	//		about the start of a request queue and/or the
	//		the beginning of request.
	// description:
	//		Used by IO transports. An IO transport should
	//		call this method before making the network connection.
	if(cfg.ioPublish && _d.publish && dfd.ioArgs.args.ioPublish !== false){
		// "/dojo/io/start" fires only when the queue goes from empty to busy.
		if(!_pubCount){
			_d.publish("/dojo/io/start");
		}
		_pubCount += 1;
		_d.publish("/dojo/io/send", [dfd]);
	}
}

_d._ioWatch = function(dfd, validCheck, ioCheck, resHandle){
	// summary:
	//		Watches the io request represented by dfd to see if it completes.
	// dfd: Deferred
	//		The Deferred object to watch.
	// validCheck: Function
	//		Function used to check if the IO request is still valid. Gets the dfd
	//		object as its only argument.
	// ioCheck: Function
	//		Function used to check if basic IO call worked. Gets the dfd
	//		object as its only argument.
	// resHandle: Function
	//		Function used to process response. Gets the dfd
	//		object as its only argument.
	var args = dfd.ioArgs.args;
	// startTime is only set when a timeout was requested; _watchInFlight
	// uses its presence to decide whether to apply timeout checking.
	if(args.timeout){
		dfd.startTime = (new Date()).getTime();
	}

	_inFlight.push({dfd: dfd, validCheck: validCheck, ioCheck: ioCheck, resHandle: resHandle});
	// One shared 50ms poller for all requests (see note above about IE
	// performance with per-request timers); started lazily here.
	if(!_inFlightIntvl){
		_inFlightIntvl = setInterval(_watchInFlight, 50);
	}
	// handle sync requests
	//A weakness: async calls in flight
	//could have their handlers called as part of the
	//_watchInFlight call, before the sync's callbacks
	// are called.
	if(args.sync){
		_watchInFlight();
	}
}

var _defaultContentType = "application/x-www-form-urlencoded";

var _validCheck = function(/*Deferred*/dfd){
	// Truthy while the XHR has been opened (readyState > 0).
	return dfd.ioArgs.xhr.readyState; //boolean
}
var _ioCheck = function(/*Deferred*/dfd){
	// readyState 4 === DONE.
	return 4 == dfd.ioArgs.xhr.readyState; //boolean
}
var _resHandle = function(/*Deferred*/dfd){
	// Completion dispatcher: fire callback on an OK HTTP status
	// (per _d._isDocumentOk), otherwise build an error carrying the
	// status and response text and fire the errback chain.
	var xhr = dfd.ioArgs.xhr;
	if(_d._isDocumentOk(xhr)){
		dfd.callback(dfd);
	}else{
		var err = new Error("Unable to load " + dfd.ioArgs.url + " status:" + xhr.status);
		err.status = xhr.status;
		err.responseText = xhr.responseText;
		dfd.errback(err);
	}
}

dojo._ioAddQueryToUrl = function(/*dojo.__IoCallbackArgs*/ioArgs){
	//summary: Adds query params discovered by the io deferred construction to the URL.
	//Only use this for operations which are fundamentally GET-type operations.
	if(ioArgs.query.length){
		ioArgs.url += (ioArgs.url.indexOf("?") == -1 ? "?" : "&") + ioArgs.query;
		// Clear the query so it cannot be appended (or sent as a body) twice.
		ioArgs.query = null;
	}
}

/*=====
dojo.declare("dojo.__XhrArgs", dojo.__IoArgs, {
	constructor: function(){
		//	summary:
		//		In addition to the properties listed for the dojo._IoArgs type,
		//		the following properties are allowed for dojo.xhr* methods.
		//	handleAs: String?
		//		Acceptable values are: text (default), json, json-comment-optional,
		//		json-comment-filtered, javascript, xml. See `dojo.contentHandlers`
		//	sync: Boolean?
		//		false is default. Indicates whether the request should
		//		be a synchronous (blocking) request.
		//	headers: Object?
		//		Additional HTTP headers to send in the request.
		//	failOk: Boolean?
		//		false is default. Indicates whether a request should be
		//		allowed to fail (and therefore no console error message in
		//		the event of a failure)
		this.handleAs = handleAs;
		this.sync = sync;
		this.headers = headers;
		this.failOk = failOk;
	}
});
=====*/

dojo.xhr = function(/*String*/ method, /*dojo.__XhrArgs*/ args, /*Boolean?*/ hasBody){
	//	summary:
	//		Sends an HTTP request with the given method.
	//	description:
	//		Sends an HTTP request with the given method.
// See also dojo.xhrGet(), xhrPost(), xhrPut() and dojo.xhrDelete() for shortcuts // for those HTTP methods. There are also methods for "raw" PUT and POST methods // via dojo.rawXhrPut() and dojo.rawXhrPost() respectively. // method: // HTTP method to be used, such as GET, POST, PUT, DELETE. Should be uppercase. // hasBody: // If the request has an HTTP body, then pass true for hasBody. //Make the Deferred object for this xhr request. var dfd = _d._ioSetArgs(args, _deferredCancel, _deferredOk, _deferError); var ioArgs = dfd.ioArgs; //Pass the args to _xhrObj, to allow alternate XHR calls based specific calls, like //the one used for iframe proxies. var xhr = ioArgs.xhr = _d._xhrObj(ioArgs.args); //If XHR factory fails, cancel the deferred. if(!xhr){ dfd.cancel(); return dfd; } //Allow for specifying the HTTP body completely. if("postData" in args){ ioArgs.query = args.postData; }else if("putData" in args){ ioArgs.query = args.putData; }else if("rawBody" in args){ ioArgs.query = args.rawBody; }else if((arguments.length > 2 && !hasBody) || "POST|PUT".indexOf(method.toUpperCase()) == -1){ //Check for hasBody being passed. If no hasBody, //then only append query string if not a POST or PUT request. _d._ioAddQueryToUrl(ioArgs); } // IE 6 is a steaming pile. It won't let you call apply() on the native function (xhr.open). // workaround for IE6's apply() "issues" xhr.open(method, ioArgs.url, args.sync !== true, args.user || undefined, args.password || undefined); if(args.headers){ for(var hdr in args.headers){ if(hdr.toLowerCase() === "content-type" && !args.contentType){ args.contentType = args.headers[hdr]; }else if(args.headers[hdr]){ //Only add header if it has a value. This allows for instnace, skipping //insertion of X-Requested-With by specifying empty value. xhr.setRequestHeader(hdr, args.headers[hdr]); } } } // FIXME: is this appropriate for all content types? 
xhr.setRequestHeader("Content-Type", args.contentType || _defaultContentType); if(!args.headers || !("X-Requested-With" in args.headers)){ xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest"); } // FIXME: set other headers here! _d._ioNotifyStart(dfd); if(dojo.config.debugAtAllCosts){ xhr.send(ioArgs.query); }else{ try{ xhr.send(ioArgs.query); }catch(e){ ioArgs.error = e; dfd.cancel(); } } _d._ioWatch(dfd, _validCheck, _ioCheck, _resHandle); xhr = null; return dfd; // dojo.Deferred } dojo.xhrGet = function(/*dojo.__XhrArgs*/ args){ // summary: // Sends an HTTP GET request to the server. return _d.xhr("GET", args); // dojo.Deferred } dojo.rawXhrPost = dojo.xhrPost = function(/*dojo.__XhrArgs*/ args){ // summary: // Sends an HTTP POST request to the server. In addtion to the properties // listed for the dojo.__XhrArgs type, the following property is allowed: // postData: // String. Send raw data in the body of the POST request. return _d.xhr("POST", args, true); // dojo.Deferred } dojo.rawXhrPut = dojo.xhrPut = function(/*dojo.__XhrArgs*/ args){ // summary: // Sends an HTTP PUT request to the server. In addtion to the properties // listed for the dojo.__XhrArgs type, the following property is allowed: // putData: // String. Send raw data in the body of the PUT request. return _d.xhr("PUT", args, true); // dojo.Deferred } dojo.xhrDelete = function(/*dojo.__XhrArgs*/ args){ // summary: // Sends an HTTP DELETE request to the server. return _d.xhr("DELETE", args); //dojo.Deferred } /* dojo.wrapForm = function(formNode){ //summary: // A replacement for FormBind, but not implemented yet. // FIXME: need to think harder about what extensions to this we might // want. What should we allow folks to do w/ this? What events to // set/send? throw new Error("dojo.wrapForm not yet implemented"); } */ })(); } if(!dojo._hasResource["dojo._base.fx"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. 
dojo._hasResource["dojo._base.fx"] = true; dojo.provide("dojo._base.fx"); /* Animation loosely package based on Dan Pupius' work, contributed under CLA: http://pupius.co.uk/js/Toolkit.Drawing.js */ (function(){ var d = dojo; var _mixin = d._mixin; dojo._Line = function(/*int*/ start, /*int*/ end){ // summary: // dojo._Line is the object used to generate values from a start value // to an end value // start: int // Beginning value for range // end: int // Ending value for range this.start = start; this.end = end; }; dojo._Line.prototype.getValue = function(/*float*/ n){ // summary: Returns the point on the line // n: a floating point number greater than 0 and less than 1 return ((this.end - this.start) * n) + this.start; // Decimal }; dojo.Animation = function(args){ // summary: // A generic animation class that fires callbacks into its handlers // object at various states. // description: // A generic animation class that fires callbacks into its handlers // object at various states. Nearly all dojo animation functions // return an instance of this method, usually without calling the // .play() method beforehand. Therefore, you will likely need to // call .play() on instances of `dojo.Animation` when one is // returned. // args: Object // The 'magic argument', mixing all the properties into this // animation instance. _mixin(this, args); if(d.isArray(this.curve)){ this.curve = new d._Line(this.curve[0], this.curve[1]); } }; // Alias to drop come 2.0: d._Animation = d.Animation; d.extend(dojo.Animation, { // duration: Integer // The time in milliseonds the animation will take to run duration: 350, /*===== // curve: dojo._Line|Array // A two element array of start and end values, or a `dojo._Line` instance to be // used in the Animation. curve: null, // easing: Function? // A Function to adjust the acceleration (or deceleration) of the progress // across a dojo._Line easing: null, =====*/ // repeat: Integer? 
// The number of times to loop the animation repeat: 0, // rate: Integer? // the time in milliseconds to wait before advancing to next frame // (used as a fps timer: 1000/rate = fps) rate: 20 /* 50 fps */, /*===== // delay: Integer? // The time in milliseconds to wait before starting animation after it // has been .play()'ed delay: null, // beforeBegin: Event? // Synthetic event fired before a dojo.Animation begins playing (synchronous) beforeBegin: null, // onBegin: Event? // Synthetic event fired as a dojo.Animation begins playing (useful?) onBegin: null, // onAnimate: Event? // Synthetic event fired at each interval of a `dojo.Animation` onAnimate: null, // onEnd: Event? // Synthetic event fired after the final frame of a `dojo.Animation` onEnd: null, // onPlay: Event? // Synthetic event fired any time a `dojo.Animation` is play()'ed onPlay: null, // onPause: Event? // Synthetic event fired when a `dojo.Animation` is paused onPause: null, // onStop: Event // Synthetic event fires when a `dojo.Animation` is stopped onStop: null, =====*/ _percent: 0, _startRepeatCount: 0, _getStep: function(){ var _p = this._percent, _e = this.easing ; return _e ? _e(_p) : _p; }, _fire: function(/*Event*/ evt, /*Array?*/ args){ // summary: // Convenience function. Fire event "evt" and pass it the // arguments specified in "args". // description: // Convenience function. Fire event "evt" and pass it the // arguments specified in "args". // Fires the callback in the scope of the `dojo.Animation` // instance. // evt: // The event to fire. // args: // The arguments to pass to the event. var a = args||[]; if(this[evt]){ if(d.config.debugAtAllCosts){ this[evt].apply(this, a); }else{ try{ this[evt].apply(this, a); }catch(e){ // squelch and log because we shouldn't allow exceptions in // synthetic event handlers to cause the internal timer to run // amuck, potentially pegging the CPU. 
I'm not a fan of this // squelch, but hopefully logging will make it clear what's // going on console.error("exception in animation handler for:", evt); console.error(e); } } } return this; // dojo.Animation }, play: function(/*int?*/ delay, /*Boolean?*/ gotoStart){ // summary: // Start the animation. // delay: // How many milliseconds to delay before starting. // gotoStart: // If true, starts the animation from the beginning; otherwise, // starts it from its current position. // returns: dojo.Animation // The instance to allow chaining. var _t = this; if(_t._delayTimer){ _t._clearTimer(); } if(gotoStart){ _t._stopTimer(); _t._active = _t._paused = false; _t._percent = 0; }else if(_t._active && !_t._paused){ return _t; } _t._fire("beforeBegin", [_t.node]); var de = delay || _t.delay, _p = dojo.hitch(_t, "_play", gotoStart); if(de > 0){ _t._delayTimer = setTimeout(_p, de); return _t; } _p(); return _t; }, _play: function(gotoStart){ var _t = this; if(_t._delayTimer){ _t._clearTimer(); } _t._startTime = new Date().valueOf(); if(_t._paused){ _t._startTime -= _t.duration * _t._percent; } _t._active = true; _t._paused = false; var value = _t.curve.getValue(_t._getStep()); if(!_t._percent){ if(!_t._startRepeatCount){ _t._startRepeatCount = _t.repeat; } _t._fire("onBegin", [value]); } _t._fire("onPlay", [value]); _t._cycle(); return _t; // dojo.Animation }, pause: function(){ // summary: Pauses a running animation. var _t = this; if(_t._delayTimer){ _t._clearTimer(); } _t._stopTimer(); if(!_t._active){ return _t; /*dojo.Animation*/ } _t._paused = true; _t._fire("onPause", [_t.curve.getValue(_t._getStep())]); return _t; // dojo.Animation }, gotoPercent: function(/*Decimal*/ percent, /*Boolean?*/ andPlay){ // summary: // Sets the progress of the animation. // percent: // A percentage in decimal notation (between and including 0.0 and 1.0). // andPlay: // If true, play the animation after setting the progress. 
var _t = this; _t._stopTimer(); _t._active = _t._paused = true; _t._percent = percent; if(andPlay){ _t.play(); } return _t; // dojo.Animation }, stop: function(/*boolean?*/ gotoEnd){ // summary: Stops a running animation. // gotoEnd: If true, the animation will end. var _t = this; if(_t._delayTimer){ _t._clearTimer(); } if(!_t._timer){ return _t; /* dojo.Animation */ } _t._stopTimer(); if(gotoEnd){ _t._percent = 1; } _t._fire("onStop", [_t.curve.getValue(_t._getStep())]); _t._active = _t._paused = false; return _t; // dojo.Animation }, status: function(){ // summary: // Returns a string token representation of the status of // the animation, one of: "paused", "playing", "stopped" if(this._active){ return this._paused ? "paused" : "playing"; // String } return "stopped"; // String }, _cycle: function(){ var _t = this; if(_t._active){ var curr = new Date().valueOf(); var step = (curr - _t._startTime) / (_t.duration); if(step >= 1){ step = 1; } _t._percent = step; // Perform easing if(_t.easing){ step = _t.easing(step); } _t._fire("onAnimate", [_t.curve.getValue(step)]); if(_t._percent < 1){ _t._startTimer(); }else{ _t._active = false; if(_t.repeat > 0){ _t.repeat--; _t.play(null, true); }else if(_t.repeat == -1){ _t.play(null, true); }else{ if(_t._startRepeatCount){ _t.repeat = _t._startRepeatCount; _t._startRepeatCount = 0; } } _t._percent = 0; _t._fire("onEnd", [_t.node]); !_t.repeat && _t._stopTimer(); } } return _t; // dojo.Animation }, _clearTimer: function(){ // summary: Clear the play delay timer clearTimeout(this._delayTimer); delete this._delayTimer; } }); // the local timer, stubbed into all Animation instances var ctr = 0, timer = null, runner = { run: function(){} }; d.extend(d.Animation, { _startTimer: function(){ if(!this._timer){ this._timer = d.connect(runner, "run", this, "_cycle"); ctr++; } if(!timer){ timer = setInterval(d.hitch(runner, "run"), this.rate); } }, _stopTimer: function(){ if(this._timer){ d.disconnect(this._timer); this._timer = null; 
ctr--; } if(ctr <= 0){ clearInterval(timer); timer = null; ctr = 0; } } }); var _makeFadeable = d.isIE ? function(node){ // only set the zoom if the "tickle" value would be the same as the // default var ns = node.style; // don't set the width to auto if it didn't already cascade that way. // We don't want to f anyones designs if(!ns.width.length && d.style(node, "width") == "auto"){ ns.width = "auto"; } } : function(){}; dojo._fade = function(/*Object*/ args){ // summary: // Returns an animation that will fade the node defined by // args.node from the start to end values passed (args.start // args.end) (end is mandatory, start is optional) args.node = d.byId(args.node); var fArgs = _mixin({ properties: {} }, args), props = (fArgs.properties.opacity = {}); props.start = !("start" in fArgs) ? function(){ return +d.style(fArgs.node, "opacity")||0; } : fArgs.start; props.end = fArgs.end; var anim = d.animateProperty(fArgs); d.connect(anim, "beforeBegin", d.partial(_makeFadeable, fArgs.node)); return anim; // dojo.Animation }; /*===== dojo.__FadeArgs = function(node, duration, easing){ // node: DOMNode|String // The node referenced in the animation // duration: Integer? // Duration of the animation in milliseconds. // easing: Function? // An easing function. this.node = node; this.duration = duration; this.easing = easing; } =====*/ dojo.fadeIn = function(/*dojo.__FadeArgs*/ args){ // summary: // Returns an animation that will fade node defined in 'args' from // its current opacity to fully opaque. return d._fade(_mixin({ end: 1 }, args)); // dojo.Animation }; dojo.fadeOut = function(/*dojo.__FadeArgs*/ args){ // summary: // Returns an animation that will fade node defined in 'args' // from its current opacity to fully transparent. 
return d._fade(_mixin({ end: 0 }, args)); // dojo.Animation }; dojo._defaultEasing = function(/*Decimal?*/ n){ // summary: The default easing function for dojo.Animation(s) return 0.5 + ((Math.sin((n + 1.5) * Math.PI)) / 2); }; var PropLine = function(properties){ // PropLine is an internal class which is used to model the values of // an a group of CSS properties across an animation lifecycle. In // particular, the "getValue" function handles getting interpolated // values between start and end for a particular CSS value. this._properties = properties; for(var p in properties){ var prop = properties[p]; if(prop.start instanceof d.Color){ // create a reusable temp color object to keep intermediate results prop.tempColor = new d.Color(); } } }; PropLine.prototype.getValue = function(r){ var ret = {}; for(var p in this._properties){ var prop = this._properties[p], start = prop.start; if(start instanceof d.Color){ ret[p] = d.blendColors(start, prop.end, r, prop.tempColor).toCss(); }else if(!d.isArray(start)){ ret[p] = ((prop.end - start) * r) + start + (p != "opacity" ? prop.units || "px" : 0); } } return ret; }; /*===== dojo.declare("dojo.__AnimArgs", [dojo.__FadeArgs], { // Properties: Object? // A hash map of style properties to Objects describing the transition, // such as the properties of dojo._Line with an additional 'units' property properties: {} //TODOC: add event callbacks }); =====*/ dojo.animateProperty = function(/*dojo.__AnimArgs*/ args){ // summary: // Returns an animation that will transition the properties of // node defined in `args` depending how they are defined in // `args.properties` // // description: // `dojo.animateProperty` is the foundation of most `dojo.fx` // animations. It takes an object of "properties" corresponding to // style properties, and animates them in parallel over a set // duration. // // example: // A simple animation that changes the width of the specified node. 
// | dojo.animateProperty({ // | node: "nodeId", // | properties: { width: 400 }, // | }).play(); // Dojo figures out the start value for the width and converts the // integer specified for the width to the more expressive but // verbose form `{ width: { end: '400', units: 'px' } }` which you // can also specify directly. Defaults to 'px' if ommitted. // // example: // Animate width, height, and padding over 2 seconds... the // pedantic way: // | dojo.animateProperty({ node: node, duration:2000, // | properties: { // | width: { start: '200', end: '400', units:"px" }, // | height: { start:'200', end: '400', units:"px" }, // | paddingTop: { start:'5', end:'50', units:"px" } // | } // | }).play(); // Note 'paddingTop' is used over 'padding-top'. Multi-name CSS properties // are written using "mixed case", as the hyphen is illegal as an object key. // // example: // Plug in a different easing function and register a callback for // when the animation ends. Easing functions accept values between // zero and one and return a value on that basis. In this case, an // exponential-in curve. // | dojo.animateProperty({ // | node: "nodeId", // | // dojo figures out the start value // | properties: { width: { end: 400 } }, // | easing: function(n){ // | return (n==0) ? 0 : Math.pow(2, 10 * (n - 1)); // | }, // | onEnd: function(node){ // | // called when the animation finishes. The animation // | // target is passed to this function // | } // | }).play(500); // delay playing half a second // // example: // Like all `dojo.Animation`s, animateProperty returns a handle to the // Animation instance, which fires the events common to Dojo FX. 
Use `dojo.connect` // to access these events outside of the Animation definiton: // | var anim = dojo.animateProperty({ // | node:"someId", // | properties:{ // | width:400, height:500 // | } // | }); // | dojo.connect(anim,"onEnd", function(){ // | console.log("animation ended"); // | }); // | // play the animation now: // | anim.play(); // // example: // Each property can be a function whose return value is substituted along. // Additionally, each measurement (eg: start, end) can be a function. The node // reference is passed direcly to callbacks. // | dojo.animateProperty({ // | node:"mine", // | properties:{ // | height:function(node){ // | // shrink this node by 50% // | return dojo.position(node).h / 2 // | }, // | width:{ // | start:function(node){ return 100; }, // | end:function(node){ return 200; } // | } // | } // | }).play(); // var n = args.node = d.byId(args.node); if(!args.easing){ args.easing = d._defaultEasing; } var anim = new d.Animation(args); d.connect(anim, "beforeBegin", anim, function(){ var pm = {}; for(var p in this.properties){ // Make shallow copy of properties into pm because we overwrite // some values below. In particular if start/end are functions // we don't want to overwrite them or the functions won't be // called if the animation is reused. if(p == "width" || p == "height"){ this.node.display = "block"; } var prop = this.properties[p]; if(d.isFunction(prop)){ prop = prop(n); } prop = pm[p] = _mixin({}, (d.isObject(prop) ? prop: { end: prop })); if(d.isFunction(prop.start)){ prop.start = prop.start(n); } if(d.isFunction(prop.end)){ prop.end = prop.end(n); } var isColor = (p.toLowerCase().indexOf("color") >= 0); function getStyle(node, p){ // dojo.style(node, "height") can return "auto" or "" on IE; this is more reliable: var v = { height: node.offsetHeight, width: node.offsetWidth }[p]; if(v !== undefined){ return v; } v = d.style(node, p); return (p == "opacity") ? +v : (isColor ? 
v : parseFloat(v)); } if(!("end" in prop)){ prop.end = getStyle(n, p); }else if(!("start" in prop)){ prop.start = getStyle(n, p); } if(isColor){ prop.start = new d.Color(prop.start); prop.end = new d.Color(prop.end); }else{ prop.start = (p == "opacity") ? +prop.start : parseFloat(prop.start); } } this.curve = new PropLine(pm); }); d.connect(anim, "onAnimate", d.hitch(d, "style", anim.node)); return anim; // dojo.Animation }; dojo.anim = function( /*DOMNode|String*/ node, /*Object*/ properties, /*Integer?*/ duration, /*Function?*/ easing, /*Function?*/ onEnd, /*Integer?*/ delay){ // summary: // A simpler interface to `dojo.animateProperty()`, also returns // an instance of `dojo.Animation` but begins the animation // immediately, unlike nearly every other Dojo animation API. // description: // `dojo.anim` is a simpler (but somewhat less powerful) version // of `dojo.animateProperty`. It uses defaults for many basic properties // and allows for positional parameters to be used in place of the // packed "property bag" which is used for other Dojo animation // methods. // // The `dojo.Animation` object returned from `dojo.anim` will be // already playing when it is returned from this function, so // calling play() on it again is (usually) a no-op. // node: // a DOM node or the id of a node to animate CSS properties on // duration: // The number of milliseconds over which the animation // should run. Defaults to the global animation default duration // (350ms). // easing: // An easing function over which to calculate acceleration // and deceleration of the animation through its duration. // A default easing algorithm is provided, but you may // plug in any you wish. A large selection of easing algorithms // are available in `dojo.fx.easing`. // onEnd: // A function to be called when the animation finishes // running. // delay: // The number of milliseconds to delay beginning the // animation by. The default is 0. 
// example: // Fade out a node // | dojo.anim("id", { opacity: 0 }); // example: // Fade out a node over a full second // | dojo.anim("id", { opacity: 0 }, 1000); return d.animateProperty({ // dojo.Animation node: node, duration: duration || d.Animation.prototype.duration, properties: properties, easing: easing, onEnd: onEnd }).play(delay || 0); }; })(); } if(!dojo._hasResource["dojo._base.browser"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojo._base.browser"] = true; dojo.provide("dojo._base.browser"); //Need this to be the last code segment in base, so do not place any //dojo.requireIf calls in this file. Otherwise, due to how the build system //puts all requireIf dependencies after the current file, the require calls //could be called before all of base is defined. dojo.forEach(dojo.config.require, function(i){ dojo["require"](i); }); } /* Copyright (c) 2004-2010, The Dojo Foundation All Rights Reserved. Available via Academic Free License >= 2.1 OR the modified BSD license. see: http://dojotoolkit.org/license for details */ if(!dojo._hasResource["dojox.io.windowName"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojox.io.windowName"] = true; dojo.provide("dojox.io.windowName"); // Implements the window.name transport dojox.io.windowName = { send: function(/*String*/ method, /*dojo.__IoArgs*/ args){ // summary: // Provides secure cross-domain request capability. // Sends a request using an iframe (POST or GET) and reads the response through the // frame's window.name. // // method: // The method to use to send the request, GET or POST // // args: // See dojo.xhr // // args.authElement: DOMNode? // By providing an authElement, this indicates that windowName should use the // authorized window.name protocol, relying on // the loaded XD resource to return to the provided return URL on completion // of authorization/authentication. 
The provided authElement will be used to place // the iframe in, so the user can interact with the server resource for authentication // and/or authorization to access the resource. // // args.onAuthLoad: Function? // When using authorized access to resources, this function will be called when the // authorization page has been loaded. (When authorization is actually completed, // the deferred callback function is called with the result). The primary use for this // is to make the authElement visible to the user once the resource has loaded // (this can be preferable to showing the iframe while the resource is loading // since it may not require authorization, it may simply return the resource). // // description: // In order to provide a windowname transport accessible resources/web services, a server // should check for the presence of a parameter window.name=true and if a request includes // such a parameter, it should respond to the request with an HTML // document that sets it's window.name to the string that is to be // delivered to the client. For example, if a client makes a window.name request like: // | http://othersite.com/greeting?windowname=true // And server wants to respond to the client with "Hello", it should return an html page: // | <html><script type="text/javascript"> // | window.name="Hello"; // | </script></html> // One can provide XML or JSON data by simply quoting the data as a string, and parsing the data // on the client. 
// If you use the authorization window.name protocol, the requester should include an // authElement element in the args, and a request will be created like: // | http://othersite.com/greeting?windowname=auth // And the server can respond like this: // | <html><script type="text/javascript"> // | var loc = window.name; // | authorizationButton.onclick = function(){ // | window.name="Hello"; // | location = loc; // | }; // | </script></html> // When using windowName from a XD Dojo build, make sure to set the // dojo.dojoBlankHtmlUrl property to a local URL. args.url += (args.url.match(/\?/) ? '&' : '?') + "windowname=" + (args.authElement ? "auth" : true); // indicate our desire for window.name communication var authElement = args.authElement; var cleanup = function(result){ try{ // we have to do this to stop the wait cursor in FF var innerDoc = dfd.ioArgs.frame.contentWindow.document; innerDoc.write(" "); innerDoc.close(); }catch(e){} (authElement || dojo.body()).removeChild(dfd.ioArgs.outerFrame); // clean up return result; } var dfd = dojo._ioSetArgs(args,cleanup,cleanup,cleanup); if(args.timeout){ setTimeout(function(){ if(dfd.fired == -1){ dfd.callback(new Error("Timeout")); } }, args.timeout ); } var self = dojox.io.windowName; if(dojo.body()){ // the DOM is ready self._send(dfd, method, authElement, args.onAuthLoad); }else{ // we will wait for the DOM to be ready to proceed dojo.addOnLoad(function(){ self._send(dfd, method, authElement, args.onAuthLoad); }); } return dfd; }, _send: function(dfd, method, authTarget, onAuthLoad){ var ioArgs = dfd.ioArgs; var frameNum = dojox.io.windowName._frameNum++; var sameDomainUrl = (dojo.config.dojoBlankHtmlUrl||dojo.config.dojoCallbackUrl||dojo.moduleUrl("dojo", "resources/blank.html")) + "#" + frameNum; var frameName = new dojo._Url(window.location, sameDomainUrl); var doc = dojo.doc; var frameContainer = authTarget || dojo.body(); function styleFrame(frame){ frame.style.width="100%"; frame.style.height="100%"; 
frame.style.border="0px"; } if(dojo.isMoz && ![].reduce){ // FF2 allows unsafe sibling frame modification, // the fix for this is to create nested frames with getters and setters to protect access var outerFrame = doc.createElement("iframe"); styleFrame(outerFrame); if(!authTarget){ outerFrame.style.display='none'; } frameContainer.appendChild(outerFrame); var firstWindow = outerFrame.contentWindow; doc = firstWindow.document; doc.write("<html><body margin='0px'><iframe style='width:100%;height:100%;border:0px' name='protectedFrame'></iframe></body></html>"); doc.close(); var secondWindow = firstWindow[0]; firstWindow.__defineGetter__(0,function(){}); firstWindow.__defineGetter__("protectedFrame",function(){}); doc = secondWindow.document; doc.write("<html><body margin='0px'></body></html>"); doc.close(); frameContainer = doc.body; } /** OWF PATCH to fix createElement usage for IE9 standards mode **/ //var frame = ioArgs.frame = frame = doc.createElement(dojo.isIE ? '<iframe src="init_src.html" name="' + frameName + '" onload="'+dojox._scopeName+'.io.windowName['+frameNum+']()">' : 'iframe'); var frame = ioArgs.frame = frame = doc.createElement(dojo.isIE < 9 ? 
'<iframe src="init_src.html" name="' + frameName + '" onload="'+dojox._scopeName+'.io.windowName['+frameNum+']()">' : 'iframe'); styleFrame(frame); ioArgs.outerFrame = outerFrame = outerFrame || frame; if(!authTarget){ outerFrame.style.display='none'; } var state = 0; function getData(){ var data = frame.contentWindow.name; if(typeof data == 'string'){ if(data != frameName){ state = 2; // we are done now dfd.ioArgs.hash = frame.contentWindow.location.hash; dfd.callback(data); } } } dojox.io.windowName[frameNum] = frame.onload = function(){ try{ if(frame.contentWindow.location =='about:blank'){ // opera and safari will do an onload for about:blank first, we can ignore this first onload return; } }catch(e){ // if we are in the target domain, frame.contentWindow.location will throw an ignorable error } if(!state){ // we have loaded the target resource, now time to navigate back to our domain so we can read the frame name state=1; if(authTarget){ // call the callback so it can make it visible if(onAuthLoad){ onAuthLoad(); } }else{ // we are doing a synchronous capture, go directly to our same domain URL and retrieve the resource frame.contentWindow.location = sameDomainUrl; } } // back to our domain, we should be able to access the frame name now try{ if(state<2){ getData(); } } catch(e){ } }; frame.name = frameName; if(method.match(/GET/i)){ // if it is a GET we can just the iframe our src url dojo._ioAddQueryToUrl(ioArgs); frame.src = ioArgs.url; frameContainer.appendChild(frame); // Commented these lines out for OWF-3131: REST calls get aborted when Marketplace widget is used in OWF. 
// if(frame.contentWindow){ // frame.contentWindow.location.replace(ioArgs.url); // } }else if(method.match(/POST/i)){ // if it is a POST we will build a form to post it frameContainer.appendChild(frame); var form = dojo.doc.createElement("form"); dojo.body().appendChild(form); var query = dojo.queryToObject(ioArgs.query); for(var i in query){ var values = query[i]; values = values instanceof Array ? values : [values]; for(var j = 0; j < values.length; j++){ // create hidden inputs for all the parameters var input = doc.createElement("input"); input.type = 'hidden'; input.name = i; input.value = values[j]; form.appendChild(input); } } form.method = 'POST'; form.action = ioArgs.url; form.target = frameName;// connect the form to the iframe form.submit(); form.parentNode.removeChild(form); }else{ throw new Error("Method " + method + " not supported with the windowName transport"); } if(frame.contentWindow){ frame.contentWindow.name = frameName; // IE likes it afterwards } }, _frameNum: 0 } } if(!dojo._hasResource["dojox.secure.capability"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code. dojo._hasResource["dojox.secure.capability"] = true; dojo.provide("dojox.secure.capability"); dojox.secure.badProps = /^__|^(apply|call|callee|caller|constructor|eval|prototype|this|unwatch|valueOf|watch)$|__$/; dojox.secure.capability = { keywords: ["break", "case", "catch", "const", "continue","debugger", "default", "delete", "do", "else", "enum","false", "finally", "for", "function","if", "in", "instanceof", "new", "null","yield","return", "switch", "throw", "true", "try", "typeof", "var", "void", "while"], validate : function(/*string*/script,/*Array*/safeLibraries,/*Object*/safeGlobals) { // summary: // pass in the text of a script. If it passes and it can be eval'ed, it should be safe. // Note that this does not do full syntax checking, it relies on eval to reject invalid scripts. 
// There are also known false rejections: // Nesting vars inside blocks will not declare the variable for the outer block // Named functions are not treated as declaration so they are generally not allowed unless the name is declared with a var. // Var declaration that involve multiple comma delimited variable assignments are not accepted // // script: // the script to execute // // safeLibraries: // The safe libraries that can be called (the functions can not be access/modified by the untrusted code, only called) // // safeGlobals: // These globals can be freely interacted with by the untrusted code var keywords = this.keywords; for (var i = 0; i < keywords.length; i++) { safeGlobals[keywords[i]]=true; } var badThis = "|this| keyword in object literal without a Class call"; var blocks = []; // keeps track of the outer references from each inner block if(script.match(/[\u200c-\u200f\u202a-\u202e\u206a-\u206f\uff00-\uffff]/)){ throw new Error("Illegal unicode characters detected"); } if(script.match(/\/\*@cc_on/)){ throw new Error("Conditional compilation token is not allowed"); } script = script.replace(/\\["'\\\/bfnrtu]/g, '@'). // borrows some tricks from json.js // now clear line comments, block comments, regular expressions, and strings. // By doing it all at once, the regular expression uses left to right parsing, and the most // left token is read first. It is also more compact. replace(/\/\/.*|\/\*[\w\W]*?\*\/|\/(\\[\/\\]|[^*\/])(\\.|[^\/\n\\])*\/[gim]*|("[^"]*")|('[^']*')/g,function(t) { return t.match(/^\/\/|^\/\*/) ? ' ' : '0'; // comments are replaced with a space, strings and regex are replaced with a single safe token (0) }). 
replace(/\.\s*([a-z\$_A-Z][\w\$_]*)|([;,{])\s*([a-z\$_A-Z][\w\$_]*\s*):/g,function(t,prop,prefix,key) { // find all the dot property references, all the object literal keys, and labels prop = prop || key; if(/^__|^(apply|call|callee|caller|constructor|eval|prototype|this|unwatch|valueOf|watch)$|__$/.test(prop)){ throw new Error("Illegal property name " + prop); } return (prefix && (prefix + "0:")) || '~'; // replace literal keys with 0: and replace properties with the innocuous ~ }); script.replace(/([^\[][\]\}]\s*=)|((\Wreturn|\S)\s*\[\s*\+?)|([^=!][=!]=[^=])/g,function(oper) {// check for illegal operator usages if(!oper.match(/((\Wreturn|[=\&\|\:\?\,])\s*\[)|\[\s*\+$/)){ // the whitelist for [ operator for array initializer context or [+num] syntax throw new Error("Illegal operator " + oper.substring(1)); } }); script = script.replace(new RegExp("(" + safeLibraries.join("|") + ")[\\s~]*\\(","g"),function(call) { // find library calls and make them look safe return "new("; // turn into a known safe call }); function findOuterRefs(block,func) { var outerRefs = {}; block.replace(/#\d+/g,function(b) { // graft in the outer references from the inner scopes var refs = blocks[b.substring(1)]; for (var i in refs) { if(i == badThis) { throw i; } if(i == 'this' && refs[':method'] && refs['this'] == 1) { // if we are in an object literal the function may be a bindable method, this must only be in the local scope i = badThis; } if(i != ':method'){ outerRefs[i] = 2; // the reference is more than just local } } }); block.replace(/(\W|^)([a-z_\$A-Z][\w_\$]*)/g,function(t,a,identifier) { // find all the identifiers if(identifier.charAt(0)=='_'){ throw new Error("Names may not start with _"); } outerRefs[identifier] = 1; }); return outerRefs; } var newScript,outerRefs; function parseBlock(t,func,a,b,params,block) { block.replace(/(^|,)0:\s*function#(\d+)/g,function(t,a,b) { // find functions in object literals // note that if named functions are allowed, it could be possible to 
have label: function name() {} which is a security breach var refs = blocks[b]; refs[':method'] = 1;//mark it as a method }); block = block.replace(/(^|[^_\w\$])Class\s*\(\s*([_\w\$]+\s*,\s*)*#(\d+)/g,function(t,p,a,b) { // find Class calls var refs = blocks[b]; delete refs[badThis]; return (p||'') + (a||'') + "#" + b; }); outerRefs = findOuterRefs(block,func); // find the variables in this block function parseVars(t,a,b,decl) { // find var decls decl.replace(/,?([a-z\$A-Z][_\w\$]*)/g,function(t,identifier) { if(identifier == 'Class'){ throw new Error("Class is reserved"); } delete outerRefs[identifier]; // outer reference is safely referenced here }); } if(func) { parseVars(t,a,a,params); // the parameters are declare variables } block.replace(/(\W|^)(var) ([ \t,_\w\$]+)/g,parseVars); // and vars declare variables // FIXME: Give named functions #name syntax so they can be detected as vars in outer scopes (but be careful of nesting) return (a || '') + (b || '') + "#" + (blocks.push(outerRefs)-1); // return a block reference so the outer block can fetch it } do { // get all the blocks, starting with inside and moving out, capturing the parameters of functions and catchs as variables along the way newScript = script.replace(/((function|catch)(\s+[_\w\$]+)?\s*\(([^\)]*)\)\s*)?{([^{}]*)}/g, parseBlock); } while(newScript != script && (script = newScript)); // keep going until we can't find anymore blocks parseBlock(0,0,0,0,0,script); //findOuterRefs(script); // find the references in the outside scope for (i in outerRefs) { if(!(i in safeGlobals)) { throw new Error("Illegal reference to " + i); } } } }; } //INSERT dojo.i18n._preloadLocalizations HERE //Check if document already complete, and if so, just trigger page load //listeners. NOTE: does not work with Firefox before 3.6. To support //those browsers, set djConfig.afterOnLoad = true when you know Dojo is added //after page load. Using a timeout so the rest of this //script gets evaluated properly. 
This work needs to happen after the //dojo.config.require work done in dojo._base. if(dojo.isBrowser && (document.readyState === "complete" || dojo.config.afterOnLoad)){ window.setTimeout(dojo._loadInit, 100); } })();
NextCenturyCorporation/neon-gtd
client/assets/vendor/dojo-1.5.0-windowname-only/dojo/owfdojo.js.uncompressed.js
JavaScript
apache-2.0
393,124
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import junit.framework.TestCase; class ProtobufTest extends TestCase { // So we don't have to add byte casts within hard-coded arrays. protected static byte[] asBytes(int[] ints) { byte[] bytes = new byte[ints.length]; for (int i = 0; i < ints.length; i++) { bytes[i] = (byte) ints[i]; } return bytes; } protected String byteArrayAsString(byte[] bytes) { StringBuilder sb = new StringBuilder("{ "); for (int i = 0; i < bytes.length; i++) { if (i > 0) { sb.append(", "); } sb.append(String.format("0x%02X", bytes[i])); } sb.append(" }"); return sb.toString(); } protected void failBytesCheck(byte[] expected, byte[] actual) { if (expected.length < 1000) { fail("Bytes don't match. Expected " + byteArrayAsString(expected) + " but was " + byteArrayAsString(actual)); } else { fail("Bytes don't match."); } } protected void checkBytes(byte[] expected, byte[] actual) { if (expected.length != actual.length) { failBytesCheck(expected, actual); } for (int i = 0; i < expected.length; i++) { if (expected[i] != actual[i]) { failBytesCheck(expected, actual); } } } }
google/j2objc
protobuf/tests/ProtobufTest.java
Java
apache-2.0
1,787
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.cloudsearch.model;

import java.io.Serializable;

/**
 * <p>
 * A response message that contains the stemming options for a search domain.
 * </p>
 */
@Deprecated
public class DescribeStemmingOptionsResult implements Serializable {

    // Stemming options for the domain, together with their current status.
    private StemmingOptionsStatus stems;

    /**
     * Returns the stemming options configured for this search domain and
     * the current status of those options.
     *
     * @return the stemming options and their status, or null if unset
     */
    public StemmingOptionsStatus getStems() {
        return stems;
    }

    /**
     * Sets the stemming options configured for this search domain and the
     * current status of those options.
     *
     * @param stems the stemming options and their status
     */
    public void setStems(StemmingOptionsStatus stems) {
        this.stems = stems;
    }

    /**
     * Fluent variant of {@link #setStems(StemmingOptionsStatus)} that
     * returns this object so calls can be chained.
     *
     * @param stems the stemming options and their status
     * @return this object, for chaining
     */
    public DescribeStemmingOptionsResult withStems(StemmingOptionsStatus stems) {
        setStems(stems);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing
     * and debugging.
     *
     * @return a string representation of this object
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Matches the generated format exactly: "{Stems: <value>}" when set,
        // "{}" when stems is null.
        String body = (getStems() == null) ? "" : "Stems: " + getStems();
        return "{" + body + "}";
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        // Equivalent to prime * 1 + (stems hash or 0).
        return prime + ((getStems() == null) ? 0 : getStems().hashCode());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeStemmingOptionsResult)) {
            return false;
        }
        DescribeStemmingOptionsResult that = (DescribeStemmingOptionsResult) obj;
        if (getStems() == null) {
            return that.getStems() == null;
        }
        return getStems().equals(that.getStems());
    }
}
dimdung/aws-sdk-java
aws-java-sdk-cloudsearch/src/main/java/com/amazonaws/services/cloudsearch/model/DescribeStemmingOptionsResult.java
Java
apache-2.0
3,568