gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * The MIT License (MIT)
 * Copyright (c) 2015 Berner Fachhochschule (BFH) - www.bfh.ch
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software
 * and associated documentation files (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge, publish, distribute,
 * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or
 * substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
 * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

package eu.focusnet.app.model;

import android.support.annotation.NonNull;

import java.util.ArrayList;
import java.util.Collections;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;

import eu.focusnet.app.controller.PriorityTask;
import eu.focusnet.app.model.gson.ProjectTemplate;
import eu.focusnet.app.util.Constant;
import eu.focusnet.app.util.FocusBadTypeException;
import eu.focusnet.app.util.FocusMissingResourceException;

/**
 * This object instantiates a project, out of a {@link ProjectTemplate}.
 */
public class ProjectInstance extends AbstractInstance implements IterableInstance
{
    /**
     * Unique identifier for the project. If an iterator is defined for this project, it will be
     * altered such that we are able to distinguish between the different versions of the project.
     * <p/>
     * See {@link eu.focusnet.app.util.Constant.Navigation},
     * {@link ProjectInstance#createProjects(ArrayList, DataContext)}
     * and {@link #fillWithAcquiredData()}
     */
    private String guid;

    /**
     * The title of the project.
     */
    private String title;

    /**
     * The description of the project
     */
    private String description;

    /**
     * The list of {@link PageInstance}s in the "dashboards" library of the project.
     */
    private ArrayList<PageInstance> dashboards;

    /**
     * The list of {@link PageInstance}s in the "tools" library of the project.
     */
    private ArrayList<PageInstance> tools;

    /**
     * The list of inner {@link ProjectInstance}s of the project: a project can contain projects.
     */
    private ArrayList<ProjectInstance> projects;

    /**
     * The template used to build the current project instance.
     */
    private ProjectTemplate template;

    /**
     * Tells whether this project is disabled and should consequently not be accessible.
     */
    private boolean disabled;

    /**
     * C'tor
     *
     * @param projectTemplate Template to use to construct this instance.
     * @param dataContext     The {@link DataContext} of this instance
     */
    public ProjectInstance(ProjectTemplate projectTemplate, @NonNull DataContext dataContext)
    {
        super(dataContext.getDataManager());
        this.template = projectTemplate;
        this.dataContext = dataContext;
        this.guid = null;
        this.dashboards = new ArrayList<>();
        this.tools = new ArrayList<>();
        this.projects = new ArrayList<>();
        // the disabled flag comes straight from the template and never changes afterwards
        this.disabled = this.template.isDisabled();
        this.build();
    }

    /**
     * Factory function for creating a set of projects out of a template.
     *
     * @param projectTemplates The template used to build the current project instances.
     * @param parentContext    Parent context on the top of which we will define a new
     *                         {@link DataContext} for created instances
     * @return A list of new {@link ProjectInstance}s
     */
    public static ArrayList<ProjectInstance> createProjects(ArrayList<ProjectTemplate> projectTemplates, DataContext parentContext)
    {
        ArrayList<ProjectInstance> projInstancesTemp = new ArrayList<>();
        for (ProjectTemplate projTpl : projectTemplates) {
            // Iterators use application-level data context list of urls
            // we cannot postpone fetching these ones. Let's do it now.
            if (projTpl.getIterator() != null) {
                ArrayList<String> urls;
                try {
                    urls = parentContext.resolveToArrayOfUrls(projTpl.getIterator());
                }
                catch (FocusMissingResourceException | FocusBadTypeException e) {
                    // Resource not found or invalid iterator.
                    // continue silently
                    continue;
                }
                ArrayList<DataContext> contexts = new ArrayList<>();
                for (String url : urls) {
                    DataContext newCtx = new DataContext(parentContext);
                    newCtx.registerIterator(projTpl.getGuid(), url);
                    contexts.add(newCtx);
                }
                for (DataContext newCtx : contexts) {
                    // the guid is adapted in the ProjectInstance constructor
                    ProjectInstance p = new ProjectInstance(projTpl, newCtx);
                    projInstancesTemp.add(p);
                }
            }
            else {
                DataContext newCtx = new DataContext(parentContext);
                ProjectInstance p = new ProjectInstance(projTpl, newCtx);
                projInstancesTemp.add(p);
            }
        }

        // fill projects with real data
        // this is done as parallel tasks
        // See similar logic in PageInstance
        for (ProjectInstance pi : projInstancesTemp) {
            pi.fillWithAcquiredData();
        }
        return projInstancesTemp;
    }

    /**
     * Build the PageInstance's for this project.
     */
    private void build()
    {
        // register the project-specific data to our data context
        // provide data called AFTER projects set iterator
        this.dataContext.provideData(this.template.getData());

        // I don't wait for iterator here, and that's not good. SO iterators should ALWAYS
        // have higher priority! FIXME
        if (this.description == null) {
            this.description = "";
        }

        // build the content of the project
        this.dashboards = PageInstance.createPageInstances(this.template, PageInstance.PageType.DASHBOARD, this.dataContext);
        this.tools = PageInstance.createPageInstances(this.template, PageInstance.PageType.TOOL, this.dataContext);
        this.projects = ProjectInstance.createProjects(this.template.getProjects(), this.dataContext);
    }

    /**
     * Check that all the content is valid, and if this is not the case, mark this project instance
     * as invalid.
     * <p/>
     * This method is intended to be called after the full application content has been built.
     */
    public void checkValidity()
    {
        boolean isValid = true;
        for (PageInstance pi : this.dashboards) {
            isValid &= pi.isValid();
        }
        for (PageInstance pi : this.tools) {
            isValid &= pi.isValid();
        }
        for (ProjectInstance pi : this.projects) {
            // recurse first so that inner projects have a definitive validity state
            pi.checkValidity();
            isValid &= pi.isValid();
        }
        if (!isValid) {
            this.markAsInvalid();
        }
    }

    /**
     * Fill instance with data that have been acquired via {@link DataContext#register(String, String)}
     *
     * @return a {@code Future} on which we may listen to know if the operation is finished.
     */
    private Future fillWithAcquiredData()
    {
        // post-pone setting information after having fetched all resources related to this object
        Callable todo = new Callable()
        {
            @Override
            public Boolean call() throws Exception
            {
                guid = template.getGuid();
                if (template.getIterator() != null) {
                    // disambiguate the guid with the iterator value, e.g. guid[iteratorValue]
                    guid = guid
                            + Constant.Navigation.PATH_SELECTOR_OPEN
                            + dataContext.getIteratorValue()
                            + Constant.Navigation.PATH_SELECTOR_CLOSE;
                }
                try {
                    title = dataContext.resolveToString(template.getTitle());
                    description = dataContext.resolveToString(template.getDescription());
                }
                catch (FocusMissingResourceException | FocusBadTypeException ex) {
                    // silent skipping
                    return false;
                }
                freeDataContext();
                return true;
            }
        };

        // priority: just a little bit less than the current data context priority, such that is executed
        // just after all data from the data context have been retrieved
        PriorityTask<Object> future = new PriorityTask<>(
                this.getDataContext().getPriority() - Constant.AppConfig.PRIORITY_SMALL_DELTA,
                todo
        );
        this.dataManager.executeOnAppBuilderPool(future);
        return future;
    }

    /**
     * Get this project title
     *
     * @return The title
     */
    public String getTitle()
    {
        return this.title;
    }

    /**
     * Get this project description
     *
     * @return The description or the empty String
     */
    public String getDescription()
    {
        return this.description;
    }

    /**
     * Get the list of dashboards pages
     *
     * @return A list of {@link PageInstance}s
     */
    public ArrayList<PageInstance> getDashboards()
    {
        return this.dashboards;
    }

    /**
     * Get the list of tools pages
     *
     * @return A list of {@link PageInstance}s
     */
    public ArrayList<PageInstance> getTools()
    {
        return this.tools;
    }

    /**
     * Return the application projects instances.
     *
     * @return A list of {@link ProjectInstance}s.
     */
    public ArrayList<ProjectInstance> getProjects()
    {
        return this.projects;
    }

    /**
     * Inherited.
     *
     * @param searchedPath The path to look after.
     * @return Inherited.
     */
    @Override
    protected AbstractInstance propagatePathLookup(String searchedPath)
    {
        // look into the dashboards, tools and inner projects, in this order
        ArrayList<ArrayList<? extends AbstractInstance>> sources = new ArrayList<>();
        sources.add(this.dashboards);
        sources.add(this.tools);
        sources.add(this.projects);

        for (ArrayList<? extends AbstractInstance> array : sources) {
            for (AbstractInstance i : array) {
                AbstractInstance ret = i.lookupByPath(searchedPath);
                if (ret != null) {
                    return ret;
                }
            }
        }
        return null;
    }

    /**
     * Inherited.
     *
     * @param parentPath The parent path on the top of which the new path must be defined.
     */
    @Override
    public void buildPaths(String parentPath)
    {
        this.path = parentPath + Constant.Navigation.PATH_SEPARATOR + this.guid;

        ArrayList<ArrayList<? extends AbstractInstance>> sources = new ArrayList<>();
        sources.add(this.dashboards);
        sources.add(this.tools);
        sources.add(this.projects);

        for (ArrayList<? extends AbstractInstance> ar : sources) {
            for (AbstractInstance i : ar) {
                i.buildPaths(this.path);
            }
        }
    }

    /**
     * Tells whether the current instance is disabled.
     *
     * @return {@code true} if this is the case, {@code false} otherwise.
     */
    public boolean isDisabled()
    {
        return this.disabled;
    }

    /**
     * Reorder list elements in this instance. This must be done after application content
     * instance construction because the title is not known until then.
     */
    public void reorderListElements()
    {
        Collections.sort(this.dashboards, PageInstance.getComparator());
        Collections.sort(this.tools, PageInstance.getComparator());
        Collections.sort(this.projects, ProjectInstance.getComparator());
        for (ProjectInstance p : this.projects) {
            p.reorderListElements();
        }
    }

    /**
     * Misspelled legacy name kept for backward compatibility with existing callers.
     *
     * @deprecated use {@link #reorderListElements()} instead.
     */
    @Deprecated
    public void reorderListEelments()
    {
        this.reorderListElements();
    }
}
/*
 * Copyright 2013 Erlend Hamnaberg
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.hamnaberg.funclite;

import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;

/**
 * A minimal Option/Maybe type with two cases: {@link Some} (a present, non-null value)
 * and {@link None} (absence). Mirrors a subset of {@code java.util.Optional} while
 * additionally implementing {@link Iterable} (zero-or-one element).
 *
 * NOTE(review): {@code Supplier} is used below but is not imported from
 * {@code java.util.function}; it presumably resolves to a same-package
 * {@code net.hamnaberg.funclite.Supplier} — confirm before changing the import.
 */
public abstract class Optional<A> implements Iterable<A> {

    // Shared singleton for the empty case. Made final: a mutable public static field
    // could be reassigned by any caller, breaking every none() in the program.
    public static final None<Object> NONE = new None<Object>();

    Optional() {
    }

    /** Returns the value, or throws if this is {@link None}. */
    public abstract A get();

    /** @return {@code true} iff a value is present. */
    public abstract boolean isSome();

    @Override
    public abstract int hashCode();

    @Override
    public abstract boolean equals(Object obj);

    public final boolean isNone() {
        return !isSome();
    }

    public boolean isPresent() {
        return isSome();
    }

    public boolean isEmpty() {
        return isNone();
    }

    /** Maps the contained value, or stays {@link None}. */
    public final <B> Optional<B> map(Function<A, B> f) {
        if (isNone()) {
            return none();
        }
        else {
            return new Some<>(f.apply(get()));
        }
    }

    /** Maps to another Optional; the mapper must not return {@code null}. */
    public final <B> Optional<B> flatMap(Function<A, Optional<B>> f) {
        if (isNone()) {
            return none();
        }
        else {
            return Objects.requireNonNull(f.apply(get()), "Optional.flatMap produced null");
        }
    }

    public Stream<A> stream() {
        return CollectionOps.stream(this);
    }

    public final void foreach(Consumer<A> e) {
        CollectionOps.foreach(this, e);
    }

    /** Keeps the value only if present and matching the predicate. */
    public final Optional<A> filter(Predicate<A> input) {
        if (isSome() && input.test(get())) {
            return this;
        }
        else {
            return none();
        }
    }

    public boolean forall(Predicate<A> input) {
        return CollectionOps.forall(this, input);
    }

    public boolean exists(Predicate<A> input) {
        return CollectionOps.exists(this, input);
    }

    public boolean contains(A value) {
        return exists(v -> v.equals(value));
    }

    /** Wraps a possibly-null value: {@code null} becomes {@link None}. */
    public static <A> Optional<A> fromNullable(A value) {
        return value != null ? some(value) : Optional.<A>none();
    }

    /** Alias of {@link #fromNullable(Object)} matching {@code java.util.Optional} naming. */
    public static <A> Optional<A> ofNullable(A value) {
        return fromNullable(value);
    }

    /** Wraps a non-null value; throws {@link NullPointerException} on {@code null}. */
    public static <A> Optional<A> some(A value) {
        return new Some<A>(Objects.requireNonNull(value));
    }

    /** Alias of {@link #some(Object)} matching {@code java.util.Optional} naming. */
    public static <A> Optional<A> of(A value) {
        return new Some<A>(Objects.requireNonNull(value));
    }

    @SuppressWarnings("unchecked")
    public static <A> Optional<A> none() {
        // safe: None never produces a value, so the type parameter is phantom
        return (Optional<A>) NONE;
    }

    public static <A> Optional<A> empty() {
        return none();
    }

    public A orNull() {
        return isSome() ? get() : null;
    }

    public A getOrElse(A orElse) {
        return isSome() ? get() : orElse;
    }

    public A getOrElse(Supplier<A> orElse) {
        return isSome() ? get() : orElse.get();
    }

    public Optional<A> or(Optional<A> orElse) {
        return isSome() ? this : orElse;
    }

    /** Catamorphism: collapses both cases into a single value. */
    public <B> B fold(Supplier<B> noneF, Function<A, B> someF) {
        return isNone() ? noneF.get() : someF.apply(get());
    }

    @Override
    public final Iterator<A> iterator() {
        return new Iterator<A>() {
            private volatile boolean used = false;

            @Override
            public boolean hasNext() {
                return !used && isSome();
            }

            @Override
            public A next() {
                // Honor the Iterator contract: once exhausted (or for None), next()
                // must throw NoSuchElementException. The previous implementation
                // returned the value again after hasNext() turned false, and threw
                // UnsupportedOperationException for None.
                if (!hasNext()) {
                    throw new NoSuchElementException("Optional iterator exhausted");
                }
                A value = get();
                used = true;
                return value;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException("Not supported");
            }
        };
    }
}

/** Present case: holds a single non-null value. */
final class Some<A> extends Optional<A> {
    private final A value;

    Some(A value) {
        this.value = value;
    }

    @Override
    public A get() {
        return value;
    }

    @Override
    public boolean isSome() {
        return true;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Some some = (Some) o;

        if (value != null ? !value.equals(some.value) : some.value != null) return false;

        return true;
    }

    @Override
    public int hashCode() {
        return value != null ? value.hashCode() : 0;
    }

    @Override
    public String toString() {
        return String.format("Some{%s}", value);
    }
}

/** Absent case: a stateless singleton (see {@link Optional#NONE}). */
final class None<A> extends Optional<A> {
    @Override
    public A get() {
        throw new UnsupportedOperationException("Cannot get from None");
    }

    @Override
    public boolean equals(Object obj) {
        return obj instanceof None;
    }

    @Override
    public int hashCode() {
        return 31;
    }

    @Override
    public boolean isSome() {
        return false;
    }

    @Override
    public String toString() {
        return "None";
    }
}
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.media.router.cast; import android.content.Context; import android.os.Bundle; import com.google.android.gms.cast.ApplicationMetadata; import com.google.android.gms.cast.Cast; import com.google.android.gms.cast.CastStatusCodes; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.PendingResult; import com.google.android.gms.common.api.ResultCallback; import com.google.android.gms.common.api.Status; import org.chromium.base.Log; import org.chromium.chrome.browser.media.router.ChromeMediaRouter; import org.chromium.chrome.browser.media.router.MediaRoute; /** * Establishes a {@link MediaRoute} by starting a Cast application represented by the given * presentation URL. Reports success or failure to {@link ChromeMediaRouter}. * Since there're numerous asynchronous calls involved in getting the application to launch * the class is implemented as a state machine. 
*/ public class CreateRouteRequest implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener, ResultCallback<Cast.ApplicationConnectionResult> { private static final String TAG = "MediaRouter"; private static final int STATE_IDLE = 0; private static final int STATE_CONNECTING_TO_API = 1; private static final int STATE_API_CONNECTION_SUSPENDED = 2; private static final int STATE_LAUNCHING_APPLICATION = 3; private static final int STATE_LAUNCH_SUCCEEDED = 4; private static final int STATE_TERMINATED = 5; private static final String ERROR_NEW_ROUTE_LAUNCH_APPLICATION_FAILED = "Launch application failed: %s, %s"; private static final String ERROR_NEW_ROUTE_LAUNCH_APPLICATION_FAILED_STATUS = "Launch application failed with status: %s, %d, %s"; private static final String ERROR_NEW_ROUTE_CLIENT_CONNECTION_FAILED = "GoogleApiClient connection failed: %d, %b"; private class CastListener extends Cast.Listener { private CastSession mSession; CastListener() {} void setSession(CastSession session) { mSession = session; } @Override public void onApplicationStatusChanged() { if (mSession == null) return; mSession.updateSessionStatus(); } @Override public void onApplicationMetadataChanged(ApplicationMetadata metadata) { if (mSession == null) return; mSession.updateSessionStatus(); } @Override public void onApplicationDisconnected(int errorCode) { if (errorCode != CastStatusCodes.SUCCESS) { Log.e(TAG, String.format( "Application disconnected with: %d", errorCode)); } // This callback can be called more than once if the application is stopped from Chrome. 
if (mSession == null) return; mSession.stopApplication(); mSession = null; } @Override public void onVolumeChanged() { if (mSession == null) return; mSession.onVolumeChanged(); } } private final MediaSource mSource; private final MediaSink mSink; private final String mPresentationId; private final String mOrigin; private final int mTabId; private final boolean mIsIncognito; private final int mRequestId; private final CastMediaRouteProvider mRouteProvider; private final CastListener mCastListener = new CastListener(); private GoogleApiClient mApiClient; private int mState = STATE_IDLE; /** * Initializes the request. * @param source The {@link MediaSource} defining the application to launch on the Cast device. * @param sink The {@link MediaSink} identifying the selected Cast device. * @param presentationId The presentation id assigned to the route by {@link ChromeMediaRouter}. * @param origin The origin of the frame requesting the route. * @param tabId The id of the tab containing the frame requesting the route. * @param isIncognito Whether the route is being requested from an Incognito profile. * @param requestId The id of the route creation request for tracking by * {@link ChromeMediaRouter}. * @param routeProvider The instance of {@link CastMediaRouteProvider} handling the request. 
*/ public CreateRouteRequest( MediaSource source, MediaSink sink, String presentationId, String origin, int tabId, boolean isIncognito, int requestId, CastMediaRouteProvider routeProvider) { assert source != null; assert sink != null; mSource = source; mSink = sink; mPresentationId = presentationId; mOrigin = origin; mTabId = tabId; mIsIncognito = isIncognito; mRequestId = requestId; mRouteProvider = routeProvider; } public MediaSource getSource() { return mSource; } public MediaSink getSink() { return mSink; } public String getPresentationId() { return mPresentationId; } public String getOrigin() { return mOrigin; } public int getTabId() { return mTabId; } public boolean isIncognito() { return mIsIncognito; } public int getNativeRequestId() { return mRequestId; } /** * Starts the process of launching the application on the Cast device. * @param applicationContext application context * implementation provided by the caller. */ public void start(Context applicationContext) { assert applicationContext != null; if (mState != STATE_IDLE) throwInvalidState(); mApiClient = createApiClient(mCastListener, applicationContext); mApiClient.connect(); mState = STATE_CONNECTING_TO_API; } @Override public void onConnected(Bundle connectionHint) { if (mState != STATE_CONNECTING_TO_API && mState != STATE_API_CONNECTION_SUSPENDED) { throwInvalidState(); } // TODO(avayvod): switch to using ConnectedTask class for GoogleApiClient operations. // See https://crbug.com/522478 if (mState == STATE_API_CONNECTION_SUSPENDED) return; try { launchApplication(mApiClient, mSource.getApplicationId(), false) .setResultCallback(this); mState = STATE_LAUNCHING_APPLICATION; } catch (Exception e) { reportError(String.format(ERROR_NEW_ROUTE_LAUNCH_APPLICATION_FAILED, mSource.getApplicationId(), e)); } } // TODO(avayvod): switch to using ConnectedTask class for GoogleApiClient operations. 
// See https://crbug.com/522478 @Override public void onConnectionSuspended(int cause) { mState = STATE_API_CONNECTION_SUSPENDED; } @Override public void onResult(Cast.ApplicationConnectionResult result) { if (mState != STATE_LAUNCHING_APPLICATION && mState != STATE_API_CONNECTION_SUSPENDED) { throwInvalidState(); } Status status = result.getStatus(); if (!status.isSuccess()) { reportError(String.format( ERROR_NEW_ROUTE_LAUNCH_APPLICATION_FAILED_STATUS, mSource.getApplicationId(), status.getStatusCode(), status.getStatusMessage())); } mState = STATE_LAUNCH_SUCCEEDED; reportSuccess(result); } // TODO(avayvod): switch to using ConnectedTask class for GoogleApiClient operations. // See https://crbug.com/522478 @Override public void onConnectionFailed(ConnectionResult result) { if (mState != STATE_CONNECTING_TO_API) throwInvalidState(); reportError(String.format( ERROR_NEW_ROUTE_CLIENT_CONNECTION_FAILED, result.getErrorCode(), result.hasResolution())); } private GoogleApiClient createApiClient(Cast.Listener listener, Context context) { Cast.CastOptions.Builder apiOptionsBuilder = Cast.CastOptions .builder(mSink.getDevice(), listener) // TODO(avayvod): hide this behind the flag or remove .setVerboseLoggingEnabled(true); return new GoogleApiClient.Builder(context) .addApi(Cast.API, apiOptionsBuilder.build()) .addConnectionCallbacks(this) .addOnConnectionFailedListener(this) .build(); } private PendingResult<Cast.ApplicationConnectionResult> launchApplication( GoogleApiClient apiClient, String appId, boolean relaunchIfRunning) { return Cast.CastApi.launchApplication(apiClient, appId, relaunchIfRunning); } private void throwInvalidState() { throw new RuntimeException(String.format("Invalid state: %d", mState)); } private void reportSuccess(Cast.ApplicationConnectionResult result) { if (mState != STATE_LAUNCH_SUCCEEDED) throwInvalidState(); CastSession session = new CastSessionImpl( mApiClient, result.getSessionId(), result.getApplicationMetadata(), 
result.getApplicationStatus(), mSink.getDevice(), mOrigin, mTabId, mIsIncognito, mSource, mRouteProvider); mCastListener.setSession(session); mRouteProvider.onSessionCreated(session); terminate(); } private void reportError(String message) { if (mState == STATE_TERMINATED) throwInvalidState(); assert mRouteProvider != null; mRouteProvider.onRouteRequestError(message, mRequestId); terminate(); } private void terminate() { mApiClient.unregisterConnectionCallbacks(this); mApiClient.unregisterConnectionFailedListener(this); mState = STATE_TERMINATED; } }
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2022 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.registry; import org.eclipse.equinox.security.storage.ISecurePreferences; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBPDataSourceContainer; import org.jkiss.dbeaver.model.DBPDataSourceFolder; import org.jkiss.dbeaver.model.DBPDataSourceProvider; import org.jkiss.dbeaver.model.DBPInformationProvider; import org.jkiss.dbeaver.model.app.DBASecureStorage; import org.jkiss.dbeaver.model.app.DBPDataSourceRegistry; import org.jkiss.dbeaver.model.app.DBPProject; import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration; import org.jkiss.dbeaver.model.connection.DBPDriver; import org.jkiss.dbeaver.model.net.DBWHandlerConfiguration; import org.jkiss.dbeaver.model.net.DBWHandlerType; import org.jkiss.dbeaver.runtime.DBWorkbench; import org.jkiss.dbeaver.utils.GeneralUtils; import org.jkiss.utils.CommonUtils; import java.util.HashMap; import java.util.Map; /** * Connection spec utils. 
*/ public class DataSourceUtils { public static final String PARAM_ID = "id"; public static final String PARAM_DRIVER = "driver"; public static final String PARAM_NAME = "name"; public static final String PARAM_URL = "url"; public static final String PARAM_HOST = "host"; public static final String PARAM_PORT = "port"; public static final String PARAM_SERVER = "server"; public static final String PARAM_DATABASE = "database"; public static final String PARAM_USER = "user"; private static final String PARAM_PASSWORD = "password"; private static final String PARAM_SAVE_PASSWORD = "savePassword"; private static final String PARAM_AUTH_MODEL = "auth"; private static final String PARAM_SHOW_SYSTEM_OBJECTS = "showSystemObjects"; private static final String PARAM_SHOW_UTILITY_OBJECTS = "showUtilityObjects"; private static final String PARAM_SHOW_ONLY_ENTITIES = "showOnlyEntities"; private static final String PARAM_HIDE_FOLDERS = "hideFolders"; private static final String PARAM_HIDE_SCHEMAS = "hideSchemas"; private static final String PARAM_MERGE_ENTITIES = "mergeEntities"; private static final String PARAM_FOLDER = "folder"; private static final String PARAM_AUTO_COMMIT = "autoCommit"; private static final String PREFIX_HANDLER = "handler."; private static final String PREFIX_PROP = "prop."; private static final String PREFIX_AUTH_PROP = "authProp."; private static final Log log = Log.getLog(DataSourceUtils.class); public static DBPDataSourceContainer getDataSourceBySpec( @NotNull DBPProject project, @NotNull String connectionSpec, @Nullable GeneralUtils.IParameterHandler parameterHandler, boolean searchByParameters, boolean createNewDataSource) { String driverName = null, url = null, host = null, port = null, server = null, database = null, user = null, password = null, authModelId = null; boolean showSystemObjects = false, showUtilityObjects = false, showOnlyEntities = false, hideFolders = false, hideSchemas = false, mergeEntities = false, savePassword = true; Boolean 
autoCommit = null; Map<String, String> conProperties = new HashMap<>(); Map<String, Map<String, String>> handlerProps = new HashMap<>(); Map<String, String> authProperties = new HashMap<>(); DBPDataSourceFolder folder = null; String dsId = null, dsName = null; DBPDataSourceRegistry dsRegistry = project == null ? null : project.getDataSourceRegistry(); if (dsRegistry == null) { log.debug("No datasource registry for project '" + project.getName() + "'"); return null; } String[] conParams = connectionSpec.split("\\|"); for (String cp : conParams) { int divPos = cp.indexOf('='); if (divPos == -1) { continue; } String paramName = cp.substring(0, divPos); String paramValue = cp.substring(divPos + 1); switch (paramName) { case PARAM_ID: dsId = paramValue; break; case PARAM_DRIVER: driverName = paramValue; break; case PARAM_NAME: dsName = paramValue; break; case PARAM_URL: url = paramValue; break; case PARAM_HOST: host = paramValue; break; case PARAM_PORT: port = paramValue; break; case PARAM_SERVER: server = paramValue; break; case PARAM_DATABASE: database = paramValue; break; case PARAM_USER: user = paramValue; break; case PARAM_PASSWORD: password = paramValue; break; case PARAM_AUTH_MODEL: authModelId = paramValue; break; case PARAM_SAVE_PASSWORD: savePassword = CommonUtils.toBoolean(paramValue); break; case PARAM_SHOW_SYSTEM_OBJECTS: showSystemObjects = CommonUtils.toBoolean(paramValue); break; case PARAM_SHOW_UTILITY_OBJECTS: showUtilityObjects = CommonUtils.toBoolean(paramValue); break; case PARAM_SHOW_ONLY_ENTITIES: showOnlyEntities = CommonUtils.toBoolean(paramValue); break; case PARAM_HIDE_FOLDERS: hideFolders = CommonUtils.toBoolean(paramValue); break; case PARAM_HIDE_SCHEMAS: hideSchemas = CommonUtils.toBoolean(paramValue); break; case PARAM_MERGE_ENTITIES: mergeEntities = CommonUtils.toBoolean(paramValue); break; case PARAM_FOLDER: folder = dsRegistry.getFolder(paramValue); break; case PARAM_AUTO_COMMIT: autoCommit = CommonUtils.toBoolean(paramValue); break; 
default: boolean handled = false; if (paramName.length() > PREFIX_PROP.length() && paramName.startsWith(PREFIX_PROP)) { paramName = paramName.substring(PREFIX_PROP.length()); conProperties.put(paramName, paramValue); handled = true; } else if (paramName.length() > PREFIX_AUTH_PROP.length() && paramName.startsWith(PREFIX_AUTH_PROP)) { paramName = paramName.substring(PREFIX_AUTH_PROP.length()); authProperties.put(paramName, paramValue); handled = true; } else if (paramName.length() > PREFIX_HANDLER.length() && paramName.startsWith(PREFIX_HANDLER)) { // network handler prop paramName = paramName.substring(PREFIX_HANDLER.length()); divPos = paramName.indexOf('.'); if (divPos == -1) { log.debug("Wrong handler parameter: '" + paramName + "'"); continue; } String handlerId = paramName.substring(0, divPos); paramName = paramName.substring(divPos + 1); Map<String, String> handlerPopMap = handlerProps.computeIfAbsent(handlerId, k -> new HashMap<>()); handlerPopMap.put(paramName, paramValue); handled = true; } else if (parameterHandler != null) { handled = parameterHandler.setParameter(paramName, paramValue); } if (!handled) { log.debug("Unknown connection parameter '" + paramName + "'"); } } } DBPDataSourceContainer dataSource = null; if (dsId != null) { dataSource = dsRegistry.getDataSource(dsId); } if (dsName != null) { dataSource = dsRegistry.findDataSourceByName(dsName); } if (dataSource != null) { DBPConnectionConfiguration connConfig = dataSource.getConnectionConfiguration(); if (!CommonUtils.isEmpty(database)) connConfig.setDatabaseName(database); if (!CommonUtils.isEmpty(user)) connConfig.setUserName(user); if (!CommonUtils.isEmpty(password)) connConfig.setUserPassword(password); if (!CommonUtils.isEmpty(conProperties)) connConfig.setProperties(conProperties); if (!CommonUtils.isEmpty(authProperties)) connConfig.setAuthProperties(authProperties); if (!CommonUtils.isEmpty(authModelId)) connConfig.setAuthModelId(authModelId); return dataSource; } if 
(searchByParameters) { // Try to find by parameters / handler props if (url != null) { for (DBPDataSourceContainer ds : dsRegistry.getDataSources()) { if (url.equals(ds.getConnectionConfiguration().getUrl())) { if (user == null || user.equals(ds.getConnectionConfiguration().getUserName())) { return ds; } } } } else { for (DBPDataSourceContainer ds : dsRegistry.getDataSources()) { DBPConnectionConfiguration cfg = ds.getConnectionConfiguration(); if (server != null && !server.equals(cfg.getServerName()) || host != null && !host.equals(cfg.getHostName()) || port != null && !port.equals(cfg.getHostPort()) || database != null && !database.equals(cfg.getDatabaseName()) || user != null && !user.equals(cfg.getUserName())) { continue; } boolean matched = true; if (!conProperties.isEmpty()) { for (Map.Entry<String, String> prop : conProperties.entrySet()) { if (!CommonUtils.equalObjects(cfg.getProperty(prop.getKey()), prop.getValue())) { matched = false; break; } } if (!matched) { continue; } } if (!handlerProps.isEmpty()) { for (Map.Entry<String, Map<String, String>> handlerProp : handlerProps.entrySet()) { DBWHandlerConfiguration handler = cfg.getHandler(handlerProp.getKey()); if (handler == null) { matched = false; break; } for (Map.Entry<String, String> prop : handlerProp.getValue().entrySet()) { if (!CommonUtils.equalObjects(handler.getProperty(prop.getKey()), prop.getValue())) { matched = false; break; } } if (!matched) { break; } } if (!matched) { continue; } } return ds; } } } if (!createNewDataSource) { return null; } if (driverName == null) { log.error("Driver name not specified - can't create new datasource"); return null; } DBPDriver driver = DBWorkbench.getPlatform().getDataSourceProviderRegistry().findDriver(driverName); if (driver == null) { log.error("Driver '" + driverName + "' not found"); return null; } // Create new datasource with specified parameters if (dsName == null) { dsName = "Ext: " + driver.getName(); if (database != null) { dsName += " - " + 
database; } else if (server != null) { dsName += " - " + server; } } DBPConnectionConfiguration connConfig = new DBPConnectionConfiguration(); connConfig.setUrl(url); connConfig.setHostName(host); connConfig.setHostPort(port); connConfig.setServerName(server); connConfig.setDatabaseName(database); connConfig.setUserName(user); connConfig.setUserPassword(password); connConfig.setProperties(conProperties); if (!CommonUtils.isEmpty(authProperties)) { connConfig.setAuthProperties(authProperties); } if (!CommonUtils.isEmpty(authModelId)) { connConfig.setAuthModelId(authModelId); } if (autoCommit != null) { connConfig.getBootstrap().setDefaultAutoCommit(autoCommit); } DBPDataSourceContainer newDS = dsRegistry.createDataSource(driver, connConfig); newDS.setName(dsName); ((DataSourceDescriptor)newDS).setTemporary(true); if (savePassword) { newDS.setSavePassword(true); } if (folder != null) { newDS.setFolder(folder); } DataSourceNavigatorSettings navSettings = ((DataSourceDescriptor)newDS).getNavigatorSettings(); navSettings.setShowSystemObjects(showSystemObjects); navSettings.setShowUtilityObjects(showUtilityObjects); navSettings.setShowOnlyEntities(showOnlyEntities); navSettings.setHideSchemas(hideSchemas); navSettings.setHideFolders(hideFolders); navSettings.setMergeEntities(mergeEntities); //ds.set dsRegistry.addDataSource(newDS); return newDS; } /** * Save secure config in protected storage. * @return true on success (if protected storage is available and configured) */ static boolean saveCredentialsInSecuredStorage( @NotNull DBPProject project, @Nullable DataSourceDescriptor dataSource, @Nullable String subNode, @NotNull SecureCredentials credentials) { final DBASecureStorage secureStorage = project.getSecureStorage(); { try { ISecurePreferences prefNode = dataSource == null ? 
project.getSecureStorage().getSecurePreferences() : dataSource.getSecurePreferences(); if (!secureStorage.useSecurePreferences()) { prefNode.removeNode(); } else { if (subNode != null) { for (String nodeName : subNode.split("/")) { prefNode = prefNode.node(nodeName); } } prefNode.put("name", dataSource != null ? dataSource.getName() : project.getName(), false); if (!CommonUtils.isEmpty(credentials.getUserName())) { prefNode.put(RegistryConstants.ATTR_USER, credentials.getUserName(), true); } else { prefNode.remove(RegistryConstants.ATTR_USER); } if (!CommonUtils.isEmpty(credentials.getUserPassword())) { prefNode.put(RegistryConstants.ATTR_PASSWORD, credentials.getUserPassword(), true); } else { prefNode.remove(RegistryConstants.ATTR_PASSWORD); } if (!CommonUtils.isEmpty(credentials.getProperties())) { for (Map.Entry<String, String> prop : credentials.getProperties().entrySet()) { prefNode.put(prop.getKey(), prop.getValue(), true); } } return true; } } catch (Throwable e) { log.error("Can't save credentials in secure storage", e); } } return false; } @NotNull public static String getDataSourceAddressText(DBPDataSourceContainer dataSourceContainer) { if (dataSourceContainer.getDriver().isCustomEndpointInformation()) { DBPDataSourceProvider dataSourceProvider = dataSourceContainer.getDriver().getDataSourceProvider(); if (dataSourceProvider instanceof DBPInformationProvider) { String objectInformation = ((DBPInformationProvider) dataSourceProvider).getObjectInformation(dataSourceContainer, DBPInformationProvider.INFO_TARGET_ADDRESS); if (!CommonUtils.isEmpty(objectInformation)) { return objectInformation; } } } DBPConnectionConfiguration cfg = dataSourceContainer.getConnectionConfiguration(); String hostText = getTargetTunnelHostName(cfg); String hostPort = cfg.getHostPort(); if (!CommonUtils.isEmpty(hostPort)) { return hostText + ":" + hostPort; } return hostText; } @NotNull public static String getTargetTunnelHostName(DBPConnectionConfiguration cfg) { String hostText 
= cfg.getHostName(); // For localhost ry to get real host name from tunnel configuration if (CommonUtils.isEmpty(hostText) || hostText.equals("localhost") || hostText.equals("127.0.0.1")) { for (DBWHandlerConfiguration hc : cfg.getHandlers()) { if (hc.isEnabled() && hc.getType() == DBWHandlerType.TUNNEL) { String tunnelHost = hc.getStringProperty(DBWHandlerConfiguration.PROP_HOST); if (!CommonUtils.isEmpty(tunnelHost)) { hostText = tunnelHost; break; } } } } return CommonUtils.notEmpty(hostText); } }
/* * Copyright 2017 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package flink.process; import com.navercorp.pinpoint.common.server.bo.stat.join.*; import com.navercorp.pinpoint.flink.mapper.thrift.stat.JoinAgentStatBoMapper; import com.navercorp.pinpoint.flink.process.ApplicationCache; import com.navercorp.pinpoint.flink.process.TBaseFlatMapper; import com.navercorp.pinpoint.thrift.dto.flink.*; import org.apache.flink.api.common.functions.util.ListCollector; import org.apache.flink.api.java.tuple.Tuple3; import org.junit.Test; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * @author minwoo.jung */ public class TBaseFlatMapperTest { final static String AGENT_ID = "testAgent"; final static String APPLICATION_ID = "testApplication"; @Test public void flatMapTest() throws Exception { ApplicationCache applicationCache = newMockApplicationCache(); TBaseFlatMapper mapper = new TBaseFlatMapper(new JoinAgentStatBoMapper(), applicationCache); TFAgentStatBatch tfAgentStatBatch = createTFAgentStatBatch(); ArrayList<Tuple3<String, JoinStatBo, Long>> dataList = new ArrayList<>(); ListCollector<Tuple3<String, JoinStatBo, Long>> collector = new ListCollector<>(dataList); mapper.flatMap(tfAgentStatBatch, collector); assertEquals(dataList.size(), 2); Tuple3<String, JoinStatBo, Long> data1 = 
dataList.get(0); assertEquals(data1.f0, AGENT_ID); assertEquals(data1.f2.longValue(), 1491274143454L); JoinAgentStatBo joinAgentStatBo = (JoinAgentStatBo) data1.f1; assertEquals(joinAgentStatBo.getId(), AGENT_ID); assertEquals(joinAgentStatBo.getAgentStartTimestamp(), 1491274142454L); assertEquals(joinAgentStatBo.getTimestamp(), 1491274143454L); assertJoinCpuLoadBo(joinAgentStatBo.getJoinCpuLoadBoList()); Tuple3<String, JoinStatBo, Long> data2 = dataList.get(1); assertEquals(data2.f0, APPLICATION_ID); assertEquals(data2.f2.longValue(), 1491274140000L); JoinApplicationStatBo joinApplicationStatBo = (JoinApplicationStatBo) data2.f1; assertEquals(joinApplicationStatBo.getId(), APPLICATION_ID); assertEquals(joinApplicationStatBo.getTimestamp(), 1491274140000L); assertEquals(joinApplicationStatBo.getStatType(), StatType.APP_STST); assertJoinCpuLoadBo(joinApplicationStatBo.getJoinCpuLoadBoList()); } private ApplicationCache newMockApplicationCache() { ApplicationCache applicationCache = mock(ApplicationCache.class); when(applicationCache.findApplicationId(any(ApplicationCache.ApplicationKey.class))) .thenReturn(APPLICATION_ID); return applicationCache; } private void assertJoinCpuLoadBo(List<JoinCpuLoadBo> joincpulaodBoList) { assertEquals(2, joincpulaodBoList.size()); JoinCpuLoadBo joinCpuLoadBo = joincpulaodBoList.get(0); assertEquals(joinCpuLoadBo.getId(), AGENT_ID); assertEquals(joinCpuLoadBo.getTimestamp(), 1491274143454L); assertEquals(joinCpuLoadBo.getJvmCpuLoad(), 10, 0); assertEquals(joinCpuLoadBo.getMinJvmCpuLoad(), 10, 0); assertEquals(joinCpuLoadBo.getMaxJvmCpuLoad(), 10, 0); assertEquals(joinCpuLoadBo.getSystemCpuLoad(), 30, 0); assertEquals(joinCpuLoadBo.getMinSystemCpuLoad(), 30, 0); assertEquals(joinCpuLoadBo.getMaxSystemCpuLoad(), 30, 0); joinCpuLoadBo = joincpulaodBoList.get(1); assertEquals(joinCpuLoadBo.getId(), AGENT_ID); assertEquals(joinCpuLoadBo.getTimestamp(), 1491274148454L); assertEquals(joinCpuLoadBo.getJvmCpuLoad(), 20, 0); 
assertEquals(joinCpuLoadBo.getMinJvmCpuLoad(), 20, 0); assertEquals(joinCpuLoadBo.getMaxJvmCpuLoad(), 20, 0); assertEquals(joinCpuLoadBo.getSystemCpuLoad(), 50, 0); assertEquals(joinCpuLoadBo.getMinSystemCpuLoad(), 50, 0); assertEquals(joinCpuLoadBo.getMaxSystemCpuLoad(), 50, 0); } private TFAgentStatBatch createTFAgentStatBatch() { final TFAgentStatBatch tFAgentStatBatch = new TFAgentStatBatch(); tFAgentStatBatch.setStartTimestamp(1491274142454L); tFAgentStatBatch.setAgentId(AGENT_ID); final TFAgentStat tFAgentStat = new TFAgentStat(); tFAgentStat.setAgentId(AGENT_ID); tFAgentStat.setTimestamp(1491274143454L); final TFCpuLoad tFCpuLoad = new TFCpuLoad(); tFCpuLoad.setJvmCpuLoad(10); tFCpuLoad.setSystemCpuLoad(30); tFAgentStat.setCpuLoad(tFCpuLoad); final TFAgentStat tFAgentStat2 = new TFAgentStat(); tFAgentStat2.setAgentId(AGENT_ID); tFAgentStat2.setTimestamp(1491274148454L); final TFCpuLoad tFCpuLoad2 = new TFCpuLoad(); tFCpuLoad2.setJvmCpuLoad(20); tFCpuLoad2.setSystemCpuLoad(50); tFAgentStat2.setCpuLoad(tFCpuLoad2); final List<TFAgentStat> tFAgentStatList = new ArrayList<>(2); tFAgentStatList.add(tFAgentStat); tFAgentStatList.add(tFAgentStat2); tFAgentStatBatch.setAgentStats(tFAgentStatList); return tFAgentStatBatch; } @Test public void flatMap2Test() throws Exception { ApplicationCache applicationCache = newMockApplicationCache(); TBaseFlatMapper mapper = new TBaseFlatMapper(new JoinAgentStatBoMapper(), applicationCache); TFAgentStatBatch tfAgentStatBatch = createTFAgentStatBatch2(); ArrayList<Tuple3<String, JoinStatBo, Long>> dataList = new ArrayList<>(); ListCollector<Tuple3<String, JoinStatBo, Long>> collector = new ListCollector<>(dataList); mapper.flatMap(tfAgentStatBatch, collector); assertEquals(dataList.size(), 2); Tuple3<String, JoinStatBo, Long> data1 = dataList.get(0); assertEquals(data1.f0, AGENT_ID); assertEquals(data1.f2.longValue(), 1491274143454L); JoinAgentStatBo joinAgentStatBo = (JoinAgentStatBo) data1.f1; 
assertEquals(joinAgentStatBo.getId(), AGENT_ID); assertEquals(joinAgentStatBo.getAgentStartTimestamp(), 1491274142454L); assertEquals(joinAgentStatBo.getTimestamp(), 1491274143454L); assertJoinMemoryBo(joinAgentStatBo.getJoinMemoryBoList()); Tuple3<String, JoinStatBo, Long> data2 = dataList.get(1); assertEquals(data2.f0, APPLICATION_ID); assertEquals(data2.f2.longValue(), 1491274140000L); JoinApplicationStatBo joinApplicationStatBo = (JoinApplicationStatBo) data2.f1; assertEquals(joinApplicationStatBo.getId(), APPLICATION_ID); assertEquals(joinApplicationStatBo.getTimestamp(), 1491274140000L); assertEquals(joinApplicationStatBo.getStatType(), StatType.APP_STST); assertJoinMemoryBo(joinApplicationStatBo.getJoinMemoryBoList()); } private void assertJoinMemoryBo(List<JoinMemoryBo> joinMemoryBoList) { assertEquals(2, joinMemoryBoList.size()); JoinMemoryBo joinMemoryBo = joinMemoryBoList.get(0); assertEquals(joinMemoryBo.getId(), AGENT_ID); assertEquals(joinMemoryBo.getHeapUsed(), 3000); assertEquals(joinMemoryBo.getNonHeapUsed(), 450); assertEquals(joinMemoryBo.getTimestamp(), 1491274143454L); JoinMemoryBo joinMemoryBo2 = joinMemoryBoList.get(1); assertEquals(joinMemoryBo2.getId(), AGENT_ID); assertEquals(joinMemoryBo2.getHeapUsed(), 2000); assertEquals(joinMemoryBo2.getNonHeapUsed(), 850); assertEquals(joinMemoryBo2.getTimestamp(), 1491274148454L); } private TFAgentStatBatch createTFAgentStatBatch2() { final TFAgentStatBatch tFAgentStatBatch = new TFAgentStatBatch(); tFAgentStatBatch.setStartTimestamp(1491274142454L); tFAgentStatBatch.setAgentId(AGENT_ID); final TFAgentStat tFAgentStat = new TFAgentStat(); tFAgentStat.setAgentId(AGENT_ID); tFAgentStat.setTimestamp(1491274143454L); final TFJvmGc tFJvmGc = new TFJvmGc(); tFJvmGc.setJvmMemoryHeapUsed(3000); tFJvmGc.setJvmMemoryNonHeapUsed(450); tFAgentStat.setGc(tFJvmGc); final TFAgentStat tFAgentStat2 = new TFAgentStat(); tFAgentStat2.setAgentId(AGENT_ID); tFAgentStat2.setTimestamp(1491274148454L); final TFJvmGc 
tFJvmGc2 = new TFJvmGc(); tFJvmGc2.setJvmMemoryHeapUsed(2000); tFJvmGc2.setJvmMemoryNonHeapUsed(850); tFAgentStat2.setGc(tFJvmGc2); final List<TFAgentStat> tFAgentStatList = new ArrayList<>(2); tFAgentStatList.add(tFAgentStat); tFAgentStatList.add(tFAgentStat2); tFAgentStatBatch.setAgentStats(tFAgentStatList); return tFAgentStatBatch; } @Test public void flatMap3Test() throws Exception { ApplicationCache applicationCache = newMockApplicationCache(); TBaseFlatMapper mapper = new TBaseFlatMapper(new JoinAgentStatBoMapper(), applicationCache); TFAgentStatBatch tfAgentStatBatch = createTFAgentStatBatch3(); ArrayList<Tuple3<String, JoinStatBo, Long>> dataList = new ArrayList<>(); ListCollector<Tuple3<String, JoinStatBo, Long>> collector = new ListCollector<>(dataList); mapper.flatMap(tfAgentStatBatch, collector); assertEquals(dataList.size(), 2); Tuple3<String, JoinStatBo, Long> data1 = dataList.get(0); assertEquals(data1.f0, AGENT_ID); assertEquals(data1.f2.longValue(), 1491274143454L); JoinAgentStatBo joinAgentStatBo = (JoinAgentStatBo) data1.f1; assertEquals(joinAgentStatBo.getId(), AGENT_ID); assertEquals(joinAgentStatBo.getAgentStartTimestamp(), 1491274142454L); assertEquals(joinAgentStatBo.getTimestamp(), 1491274143454L); assertJoinTransactionBo(joinAgentStatBo.getJoinTransactionBoList()); Tuple3<String, JoinStatBo, Long> data2 = dataList.get(1); assertEquals(data2.f0, APPLICATION_ID); assertEquals(data2.f2.longValue(), 1491274140000L); JoinApplicationStatBo joinApplicationStatBo = (JoinApplicationStatBo) data2.f1; assertEquals(joinApplicationStatBo.getId(), APPLICATION_ID); assertEquals(joinApplicationStatBo.getTimestamp(), 1491274140000L); assertEquals(joinApplicationStatBo.getStatType(), StatType.APP_STST); assertJoinTransactionBo(joinApplicationStatBo.getJoinTransactionBoList()); } private void assertJoinTransactionBo(List<JoinTransactionBo> joinTransactionBoList) { assertEquals(2, joinTransactionBoList.size()); JoinTransactionBo joinTransactionBo = 
joinTransactionBoList.get(0); assertEquals(joinTransactionBo.getId(), AGENT_ID); assertEquals(joinTransactionBo.getTimestamp(), 1491274143454L); assertEquals(joinTransactionBo.getCollectInterval(), 5000); assertEquals(joinTransactionBo.getTotalCount(), 120); assertEquals(joinTransactionBo.getMaxTotalCount(), 120); assertEquals(joinTransactionBo.getMaxTotalCountAgentId(), AGENT_ID); assertEquals(joinTransactionBo.getMinTotalCount(), 120); assertEquals(joinTransactionBo.getMinTotalCountAgentId(), AGENT_ID); JoinTransactionBo joinTransactionBo2 = joinTransactionBoList.get(1); assertEquals(joinTransactionBo2.getId(), AGENT_ID); assertEquals(joinTransactionBo2.getTimestamp(), 1491274148454L); assertEquals(joinTransactionBo2.getCollectInterval(), 5000); assertEquals(joinTransactionBo2.getTotalCount(), 124); assertEquals(joinTransactionBo2.getMaxTotalCount(), 124); assertEquals(joinTransactionBo2.getMaxTotalCountAgentId(), AGENT_ID); assertEquals(joinTransactionBo2.getMinTotalCount(), 124); assertEquals(joinTransactionBo2.getMinTotalCountAgentId(), AGENT_ID); } private TFAgentStatBatch createTFAgentStatBatch3() { final TFAgentStatBatch tFAgentStatBatch = new TFAgentStatBatch(); tFAgentStatBatch.setStartTimestamp(1491274142454L); tFAgentStatBatch.setAgentId(AGENT_ID); final TFAgentStat tFAgentStat = new TFAgentStat(); tFAgentStat.setAgentId(AGENT_ID); tFAgentStat.setTimestamp(1491274143454L); tFAgentStat.setCollectInterval(5000); final TFTransaction tFTransaction = new TFTransaction(); tFTransaction.setSampledNewCount(10); tFTransaction.setSampledContinuationCount(20); tFTransaction.setUnsampledNewCount(40); tFTransaction.setUnsampledContinuationCount(50); tFAgentStat.setTransaction(tFTransaction); final TFAgentStat tFAgentStat2 = new TFAgentStat(); tFAgentStat2.setAgentId(AGENT_ID); tFAgentStat2.setTimestamp(1491274148454L); tFAgentStat2.setCollectInterval(5000); final TFTransaction tFTransaction2 = new TFTransaction(); tFTransaction2.setSampledNewCount(11); 
tFTransaction2.setSampledContinuationCount(21); tFTransaction2.setUnsampledNewCount(41); tFTransaction2.setUnsampledContinuationCount(51); tFAgentStat2.setTransaction(tFTransaction2); final List<TFAgentStat> tFAgentStatList = new ArrayList<>(2); tFAgentStatList.add(tFAgentStat); tFAgentStatList.add(tFAgentStat2); tFAgentStatBatch.setAgentStats(tFAgentStatList); return tFAgentStatBatch; } @Test public void flatMap4Test() throws Exception { ApplicationCache applicationCache = newMockApplicationCache(); TBaseFlatMapper mapper = new TBaseFlatMapper(new JoinAgentStatBoMapper(), applicationCache); TFAgentStatBatch tfAgentStatBatch = createTFAgentStatBatch4(); ArrayList<Tuple3<String, JoinStatBo, Long>> dataList = new ArrayList<>(); ListCollector<Tuple3<String, JoinStatBo, Long>> collector = new ListCollector<>(dataList); mapper.flatMap(tfAgentStatBatch, collector); assertEquals(dataList.size(), 2); Tuple3<String, JoinStatBo, Long> data1 = dataList.get(0); assertEquals(data1.f0, AGENT_ID); assertEquals(data1.f2.longValue(), 1491274143454L); JoinAgentStatBo joinAgentStatBo = (JoinAgentStatBo) data1.f1; assertEquals(joinAgentStatBo.getId(), AGENT_ID); assertEquals(joinAgentStatBo.getAgentStartTimestamp(), 1491274142454L); assertEquals(joinAgentStatBo.getTimestamp(), 1491274143454L); assertJoinFileDescriptorBo(joinAgentStatBo.getJoinFileDescriptorBoList()); Tuple3<String, JoinStatBo, Long> data2 = dataList.get(1); assertEquals(data2.f0, APPLICATION_ID); assertEquals(data2.f2.longValue(), 1491274140000L); JoinApplicationStatBo joinApplicationStatBo = (JoinApplicationStatBo) data2.f1; assertEquals(joinApplicationStatBo.getId(), APPLICATION_ID); assertEquals(joinApplicationStatBo.getTimestamp(), 1491274140000L); assertEquals(joinApplicationStatBo.getStatType(), StatType.APP_STST); assertJoinFileDescriptorBo(joinApplicationStatBo.getJoinFileDescriptorBoList()); } private void assertJoinFileDescriptorBo(List<JoinFileDescriptorBo> joinFileDescriptorBoList) { assertEquals(2, 
joinFileDescriptorBoList.size()); JoinFileDescriptorBo joinFileDescriptorBo = joinFileDescriptorBoList.get(0); assertEquals(joinFileDescriptorBo.getId(), AGENT_ID); assertEquals(joinFileDescriptorBo.getTimestamp(), 1491274143454L); assertEquals(joinFileDescriptorBo.getAvgOpenFDCount(), 10, 0); assertEquals(joinFileDescriptorBo.getMinOpenFDCount(), 10, 0); assertEquals(joinFileDescriptorBo.getMaxOpenFDCount(), 10, 0); joinFileDescriptorBo = joinFileDescriptorBoList.get(1); assertEquals(joinFileDescriptorBo.getId(), AGENT_ID); assertEquals(joinFileDescriptorBo.getTimestamp(), 1491274148454L); assertEquals(joinFileDescriptorBo.getAvgOpenFDCount(), 20, 0); assertEquals(joinFileDescriptorBo.getMinOpenFDCount(), 20, 0); assertEquals(joinFileDescriptorBo.getMaxOpenFDCount(), 20, 0); } private TFAgentStatBatch createTFAgentStatBatch4() { final TFAgentStatBatch tFAgentStatBatch = new TFAgentStatBatch(); tFAgentStatBatch.setStartTimestamp(1491274142454L); tFAgentStatBatch.setAgentId(AGENT_ID); final TFAgentStat tFAgentStat = new TFAgentStat(); tFAgentStat.setAgentId(AGENT_ID); tFAgentStat.setTimestamp(1491274143454L); final TFFileDescriptor tFFileDescriptor = new TFFileDescriptor(); tFFileDescriptor.setOpenFileDescriptorCount(10); tFAgentStat.setFileDescriptor(tFFileDescriptor); final TFAgentStat tFAgentStat2 = new TFAgentStat(); tFAgentStat2.setAgentId(AGENT_ID); tFAgentStat2.setTimestamp(1491274148454L); final TFFileDescriptor tFFileDescriptor2 = new TFFileDescriptor(); tFFileDescriptor2.setOpenFileDescriptorCount(20); tFAgentStat2.setFileDescriptor(tFFileDescriptor2); final List<TFAgentStat> tFAgentStatList = new ArrayList<>(2); tFAgentStatList.add(tFAgentStat); tFAgentStatList.add(tFAgentStat2); tFAgentStatBatch.setAgentStats(tFAgentStatList); return tFAgentStatBatch; } }
/* * Copyright 2009-2013 by The Regents of the University of California * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * you may obtain a copy of the License from * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.uci.ics.asterix.result; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.io.StringWriter; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.http.ParseException; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import edu.uci.ics.asterix.api.common.SessionConfig; import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat; import edu.uci.ics.asterix.api.http.servlet.APIServlet; import edu.uci.ics.asterix.om.types.ARecordType; import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException; import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor; import edu.uci.ics.hyracks.api.exceptions.HyracksDataException; import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream; public class ResultUtils { private static final Charset UTF_8 = Charset.forName("UTF-8"); static Map<Character, String> HTML_ENTITIES = new HashMap<Character, String>(); static { HTML_ENTITIES.put('"', "&quot;"); HTML_ENTITIES.put('&', "&amp;"); HTML_ENTITIES.put('<', "&lt;"); HTML_ENTITIES.put('>', "&gt;"); } public static String escapeHTML(String s) { for (Character c 
: HTML_ENTITIES.keySet()) { if (s.indexOf(c) >= 0) { s = s.replace(c.toString(), HTML_ENTITIES.get(c)); } } return s; } public static void displayCSVHeader(ARecordType recordType, SessionConfig conf) { // If HTML-ifying, we have to output this here before the header - // pretty ugly if (conf.is(SessionConfig.FORMAT_HTML)) { conf.out().println("<h4>Results:</h4>"); conf.out().println("<pre>"); } String[] fieldNames = recordType.getFieldNames(); boolean notfirst = false; for (String name : fieldNames) { if (notfirst) { conf.out().print(','); } notfirst = true; conf.out().print('"'); conf.out().print(name.replace("\"", "\"\"")); conf.out().print('"'); } conf.out().print("\r\n"); } public static void displayResults(ResultReader resultReader, SessionConfig conf) throws HyracksDataException { IFrameTupleAccessor fta = resultReader.getFrameTupleAccessor(); ByteBuffer buffer = ByteBuffer.allocate(ResultReader.FRAME_SIZE); buffer.clear(); int bytesRead = resultReader.read(buffer); ByteBufferInputStream bbis = new ByteBufferInputStream(); // Whether we need to separate top-level ADM instances with commas boolean need_commas = true; // Whether this is the first instance being output boolean notfirst = false; // If we're outputting CSV with a header, the HTML header was already // output by displayCSVHeader(), so skip it here if (conf.is(SessionConfig.FORMAT_HTML) && ! (conf.fmt() == OutputFormat.CSV && conf.is(SessionConfig.FORMAT_CSV_HEADER))) { conf.out().println("<h4>Results:</h4>"); conf.out().println("<pre>"); } switch (conf.fmt()) { case CSV: need_commas = false; break; case JSON: case ADM: // Conveniently, JSON and ADM have the same syntax for an // "ordered list", and our representation of the result of a // statement is an ordered list of instances. 
conf.out().print("[ "); break; } if (bytesRead > 0) { do { try { fta.reset(buffer); int last = fta.getTupleCount(); String result; for (int tIndex = 0; tIndex < last; tIndex++) { int start = fta.getTupleStartOffset(tIndex); int length = fta.getTupleEndOffset(tIndex) - start; bbis.setByteBuffer(buffer, start); byte[] recordBytes = new byte[length]; int numread = bbis.read(recordBytes, 0, length); if (conf.fmt() == OutputFormat.CSV) { if ( (numread > 0) && (recordBytes[numread-1] == '\n') ) { numread--; } } result = new String(recordBytes, 0, numread, UTF_8); if (need_commas && notfirst) { conf.out().print(", "); } notfirst = true; conf.out().print(result); if (conf.fmt() == OutputFormat.CSV) { conf.out().print("\r\n"); } } buffer.clear(); } finally { try { bbis.close(); } catch (IOException e) { throw new HyracksDataException(e); } } } while (resultReader.read(buffer) > 0); } conf.out().flush(); switch (conf.fmt()) { case JSON: case ADM: conf.out().println(" ]"); break; case CSV: // Nothing to do break; } if (conf.is(SessionConfig.FORMAT_HTML)) { conf.out().println("</pre>"); } } public static JSONObject getErrorResponse(int errorCode, String errorMessage, String errorSummary, String errorStackTrace) { JSONObject errorResp = new JSONObject(); JSONArray errorArray = new JSONArray(); errorArray.put(errorCode); errorArray.put(errorMessage); try { errorResp.put("error-code", errorArray); if (!errorSummary.equals("")) errorResp.put("summary", errorSummary); if (!errorStackTrace.equals("")) errorResp.put("stacktrace", errorStackTrace); } catch (JSONException e) { // TODO(madhusudancs): Figure out what to do when JSONException occurs while building the results. 
} return errorResp; } public static void webUIErrorHandler(PrintWriter out, Exception e) { String errorTemplate = readTemplateFile("/webui/errortemplate.html", "%s\n%s\n%s"); String errorOutput = String.format(errorTemplate, escapeHTML(extractErrorMessage(e)), escapeHTML(extractErrorSummary(e)), escapeHTML(extractFullStackTrace(e))); out.println(errorOutput); } public static void webUIParseExceptionHandler(PrintWriter out, Throwable e, String query) { String errorTemplate = readTemplateFile("/webui/errortemplate_message.html", "<pre class=\"error\">%s\n</pre>"); String errorOutput = String.format(errorTemplate, buildParseExceptionMessage(e, query)); out.println(errorOutput); } public static void apiErrorHandler(PrintWriter out, Exception e) { int errorCode = 99; if (e instanceof ParseException) { errorCode = 2; } else if (e instanceof AlgebricksException) { errorCode = 3; } else if (e instanceof HyracksDataException) { errorCode = 4; } JSONObject errorResp = ResultUtils.getErrorResponse(errorCode, extractErrorMessage(e), extractErrorSummary(e), extractFullStackTrace(e)); out.write(errorResp.toString()); } public static String buildParseExceptionMessage(Throwable e, String query) { StringBuilder errorMessage = new StringBuilder(); String message = e.getMessage(); message = message.replace("<", "&lt"); message = message.replace(">", "&gt"); errorMessage.append("SyntaxError: " + message + "\n"); int pos = message.indexOf("line"); if (pos > 0) { Pattern p = Pattern.compile("\\d+"); Matcher m = p.matcher(message); if (m.find(pos)) { int lineNo = Integer.parseInt(message.substring(m.start(), m.end())); String[] lines = query.split("\n"); if (lineNo > lines.length) { errorMessage.append("===> &ltBLANK LINE&gt \n"); } else { String line = lines[lineNo - 1]; errorMessage.append("==> " + line); } } } return errorMessage.toString(); } private static Throwable getRootCause(Throwable cause) { Throwable nextCause = cause.getCause(); while (nextCause != null) { cause = nextCause; 
nextCause = cause.getCause(); } return cause; } /** * Extract the message in the root cause of the stack trace: * * @param e * @return error message string. */ private static String extractErrorMessage(Throwable e) { Throwable cause = getRootCause(e); String fullyQualifiedExceptionClassName = cause.getClass().getName(); String[] hierarchySplits = fullyQualifiedExceptionClassName.split("\\."); //try returning the class without package qualification String exceptionClassName = hierarchySplits[hierarchySplits.length - 1]; String localizedMessage = cause.getLocalizedMessage(); if(localizedMessage == null){ localizedMessage = "Internal error. Please check instance logs for further details."; } return localizedMessage + " [" + exceptionClassName + "]"; } /** * Extract the meaningful part of a stack trace: * a. the causes in the stack trace hierarchy * b. the top exception for each cause * * @param e * @return the contacted message containing a and b. */ private static String extractErrorSummary(Throwable e) { StringBuilder errorMessageBuilder = new StringBuilder(); Throwable cause = e; errorMessageBuilder.append(cause.getLocalizedMessage()); while (cause != null) { StackTraceElement[] stackTraceElements = cause.getStackTrace(); errorMessageBuilder.append(stackTraceElements.length > 0 ? "\n caused by: " + stackTraceElements[0] : ""); cause = cause.getCause(); } return errorMessageBuilder.toString(); } /** * Extract the full stack trace: * * @param e * @return the string containing the full stack trace of the error. */ private static String extractFullStackTrace(Throwable e) { StringWriter stringWriter = new StringWriter(); PrintWriter printWriter = new PrintWriter(stringWriter); e.printStackTrace(printWriter); return stringWriter.toString(); } /** * Read the template file which is stored as a resource and return its content. If the file does not exist or is * not readable return the default template string. 
* * @param path * The path to the resource template file * @param defaultTemplate * The default template string if the template file does not exist or is not readable * @return The template string to be used to render the output. */ private static String readTemplateFile(String path, String defaultTemplate) { String errorTemplate = defaultTemplate; try { String resourcePath = "/webui/errortemplate_message.html"; InputStream is = APIServlet.class.getResourceAsStream(resourcePath); InputStreamReader isr = new InputStreamReader(is); StringBuilder sb = new StringBuilder(); BufferedReader br = new BufferedReader(isr); String line = br.readLine(); while (line != null) { sb.append(line); line = br.readLine(); } errorTemplate = sb.toString(); } catch (IOException ioe) { // If there is an IOException reading the error template html file, default value of error template is used. } return errorTemplate; } }
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2019 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.metainject; import org.pentaho.di.core.Const; import org.pentaho.di.core.ObjectLocationSpecificationMethod; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettlePluginException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.core.util.CurrentDirectoryResolver; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.HasRepositoryDirectories; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; 
import org.pentaho.di.repository.RepositoryDirectory; import org.pentaho.di.repository.RepositoryDirectoryInterface; import org.pentaho.di.repository.StringObjectId; import org.pentaho.di.resource.ResourceDefinition; import org.pentaho.di.resource.ResourceEntry; import org.pentaho.di.resource.ResourceEntry.ResourceType; import org.pentaho.di.resource.ResourceNamingInterface; import org.pentaho.di.resource.ResourceReference; import org.pentaho.di.trans.ISubTransAwareMeta; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransHopMeta; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaChangeListenerInterface; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; @Step( id = "MetaInject", image = "org/pentaho/di/ui/trans/steps/metainject/img/GenericTransform.svg", name = "i18n:org.pentaho.di.trans.step:BaseStep.TypeLongDesc.MetaInject", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.Flow" ) @InjectionSupported( localizationPrefix = "MetaInject.Injection.", groups = { "SOURCE_OUTPUT_FIELDS", "MAPPING_FIELDS" } ) public class MetaInjectMeta extends BaseStepMeta implements StepMetaInterface, StepMetaChangeListenerInterface, HasRepositoryDirectories, ISubTransAwareMeta { private static Class<?> PKG = MetaInjectMeta.class; // for i18n purposes, needed by Translator2!! 
private static final String MAPPINGS = "mappings"; private static final String MAPPING = "mapping"; private static final String SPECIFICATION_METHOD = "specification_method"; private static final String TRANS_OBJECT_ID = "trans_object_id"; private static final String TRANS_NAME = "trans_name"; private static final String FILENAME = "filename"; private static final String DIRECTORY_PATH = "directory_path"; private static final String TARGET_FILE = "target_file"; private static final String NO_EXECUTION = "no_execution"; private static final String SOURCE_STEP = "source_step"; private static final String STREAM_SOURCE_STEP = "stream_source_step"; private static final String STREAM_TARGET_STEP = "stream_target_step"; private static final String TARGET_STEP_NAME = "target_step_name"; private static final String TARGET_ATTRIBUTE_KEY = "target_attribute_key"; private static final String TARGET_DETAIL = "target_detail"; private static final String SOURCE_FIELD = "source_field"; private static final String SOURCE_OUTPUT_FIELDS = "source_output_fields"; private static final String SOURCE_OUTPUT_FIELD = "source_output_field"; private static final String SOURCE_OUTPUT_FIELD_NAME = "source_output_field_name"; private static final String SOURCE_OUTPUT_FIELD_TYPE = "source_output_field_type"; private static final String SOURCE_OUTPUT_FIELD_LENGTH = "source_output_field_length"; private static final String SOURCE_OUTPUT_FIELD_PRECISION = "source_output_field_precision"; private static final String MAPPING_SOURCE_FIELD = "mapping_source_field"; private static final String MAPPING_SOURCE_STEP = "mapping_source_step"; private static final String MAPPING_TARGET_DETAIL = "mapping_target_detail"; private static final String MAPPING_TARGET_ATTRIBUTE_KEY = "mapping_target_attribute_key"; private static final String MAPPING_TARGET_STEP_NAME = "mapping_target_step_name"; private static final String GROUP_AND_NAME_DELIMITER = "."; // description of the transformation to execute... 
// @Injection( name = "TRANS_NAME" ) private String transName; @Injection( name = "FILE_NAME" ) private String fileName; @Injection( name = "DIRECTORY_PATH" ) private String directoryPath; private ObjectId transObjectId; @Injection( name = "TRANS_SEPECIFICATION_METHOD" ) private ObjectLocationSpecificationMethod specificationMethod; @Injection( name = "SOURCE_STEP_NAME" ) private String sourceStepName; @InjectionDeep private List<MetaInjectOutputField> sourceOutputFields; private Map<TargetStepAttribute, SourceStepField> targetSourceMapping; @InjectionDeep private List<MetaInjectMapping> metaInjectMapping; @Injection( name = "TARGET_FILE" ) private String targetFile; @Injection( name = "NO_EXECUTION" ) private boolean noExecution; @Injection( name = "STREAMING_SOURCE_STEP" ) private String streamSourceStepname; private StepMeta streamSourceStep; @Injection( name = "STREAMING_TARGET_STEP" ) private String streamTargetStepname; public MetaInjectMeta() { super(); // allocate BaseStepMeta specificationMethod = ObjectLocationSpecificationMethod.FILENAME; targetSourceMapping = new HashMap<TargetStepAttribute, SourceStepField>(); sourceOutputFields = new ArrayList<MetaInjectOutputField>(); } // TODO: deep copy @Override public Object clone() { Object retval = super.clone(); return retval; } @Override public void setDefault() { } @Override public String getXML() { actualizeMetaInjectMapping(); StringBuilder retval = new StringBuilder( 500 ); retval.append( " " ).append( XMLHandler.addTagValue( SPECIFICATION_METHOD, specificationMethod == null ? null : specificationMethod.getCode() ) ); retval.append( " " ).append( XMLHandler.addTagValue( TRANS_OBJECT_ID, transObjectId == null ? 
null : transObjectId.toString() ) ); retval.append( " " ).append( XMLHandler.addTagValue( TRANS_NAME, transName ) ); retval.append( " " ).append( XMLHandler.addTagValue( FILENAME, fileName ) ); retval.append( " " ).append( XMLHandler.addTagValue( DIRECTORY_PATH, directoryPath ) ); retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_STEP, sourceStepName ) ); retval.append( " " ).append( XMLHandler.openTag( SOURCE_OUTPUT_FIELDS ) ); for ( MetaInjectOutputField field : sourceOutputFields ) { retval.append( " " ).append( XMLHandler.openTag( SOURCE_OUTPUT_FIELD ) ); retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_OUTPUT_FIELD_NAME, field.getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_OUTPUT_FIELD_TYPE, field .getTypeDescription() ) ); retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_OUTPUT_FIELD_LENGTH, field.getLength() ) ); retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_OUTPUT_FIELD_PRECISION, field .getPrecision() ) ); retval.append( " " ).append( XMLHandler.closeTag( SOURCE_OUTPUT_FIELD ) ); } retval.append( " " ).append( XMLHandler.closeTag( SOURCE_OUTPUT_FIELDS ) ); retval.append( " " ).append( XMLHandler.addTagValue( TARGET_FILE, targetFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( NO_EXECUTION, noExecution ) ); if ( ( streamSourceStepname == null ) && ( streamSourceStep != null ) ) { streamSourceStepname = streamSourceStep.getName(); } retval.append( " " ).append( XMLHandler.addTagValue( STREAM_SOURCE_STEP, streamSourceStepname ) ); retval.append( " " ).append( XMLHandler.addTagValue( STREAM_TARGET_STEP, streamTargetStepname ) ); retval.append( " " ).append( XMLHandler.openTag( MAPPINGS ) ); for ( TargetStepAttribute target : targetSourceMapping.keySet() ) { retval.append( " " ).append( XMLHandler.openTag( MAPPING ) ); SourceStepField source = targetSourceMapping.get( target ); retval.append( " " ).append( XMLHandler.addTagValue( TARGET_STEP_NAME, target.getStepname() ) ); 
retval.append( " " ).append( XMLHandler.addTagValue( TARGET_ATTRIBUTE_KEY, target.getAttributeKey() ) ); retval.append( " " ).append( XMLHandler.addTagValue( TARGET_DETAIL, target.isDetail() ) ); retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_STEP, source.getStepname() ) ); retval.append( " " ).append( XMLHandler.addTagValue( SOURCE_FIELD, source.getField() ) ); retval.append( " " ).append( XMLHandler.closeTag( MAPPING ) ); } retval.append( " " ).append( XMLHandler.closeTag( MAPPINGS ) ); return retval.toString(); } @Override public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { try { String method = XMLHandler.getTagValue( stepnode, SPECIFICATION_METHOD ); specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method ); String transId = XMLHandler.getTagValue( stepnode, TRANS_OBJECT_ID ); transObjectId = Utils.isEmpty( transId ) ? null : new StringObjectId( transId ); transName = XMLHandler.getTagValue( stepnode, TRANS_NAME ); fileName = XMLHandler.getTagValue( stepnode, FILENAME ); directoryPath = XMLHandler.getTagValue( stepnode, DIRECTORY_PATH ); sourceStepName = XMLHandler.getTagValue( stepnode, SOURCE_STEP ); Node outputFieldsNode = XMLHandler.getSubNode( stepnode, SOURCE_OUTPUT_FIELDS ); List<Node> outputFieldNodes = XMLHandler.getNodes( outputFieldsNode, SOURCE_OUTPUT_FIELD ); sourceOutputFields = new ArrayList<MetaInjectOutputField>(); for ( Node outputFieldNode : outputFieldNodes ) { String name = XMLHandler.getTagValue( outputFieldNode, SOURCE_OUTPUT_FIELD_NAME ); String typeName = XMLHandler.getTagValue( outputFieldNode, SOURCE_OUTPUT_FIELD_TYPE ); int length = Const.toInt( XMLHandler.getTagValue( outputFieldNode, SOURCE_OUTPUT_FIELD_LENGTH ), -1 ); int precision = Const.toInt( XMLHandler.getTagValue( outputFieldNode, SOURCE_OUTPUT_FIELD_PRECISION ), -1 ); int type = ValueMetaFactory.getIdForValueMeta( typeName ); sourceOutputFields.add( new 
MetaInjectOutputField( name, type, length, precision ) ); } targetFile = XMLHandler.getTagValue( stepnode, TARGET_FILE ); noExecution = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, NO_EXECUTION ) ); streamSourceStepname = XMLHandler.getTagValue( stepnode, STREAM_SOURCE_STEP ); streamTargetStepname = XMLHandler.getTagValue( stepnode, STREAM_TARGET_STEP ); Node mappingsNode = XMLHandler.getSubNode( stepnode, MAPPINGS ); int nrMappings = XMLHandler.countNodes( mappingsNode, MAPPING ); for ( int i = 0; i < nrMappings; i++ ) { Node mappingNode = XMLHandler.getSubNodeByNr( mappingsNode, MAPPING, i ); String targetStepname = XMLHandler.getTagValue( mappingNode, TARGET_STEP_NAME ); String targetAttributeKey = XMLHandler.getTagValue( mappingNode, TARGET_ATTRIBUTE_KEY ); boolean targetDetail = "Y".equalsIgnoreCase( XMLHandler.getTagValue( mappingNode, TARGET_DETAIL ) ); String sourceStepname = XMLHandler.getTagValue( mappingNode, SOURCE_STEP ); String sourceField = XMLHandler.getTagValue( mappingNode, SOURCE_FIELD ); TargetStepAttribute target = new TargetStepAttribute( targetStepname, targetAttributeKey, targetDetail ); SourceStepField source = new SourceStepField( sourceStepname, sourceField ); targetSourceMapping.put( target, source ); } MetaInjectMigration.migrateFrom70( targetSourceMapping ); } catch ( Exception e ) { throw new KettleXMLException( "Unable to load step info from XML", e ); } } @Override public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { String method = rep.getStepAttributeString( id_step, SPECIFICATION_METHOD ); specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method ); String transId = rep.getStepAttributeString( id_step, TRANS_OBJECT_ID ); transObjectId = Utils.isEmpty( transId ) ? 
null : new StringObjectId( transId ); transName = rep.getStepAttributeString( id_step, TRANS_NAME ); fileName = rep.getStepAttributeString( id_step, FILENAME ); directoryPath = rep.getStepAttributeString( id_step, DIRECTORY_PATH ); sourceStepName = rep.getStepAttributeString( id_step, SOURCE_STEP ); streamSourceStepname = rep.getStepAttributeString( id_step, STREAM_SOURCE_STEP ); streamTargetStepname = rep.getStepAttributeString( id_step, STREAM_TARGET_STEP ); sourceOutputFields = new ArrayList<MetaInjectOutputField>(); int nrSourceOutputFields = rep.countNrStepAttributes( id_step, SOURCE_OUTPUT_FIELD_NAME ); for ( int i = 0; i < nrSourceOutputFields; i++ ) { String name = rep.getStepAttributeString( id_step, i, SOURCE_OUTPUT_FIELD_NAME ); String typeName = rep.getStepAttributeString( id_step, i, SOURCE_OUTPUT_FIELD_TYPE ); int length = (int) rep.getStepAttributeInteger( id_step, i, SOURCE_OUTPUT_FIELD_LENGTH ); int precision = (int) rep.getStepAttributeInteger( id_step, i, SOURCE_OUTPUT_FIELD_PRECISION ); int type = ValueMetaFactory.getIdForValueMeta( typeName ); sourceOutputFields.add( new MetaInjectOutputField( name, type, length, precision ) ); } targetFile = rep.getStepAttributeString( id_step, TARGET_FILE ); noExecution = rep.getStepAttributeBoolean( id_step, NO_EXECUTION ); int nrMappings = rep.countNrStepAttributes( id_step, MAPPING_TARGET_STEP_NAME ); for ( int i = 0; i < nrMappings; i++ ) { String targetStepname = rep.getStepAttributeString( id_step, i, MAPPING_TARGET_STEP_NAME ); String targetAttributeKey = rep.getStepAttributeString( id_step, i, MAPPING_TARGET_ATTRIBUTE_KEY ); boolean targetDetail = rep.getStepAttributeBoolean( id_step, i, MAPPING_TARGET_DETAIL ); String sourceStepname = rep.getStepAttributeString( id_step, i, MAPPING_SOURCE_STEP ); String sourceField = rep.getStepAttributeString( id_step, i, MAPPING_SOURCE_FIELD ); TargetStepAttribute target = new TargetStepAttribute( targetStepname, targetAttributeKey, targetDetail ); SourceStepField 
source = new SourceStepField( sourceStepname, sourceField ); targetSourceMapping.put( target, source ); } MetaInjectMigration.migrateFrom70( targetSourceMapping ); } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } @Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveStepAttribute( id_transformation, id_step, SPECIFICATION_METHOD, specificationMethod == null ? null : specificationMethod.getCode() ); rep.saveStepAttribute( id_transformation, id_step, TRANS_OBJECT_ID, transObjectId == null ? null : transObjectId .toString() ); rep.saveStepAttribute( id_transformation, id_step, FILENAME, fileName ); rep.saveStepAttribute( id_transformation, id_step, TRANS_NAME, transName ); rep.saveStepAttribute( id_transformation, id_step, DIRECTORY_PATH, directoryPath ); rep.saveStepAttribute( id_transformation, id_step, SOURCE_STEP, sourceStepName ); rep.saveStepAttribute( id_transformation, id_step, STREAM_SOURCE_STEP, streamSourceStepname ); rep.saveStepAttribute( id_transformation, id_step, STREAM_TARGET_STEP, streamTargetStepname ); MetaInjectOutputField aField = null; for ( int i = 0; i < sourceOutputFields.size(); i++ ) { aField = sourceOutputFields.get( i ); rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_OUTPUT_FIELD_NAME, aField.getName() ); rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_OUTPUT_FIELD_TYPE, aField.getTypeDescription() ); rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_OUTPUT_FIELD_LENGTH, aField.getLength() ); rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_OUTPUT_FIELD_PRECISION, aField.getPrecision() ); } rep.saveStepAttribute( id_transformation, id_step, TARGET_FILE, targetFile ); rep.saveStepAttribute( id_transformation, id_step, NO_EXECUTION, noExecution ); List<TargetStepAttribute> keySet = new 
ArrayList<TargetStepAttribute>( targetSourceMapping.keySet() ); for ( int i = 0; i < keySet.size(); i++ ) { TargetStepAttribute target = keySet.get( i ); SourceStepField source = targetSourceMapping.get( target ); rep.saveStepAttribute( id_transformation, id_step, i, MAPPING_TARGET_STEP_NAME, target.getStepname() ); rep.saveStepAttribute( id_transformation, id_step, i, MAPPING_TARGET_ATTRIBUTE_KEY, target.getAttributeKey() ); rep.saveStepAttribute( id_transformation, id_step, i, MAPPING_TARGET_DETAIL, target.isDetail() ); rep.saveStepAttribute( id_transformation, id_step, i, MAPPING_SOURCE_STEP, source.getStepname() ); rep.saveStepAttribute( id_transformation, id_step, i, MAPPING_SOURCE_FIELD, source.getField() ); } } catch ( Exception e ) { throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e ); } } @Override public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { rowMeta.clear(); // No defined output is expected from this step. 
if ( !Utils.isEmpty( sourceStepName ) ) { for ( MetaInjectOutputField field : sourceOutputFields ) { try { rowMeta.addValueMeta( field.createValueMeta() ); } catch ( KettlePluginException e ) { throw new KettleStepException( "Error creating value meta for output field '" + field.getName() + "'", e ); } } } } @Override public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans ) { return new MetaInject( stepMeta, stepDataInterface, cnr, tr, trans ); } @Override public StepDataInterface getStepData() { return new MetaInjectData(); } public Map<TargetStepAttribute, SourceStepField> getTargetSourceMapping() { return targetSourceMapping; } public void setTargetSourceMapping( Map<TargetStepAttribute, SourceStepField> targetSourceMapping ) { this.targetSourceMapping = targetSourceMapping; } /** * @return the transName */ public String getTransName() { return transName; } /** * @param transName the transName to set */ public void setTransName( String transName ) { this.transName = transName; } /** * @return the fileName */ public String getFileName() { return fileName; } /** * @param fileName the fileName to set */ public void setFileName( String fileName ) { this.fileName = fileName; } /** * @return the directoryPath */ public String getDirectoryPath() { return directoryPath; } /** * @param directoryPath the directoryPath to set */ public void setDirectoryPath( String directoryPath ) { this.directoryPath = directoryPath; } @Override public String[] getDirectories() { return new String[]{ directoryPath }; } @Override public void setDirectories( String[] directories ) { this.directoryPath = directories[0]; } /** * @return the transObjectId */ public ObjectId getTransObjectId() { return transObjectId; } /** * @param transObjectId the transObjectId to set */ public void setTransObjectId( ObjectId transObjectId ) { this.transObjectId = transObjectId; } @Injection( name = "TRANS_OBJECT_ID" ) public void 
setTransStringObjectId( String transStringObjectId ) { this.transObjectId = new StringObjectId( transStringObjectId ); } /** * @return the specificationMethod */ public ObjectLocationSpecificationMethod getSpecificationMethod() { return specificationMethod; } @Override public ObjectLocationSpecificationMethod[] getSpecificationMethods() { return new ObjectLocationSpecificationMethod[] { specificationMethod }; } /** * @param specificationMethod the specificationMethod to set */ public void setSpecificationMethod( ObjectLocationSpecificationMethod specificationMethod ) { this.specificationMethod = specificationMethod; } @Override public TransMeta fetchTransMeta( StepMetaInterface stepMeta, Repository rep, IMetaStore metastore, VariableSpace space ) throws KettleException { return ( stepMeta != null && stepMeta instanceof MetaInjectMeta ) ? loadTransformationMeta( (MetaInjectMeta) stepMeta, rep, metastore, space ) : null; } @Deprecated public static final synchronized TransMeta loadTransformationMeta( MetaInjectMeta mappingMeta, Repository rep, VariableSpace space ) throws KettleException { return loadTransformationMeta( mappingMeta, rep, null, space ); } public static final synchronized TransMeta loadTransformationMeta( MetaInjectMeta injectMeta, Repository rep, IMetaStore metaStore, VariableSpace space ) throws KettleException { TransMeta mappingTransMeta = null; CurrentDirectoryResolver resolver = new CurrentDirectoryResolver(); VariableSpace tmpSpace = resolver.resolveCurrentDirectory( injectMeta.getSpecificationMethod(), space, rep, injectMeta .getParentStepMeta(), injectMeta.getFileName() ); switch ( injectMeta.getSpecificationMethod() ) { case FILENAME: String realFilename = tmpSpace.environmentSubstitute( injectMeta.getFileName() ); try { // OK, load the meta-data from file... // // Don't set internal variables: they belong to the parent thread! 
// if ( rep != null ) { // need to try to load from the repository realFilename = resolver.normalizeSlashes( realFilename ); try { String dirStr = realFilename.substring( 0, realFilename.lastIndexOf( "/" ) ); String tmpFilename = realFilename.substring( realFilename.lastIndexOf( "/" ) + 1 ); RepositoryDirectoryInterface dir = rep.findDirectory( dirStr ); mappingTransMeta = rep.loadTransformation( tmpFilename, dir, null, true, null ); } catch ( KettleException ke ) { // try without extension if ( realFilename.endsWith( Const.STRING_TRANS_DEFAULT_EXT ) ) { try { String tmpFilename = realFilename.substring( realFilename.lastIndexOf( "/" ) + 1, realFilename.indexOf( "." + Const.STRING_TRANS_DEFAULT_EXT ) ); String dirStr = realFilename.substring( 0, realFilename.lastIndexOf( "/" ) ); RepositoryDirectoryInterface dir = rep.findDirectory( dirStr ); mappingTransMeta = rep.loadTransformation( tmpFilename, dir, null, true, null ); } catch ( KettleException ke2 ) { // fall back to try loading from file system (transMeta is going to be null) } } } } if ( mappingTransMeta == null ) { mappingTransMeta = new TransMeta( realFilename, metaStore, rep, false, tmpSpace, null ); mappingTransMeta.getLogChannel().logDetailed( "Loading Mapping from repository", "Mapping transformation was loaded from XML file [" + realFilename + "]" ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "MetaInjectMeta.Exception.UnableToLoadTransformationFromFile", realFilename ), e ); } break; case REPOSITORY_BY_NAME: String realTransname = tmpSpace.environmentSubstitute( injectMeta.getTransName() ); String realDirectory = tmpSpace.environmentSubstitute( injectMeta.getDirectoryPath() ); if ( rep != null ) { if ( !Utils.isEmpty( realTransname ) && !Utils.isEmpty( realDirectory ) && rep != null ) { RepositoryDirectoryInterface repdir = rep.findDirectory( realDirectory ); if ( repdir != null ) { try { // reads the last revision in the repository... 
// // TODO: FIXME: see if we need to pass external MetaStore references to the repository? // mappingTransMeta = rep.loadTransformation( realTransname, repdir, null, true, null ); mappingTransMeta.getLogChannel().logDetailed( "Loading Mapping from repository", "Mapping transformation [" + realTransname + "] was loaded from the repository" ); } catch ( Exception e ) { throw new KettleException( "Unable to load transformation [" + realTransname + "]", e ); } } else { throw new KettleException( BaseMessages.getString( PKG, "MetaInjectMeta.Exception.UnableToLoadTransformationFromRepository", realTransname, realDirectory ) ); } } } else { try { mappingTransMeta = new TransMeta( realDirectory + "/" + realTransname, metaStore, rep, true, tmpSpace, null ); } catch ( KettleException ke ) { try { // add .ktr extension and try again mappingTransMeta = new TransMeta( realDirectory + "/" + realTransname + "." + Const.STRING_TRANS_DEFAULT_EXT, metaStore, rep, true, tmpSpace, null ); } catch ( KettleException ke2 ) { throw new KettleException( BaseMessages.getString( PKG, "StepWithMappingMeta.Exception.UnableToLoadTrans", realTransname ) + realDirectory ); } } } break; case REPOSITORY_BY_REFERENCE: // Read the last revision by reference... 
mappingTransMeta = rep.loadTransformation( injectMeta.getTransObjectId(), null ); break; default: break; } // Pass some important information to the mapping transformation metadata: // mappingTransMeta.copyVariablesFrom( space ); mappingTransMeta.setRepository( rep ); mappingTransMeta.setFilename( mappingTransMeta.getFilename() ); return mappingTransMeta; } /** * package-local visibility for testing purposes */ TransMeta loadTransformationMeta( Repository rep, VariableSpace space ) throws KettleException { return MetaInjectMeta.loadTransformationMeta( this, rep, null, space ); } @Override public List<ResourceReference> getResourceDependencies( TransMeta transMeta, StepMeta stepInfo ) { List<ResourceReference> references = new ArrayList<ResourceReference>( 5 ); String realFilename = transMeta.environmentSubstitute( fileName ); String realTransname = transMeta.environmentSubstitute( transName ); String realDirectoryPath = transMeta.environmentSubstitute( directoryPath ); ResourceReference reference = new ResourceReference( stepInfo ); references.add( reference ); if ( !Utils.isEmpty( realFilename ) ) { // Add the filename to the references, including a reference to this step // meta data. // reference.getEntries().add( new ResourceEntry( realFilename, ResourceType.ACTIONFILE ) ); } else if ( !Utils.isEmpty( realTransname ) ) { // Add the trans name (including full repository path) to dependencies String realTransformation = realDirectoryPath + "/" + realTransname; reference.getEntries().add( new ResourceEntry( realTransformation, ResourceType.ACTIONFILE ) ); } return references; } @Override public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { try { // Try to load the transformation from repository or file. // Modify this recursively too... 
// // NOTE: there is no need to clone this step because the caller is // responsible for this. // // First load the executor transformation metadata... // TransMeta executorTransMeta = loadTransformationMeta( repository, space ); // Also go down into the mapping transformation and export the files // there. (mapping recursively down) // String proposedNewFilename = executorTransMeta.exportResources( executorTransMeta, definitions, resourceNamingInterface, repository, metaStore ); // To get a relative path to it, we inject // ${Internal.Entry.Current.Directory} // String newFilename = "${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}/" + proposedNewFilename; // Set the correct filename inside the XML. // executorTransMeta.setFilename( newFilename ); // exports always reside in the root directory, in case we want to turn // this into a file repository... // executorTransMeta.setRepositoryDirectory( new RepositoryDirectory() ); // change it in the entry // fileName = newFilename; setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME ); return proposedNewFilename; } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "MetaInjectMeta.Exception.UnableToLoadTrans", fileName ) ); } } @Override public boolean excludeFromCopyDistributeVerification() { return true; } @Override public boolean excludeFromRowLayoutVerification() { return true; } /** * @return the sourceStepName */ public String getSourceStepName() { return sourceStepName; } /** * @param sourceStepName the sourceStepName to set */ public void setSourceStepName( String sourceStepName ) { this.sourceStepName = sourceStepName; } /** * @return the targetFile */ public String getTargetFile() { return targetFile; } /** * @param targetFile the targetFile to set */ public void setTargetFile( String targetFile ) { this.targetFile = targetFile; } /** * @return the noExecution */ public boolean isNoExecution() { return noExecution; } /** * @param noExecution the noExecution 
to set */ public void setNoExecution( boolean noExecution ) { this.noExecution = noExecution; } /** * @return The objects referenced in the step, like a mapping, a transformation, a job, ... */ @Override public String[] getReferencedObjectDescriptions() { return new String[] { BaseMessages.getString( PKG, "MetaInjectMeta.ReferencedObject.Description" ), }; } private boolean isTransformationDefined() { return !Utils.isEmpty( fileName ) || transObjectId != null || ( !Utils.isEmpty( this.directoryPath ) && !Const .isEmpty( transName ) ); } @Override public boolean[] isReferencedObjectEnabled() { return new boolean[] { isTransformationDefined(), }; } @Override public String getActiveReferencedObjectDescription() { return BaseMessages.getString( PKG, "MetaInjectMeta.ReferencedObjectAfterInjection.Description" ); } @Override @Deprecated public Object loadReferencedObject( int index, Repository rep, VariableSpace space ) throws KettleException { return loadReferencedObject( index, rep, null, space ); } /** * Load the referenced object * * @param index the object index to load * @param rep the repository * @param metaStore metaStore * @param space the variable space to use * @return the referenced object once loaded * @throws KettleException */ @Override public Object loadReferencedObject( int index, Repository rep, IMetaStore metaStore, VariableSpace space ) throws KettleException { return loadTransformationMeta( this, rep, metaStore, space ); } public String getStreamSourceStepname() { return streamSourceStepname; } public void setStreamSourceStepname( String streamSourceStepname ) { this.streamSourceStepname = streamSourceStepname; } public StepMeta getStreamSourceStep() { return streamSourceStep; } public void setStreamSourceStep( StepMeta streamSourceStep ) { this.streamSourceStep = streamSourceStep; } public String getStreamTargetStepname() { return streamTargetStepname; } public void setStreamTargetStepname( String streamTargetStepname ) { this.streamTargetStepname 
= streamTargetStepname; } @Override public void searchInfoAndTargetSteps( List<StepMeta> steps ) { streamSourceStep = StepMeta.findStep( steps, streamSourceStepname ); } public List<MetaInjectOutputField> getSourceOutputFields() { return sourceOutputFields; } public void setSourceOutputFields( List<MetaInjectOutputField> sourceOutputFields ) { this.sourceOutputFields = sourceOutputFields; } public List<MetaInjectMapping> getMetaInjectMapping() { return metaInjectMapping; } public void setMetaInjectMapping( List<MetaInjectMapping> metaInjectMapping ) { this.metaInjectMapping = metaInjectMapping; } public void actualizeMetaInjectMapping() { if ( metaInjectMapping == null || metaInjectMapping.isEmpty() ) { return; } Map<TargetStepAttribute, SourceStepField> targetToSourceMap = convertToMap( metaInjectMapping ); setTargetSourceMapping( targetToSourceMap ); } /** * package-local visibility for testing purposes */ static Map<TargetStepAttribute, SourceStepField> convertToMap( List<MetaInjectMapping> metaInjectMapping ) { Map<TargetStepAttribute, SourceStepField> targetToSourceMap = new HashMap<TargetStepAttribute, SourceStepField>(); for ( MetaInjectMapping mappingEntry : metaInjectMapping ) { if ( !isMappingEntryFilled( mappingEntry ) ) { continue; } TargetStepAttribute targetStepAttribute = createTargetStepAttribute( mappingEntry ); SourceStepField sourceStepField = createSourceStepField( mappingEntry ); targetToSourceMap.put( targetStepAttribute, sourceStepField ); } return targetToSourceMap; } private static TargetStepAttribute createTargetStepAttribute( MetaInjectMapping mappingEntry ) { String targetFieldName = mappingEntry.getTargetField(); if ( targetFieldName.contains( GROUP_AND_NAME_DELIMITER ) ) { String[] targetFieldGroupAndName = targetFieldName.split( "\\" + GROUP_AND_NAME_DELIMITER ); return new TargetStepAttribute( mappingEntry.getTargetStep(), targetFieldGroupAndName[ 1 ], true ); } return new TargetStepAttribute( mappingEntry.getTargetStep(), 
mappingEntry.getTargetField(), false ); } private static boolean isMappingEntryFilled( MetaInjectMapping mappingEntry ) { if ( mappingEntry.getSourceStep() == null || mappingEntry.getSourceField() == null || mappingEntry .getTargetStep() == null || mappingEntry.getTargetField() == null ) { return false; } return true; } private static SourceStepField createSourceStepField( MetaInjectMapping mappingEntry ) { return new SourceStepField( mappingEntry.getSourceStep(), mappingEntry.getSourceField() ); } @Override public void onStepChange( TransMeta transMeta, StepMeta oldMeta, StepMeta newMeta ) { for ( int i = 0; i < transMeta.nrTransHops(); i++ ) { TransHopMeta hopMeta = transMeta.getTransHop( i ); if ( hopMeta.getFromStep().equals( oldMeta ) ) { StepMeta toStepMeta = hopMeta.getToStep(); if ( ( toStepMeta.getStepMetaInterface() instanceof MetaInjectMeta ) && ( toStepMeta.equals( this .getParentStepMeta() ) ) ) { MetaInjectMeta toMeta = (MetaInjectMeta) toStepMeta.getStepMetaInterface(); Map<TargetStepAttribute, SourceStepField> sourceMapping = toMeta.getTargetSourceMapping(); for ( Entry<TargetStepAttribute, SourceStepField> entry : sourceMapping.entrySet() ) { SourceStepField value = entry.getValue(); if ( value.getStepname() != null && value.getStepname().equals( oldMeta.getName() ) ) { value.setStepname( newMeta.getName() ); } } } } } } }
/* * Copyright 2015 - 2016 Xyanid * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package de.saxsys.styleablefx.core; import javafx.application.Application; import javafx.css.Styleable; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Control; import javafx.scene.layout.Pane; import javafx.stage.Stage; import org.junit.BeforeClass; import java.util.function.Consumer; import static javafx.application.Application.launch; import static javafx.application.Platform.runLater; /** * This class will setup JavaFX so UI components can be created without causing the ExceptionInInitializerError. * * @author Xyanid on 10.01.2016. 
*/ public class BaseUITest { //region Sets up javFX so that test so UI elements can be created public static class AsNonApp extends Application { @Override public void start(Stage primaryStage) throws Exception {} } /** * This thread will start the javaFX init thread so it can be used by any other UI thread */ private static final Thread JAVA_FX_INIT_THREAD = new Thread("JavaFX Init Thread") { public void run() { launch(AsNonApp.class); System.out.printf("FX App thread finished\n"); } }; @BeforeClass public static void setUpClass() throws InterruptedException { if (!JAVA_FX_INIT_THREAD.isAlive()) { System.out.printf("About to launch FX App\n"); JAVA_FX_INIT_THREAD.setDaemon(true); JAVA_FX_INIT_THREAD.start(); System.out.printf("FX App thread started\n"); //FIXME not pretty but we need to wait until java fx thread is setup Thread.sleep(500); } } //endregion //region Testing of Styles can be applied to Controls or Nodes /** * Sets up a basic scene for the given {@link Control} and then adds a stylesheet called. There are a couple of rules the stylesheet has to * follow in order for this method to work. The rule are as follows. * <ul> * <li>the name of the stylesheet must be |classname of the skin| + "ClassSelector"</li> * <li>the stylesheet must be in a sub folder in the resources named like resourceRoot</li> * <li>if resource root is submit and not null, it determines the root path of the stylesheet in the resources</li> * </ul> * * @param parent the {@link Control} that is to be checked. * @param resourceRoot the * @param consumeBeforeApply the {@link Consumer} which will be called before the actual style are applied to the {@link javafx.scene.control.Skin}. In * this you should check if the state of the {@link javafx.scene.control.Skin} is as expected. * @param consumerAfterApply the {@link Consumer} which will be called after the actual style are applied to the {@link javafx.scene.control.Skin}. 
In * this you should check if the state of the {@link javafx.scene.control.Skin} has been changed as expected. * @param <TControl> the type of the {@link Control}. */ public static <TControl extends Control> void assertSkinStyleableViaClassSelector(final TControl parent, final String resourceRoot, final Consumer<TControl> consumeBeforeApply, final Consumer<TControl> consumerAfterApply) { assertIsStyleableViaClassSelector(parent, resourceRoot, parent.getSkin().getClass(), consumeBeforeApply, consumerAfterApply); } /** * Sets up a basic scene for the given {@link Control} to and then adds a stylesheet called. There are a couple of rules the stylesheet has to * follow in order for this method to work. The rule are as follows. * <ul> * <li>the name of the stylesheet must be |classname| + IdSelector</li> * <li>the stylesheet must be in a sub folder in the resources named like resourceRoot</li> * <li>the id in the stylesheet must be "id"</li> * <li>if resource root is submit and not null, it determines the root path of the stylesheet in the resources</li> * </ul> * * @param parent the {@link Parent} that is to be checked. * @param resourceRoot the * @param consumeBeforeApply the {@link Consumer} which will be called before the actual style are applied to the given {@link javafx.scene.control.Skin}. * In this you should check if the state of the {@link javafx.scene.control.Skin} is as expected. * @param consumerAfterApply the {@link Consumer} which will be called after the actual style are applied to the given {@link javafx.scene.control.Skin}. * In this you should check if the state of the {@link javafx.scene.control.Skin} has been changed as expected. * @param <TControl> the type of the {@link Control}. 
*/ public static <TControl extends Control> void assertSkinStyleableViaIdSelector(final TControl parent, final String resourceRoot, final Consumer<TControl> consumeBeforeApply, final Consumer<TControl> consumerAfterApply) { parent.setId("#id"); assertStyleableViaIdSelector(parent, resourceRoot, parent.getSkin().getClass(), consumeBeforeApply, consumerAfterApply); } /** * Sets up a basic scene for the given {@link Parent} and then adds a stylesheet. There are a couple of rules the stylesheet has to follow in order * for this method to work. The rule are as follows. * <ul> * <li>the name of the stylesheet must be |classname| + "ClassSelector"</li> * <li>the stylesheet must be in a sub folder in the resources named like resourceRoot</li> * <li>if resource root is submit and not null, it determines the root path of the stylesheet in the resources</li> * </ul> * * @param parent the {@link Parent} that is to be checked. * @param resourceRoot the * @param consumeBeforeApply the {@link Consumer} which will be called before the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} is as expected. * @param consumerAfterApply the {@link Consumer} which will be called after the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} has been changed as expected. * @param <TParent> the type of the {@link Parent}. */ public static <TParent extends Parent> void assertParentStyleableViaClassSelector(final TParent parent, final String resourceRoot, final Consumer<TParent> consumeBeforeApply, final Consumer<TParent> consumerAfterApply) { assertIsStyleableViaClassSelector(parent, resourceRoot, parent.getClass(), consumeBeforeApply, consumerAfterApply); } /** * Sets up a basic scene for the given {@link Parent} to and then adds a stylesheet called. There are a couple of rules the stylesheet has to * follow in order for this method to work. The rule are as follows. 
* <ul> * <li>the name of the stylesheet must be |classname| + IdSelector</li> * <li>the stylesheet must be in a sub folder in the resources named like resourceRoot</li> * <li>the id in the stylesheet must be "id"</li> * <li>if resource root is submit and not null, it determines the root path of the stylesheet in the resources</li> * </ul> * * @param parent the {@link Parent} that is to be checked. * @param resourceRoot the * @param consumeBeforeApply the {@link Consumer} which will be called before the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} is as expected. * @param consumerAfterApply the {@link Consumer} which will be called after the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} has been changed as expected. * @param <TParent> the type of the {@link Parent}. */ public static <TParent extends Parent> void assertParentStyleableViaIdSelector(final TParent parent, final String resourceRoot, final Consumer<TParent> consumeBeforeApply, final Consumer<TParent> consumerAfterApply) { parent.setId("id"); assertStyleableViaIdSelector(parent, resourceRoot, parent.getClass(), consumeBeforeApply, consumerAfterApply); } //endregion //region Private /** * Sets up a basic scene for the given {@link Parent} and then adds a stylesheet. There are a couple of rules the stylesheet has to follow in order * for this method to work. The rule are as follows. * <ul> * <li>the name of the stylesheet must be {@link Class#getSimpleName()} + "ClassSelector", where the class is styleSheetNameClass</li> * <li>the stylesheet must be in a sub folder in the resources named like resourceRoot</li> * <li>if resource root is submit and not null, it determines the root path of the stylesheet in the resources</li> * </ul> * * @param parent the {@link Parent} that is to be checked. 
* @param resourceRoot the * @param consumeBeforeApply the {@link Consumer} which will be called before the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} is as expected. * @param consumerAfterApply the {@link Consumer} which will be called after the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} has been changed as expected. * @param <TParent> the type of the {@link Parent}. */ private static <TParent extends Parent> void assertIsStyleableViaClassSelector(final TParent parent, final String resourceRoot, final Class<?> styleSheetNameClass, final Consumer<TParent> consumeBeforeApply, final Consumer<TParent> consumerAfterApply) { assertStyleable(parent, String.format("%s/%sClassSelector.css", resourceRoot != null ? resourceRoot : "", styleSheetNameClass.getSimpleName()), consumeBeforeApply, consumerAfterApply); } /** * Sets up a basic scene for the given {@link Parent} to and then adds a stylesheet called. There are a couple of rules the stylesheet has to * follow in order for this method to work. The rule are as follows. * <ul> * <li>the name of the stylesheet must be {@link Class#getSimpleName()} + "IdSelector", where the class is styleSheetNameClass</li> * <li>the stylesheet must be in a sub folder in the resources named like resourceRoot</li> * <li>the id in the stylesheet must be "id"</li> * <li>if resource root is submit and not null, it determines the root path of the stylesheet in the resources</li> * </ul> * * @param parent the {@link Parent} that is to be checked. * @param resourceRoot the * @param consumeBeforeApply the {@link Consumer} which will be called before the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} is as expected. 
* @param consumerAfterApply the {@link Consumer} which will be called after the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} has been changed as expected. * @param <TParent> the type of the {@link Parent}. */ private static <TParent extends Parent> void assertStyleableViaIdSelector(final TParent parent, final String resourceRoot, final Class<?> styleSheetNameClass, final Consumer<TParent> consumeBeforeApply, final Consumer<TParent> consumerAfterApply) { parent.setId("#id"); assertStyleable(parent, String.format("%s/%sIdSelector.css", resourceRoot != null ? resourceRoot : "", styleSheetNameClass.getSimpleName()), consumeBeforeApply, consumerAfterApply); } /** * Basic setup a scene for the given {@link Styleable} to and then adds a stylesheet. There are a couple of rules the stylesheet has to follow in order * for this method to work. The rule are as follows. * <ul> * <li>the name of the stylesheet must be "classname" + resourceExtension</li> * <li>the stylesheet must be in a sub folder in the resources named like resourceRoot</li> * <li>if resourceRoot is submit and not null, it determines the root path of the stylesheet in the resources</li> * <li>if resourceExtension</li> * </ul> * * @param parent the {@link Parent} that is to be checked. * @param styleSheetLocation the location of the stylesheet to be used. * @param consumeBeforeApply the {@link Consumer} which will be called before the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} is as expected. * @param consumerAfterApply the {@link Consumer} which will be called after the actual style are applied to the given {@link Parent}. In this you * should check if the state of the {@link Parent} has been changed as expected. * @param <TParent> the type of the {@link Parent}. 
*/ private static <TParent extends Parent> void assertStyleable(final TParent parent, final String styleSheetLocation, final Consumer<TParent> consumeBeforeApply, final Consumer<TParent> consumerAfterApply) { runOnJavaFXThread(() -> { parent.getStylesheets().add(styleSheetLocation); consumeBeforeApply.accept(parent); // create root and scene and add button Pane root = new Pane(); root.getChildren().add(parent); Stage stage = new Stage(); stage.setScene(new Scene(root)); stage.show(); consumerAfterApply.accept(parent); }); } /** * Runs the given {@link Runnable} in a javaFX thread. * * @param runnable the {@link Runnable} to be run on a java fx thread */ public static void runOnJavaFXThread(Runnable runnable) { runLater(runnable); } //endregion }
/* * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.flex.compiler.internal.scopes; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.apache.flex.compiler.common.Multiname; import org.apache.flex.compiler.common.XMLName; import org.apache.flex.compiler.definitions.IClassDefinition; import org.apache.flex.compiler.definitions.IDefinition; import org.apache.flex.compiler.definitions.metadata.IMetaTag; import org.apache.flex.compiler.definitions.references.IReference; import org.apache.flex.compiler.definitions.references.ReferenceFactory; import org.apache.flex.compiler.internal.definitions.ClassDefinition; import org.apache.flex.compiler.internal.mxml.MXMLDialect; import org.apache.flex.compiler.internal.projects.FlexProject; import org.apache.flex.compiler.internal.units.MXMLCompilationUnit; import org.apache.flex.compiler.mxml.IMXMLData; import org.apache.flex.compiler.mxml.IMXMLTagData; import org.apache.flex.compiler.mxml.IMXMLUnitData; import org.apache.flex.compiler.mxml.IXMLNameResolver; import com.google.common.collect.ImmutableSet; /** * Subclass of {@link ASFileScope} for MXML file scopes. 
* <p>
* It keeps track of the main MXML class definition in an MXML file scope.
* <p>
* It supports creating additional classes defined by
* <code>&lt;fx:Component&gt;</code> and <code>&lt;fx:Definition&gt;</code> tags
* in an MXML file.
* <p>
* It keeps track of the mapping from an MXML tag such as
* <code>&lt;fx:MyDefinition&gt;</code> to the class defined by
* <code>&lt;fx:Definition name="MyDefinition"&gt;</code>
* <p>
* It has APIs such as {@code isScriptTag()} for determining whether an MXML tag
* is a particular language tag, as determined by the version of MXML being used
* in the MXML file that created this file scope.
*/
public class MXMLFileScope extends ASFileScope implements IXMLNameResolver
{
    /**
     * Constructor.
     *
     * @param compilationUnit The {@link MXMLCompilationUnit} in which the new file scope
     * resides.
     * @param filePath The path of the MXML file for which this file scope is
     * being constructed.
     * @param mxmlData The {@code IMXMLData} that built this file scope. This is
     * used to determine which version of MXML is being used.
     */
    public MXMLFileScope(MXMLCompilationUnit compilationUnit, String filePath, IMXMLData mxmlData)
    {
        super(compilationUnit.getProject().getWorkspace(), filePath);

        this.project = compilationUnit.getProject();
        this.mxmlDialect = mxmlData.getMXMLDialect();
        this.sourceDependencies = new HashSet<String>();

        addImplicitImportsForMXML();
    }

    /**
     * The {@code FlexProject} for this file scope.
     */
    private final FlexProject project;

    /**
     * The {@code MXMLDialect} for this file scope.
     */
    private final MXMLDialect mxmlDialect;

    /**
     * Any source files which this file scope includes
     */
    private final Set<String> sourceDependencies;

    /**
     * The {@code ClassDefinition} of the main class for the MXML file.
     */
    private ClassDefinition mainClassDefinition;

    /**
     * A map from XMLNames that refer to <fx:Definition>s to the
     * ClassDefinitions for those <fx:Definition>s.
     * Lazily created; null until the first <fx:Definition> is added.
     */
    private Map<XMLName, ClassDefinition> fxDefinitionsMap;

    /**
     * A map from the starting offset of an <fx:Definition> tag to the
     * ClassDefinition produced by that <fx:Definition> This is built during MXML
     * scope-building, and used later by MXML tree-building to find the
     * already-built definition to connect to the node.
     */
    private Map<Integer, ClassDefinition> fxDefinitionsOffsetMap;

    /**
     * A map from XMLNames that refer to <fx:Components>s to the
     * ClassDefinitions for those <fx:Components>s.
     * Lazily created; null until the first <fx:Component> is added.
     */
    private Map<XMLName, ClassDefinition> fxComponentsMap;

    /**
     * A map from the starting offset of an <fx:Component> tag to the
     * ClassDefinition produced by that <fx:Component> This is built during MXML
     * scope-building, and used later by MXML tree-building to find the
     * already-built definition to connect to the node.
     */
    private Map<Integer, ClassDefinition> fxComponentsOffsetMap;

    /**
     * Adds the appropriate implicit imports for ActionScript.
     */
    private void addImplicitImportsForMXML()
    {
        // Add the implicit imports for MXML.
        for (String implicitImport : project.getImplicitImportsForMXML(mxmlDialect))
        {
            addImport(implicitImport);
        }
    }

    /**
     * Returns the main class definition in this file scope.
     *
     * @return The main class definition in this file scope.
     */
    public ClassDefinition getMainClassDefinition()
    {
        assert mainClassDefinition != null : "Main class definition should be set before it is retrieved";
        return mainClassDefinition;
    }

    /**
     * Called by the MXML scope-building code to set the main class definition
     * in this file scope.
     *
     * @param mainClassDefinition The main class definition in this file scope.
     */
    public void setMainClassDefinition(ClassDefinition mainClassDefinition)
    {
        assert this.mainClassDefinition == null : "Main class definition should only be set once.";
        assert mainClassDefinition != null;
        this.mainClassDefinition = mainClassDefinition;
    }

    /**
     * @return a Collection of source files included in this file scope
     */
    public ImmutableSet<String> getSourceDependencies()
    {
        // Defensive snapshot: callers get an immutable copy, not the live set.
        return ImmutableSet.copyOf(sourceDependencies);
    }

    /**
     * Add a source file dependency to this file scope
     *
     * @param filename Source dependency filename
     */
    public void addSourceDependency(String filename)
    {
        sourceDependencies.add(filename);
    }

    /**
     * Creates a new class definition for an &lt;fx:Component&gt; tag and adds
     * it to this scope.
     *
     * @param mainClassQName The fully-qualified class name of the main class
     * for the entire MXML document (e.g., <code>"MyApp"</code>).
     * @param componentTagStart The starting offset of the &lt;fx:Component&gt;
     * tag.
     * @param componentClassName The class name for the component, as specified
     * by the <code>className</code> attribute on the &lt;fx:Component&gt; tag,
     * or <code>null</code> if there was no such attribute.
     * @param componentBaseClassQName The fully-qualified class name of the base
     * class for the component class.
     * @return The newly-added {@code ClassDefinition} for the component class.
     */
    public ClassDefinition addFXComponent(String mainClassQName,
                                          int componentTagStart,
                                          String componentClassName,
                                          String componentBaseClassQName)
    {
        // Use the class name specified by the <code>className</code> attribute,
        // or generate a unique class name for the new component class,
        // such as "com_whatever_Whatever_component2"
        // for the 3rd anonymous <fx:Component> inside com.whatever.Whatever.
        String className = componentClassName != null ?
                           componentClassName :
                           generateComponentClassName(mainClassQName);

        String packageName = Multiname.getPackageNameForQName(className);
        String baseName = Multiname.getBaseNameForQName(className);
        String namespace = packageName.isEmpty() ? "*" : packageName + ".*";
        XMLName xmlName = new XMLName(namespace, baseName);

        // Create a ClassDefinition for the component class,
        // and add it to this file scope.
        ClassDefinition fxComponentClassDefinition =
                new ClassDefinition(className, getFilePrivateNamespaceReference());
        fxComponentClassDefinition.setBaseClassReference(
                ReferenceFactory.packageQualifiedReference(getWorkspace(), componentBaseClassQName));
        fxComponentClassDefinition.setMetaTags(new IMetaTag[0]);
        addDefinition(fxComponentClassDefinition);

        // Create a class scope for the component class.
        TypeScope classScope = new TypeScope(this, fxComponentClassDefinition);
        classScope.setContainingDefinition(fxComponentClassDefinition);
        fxComponentClassDefinition.setContainedScope(classScope);
        fxComponentClassDefinition.setupThisAndSuper();

        // Keep track of the tag-name-to-class-definition mapping so that we can
        // resolve a tag like <MyComponent>.
        if (fxComponentsMap == null)
            fxComponentsMap = new HashMap<XMLName, ClassDefinition>();
        fxComponentsMap.put(xmlName, fxComponentClassDefinition);

        // Keep track of the starting-offset-of-component-tag-to-component-class-definition
        // mapping so that we can find the class defined by an <fx:Component> tag
        // later when we build the MXML tree.
        if (fxComponentsOffsetMap == null)
            fxComponentsOffsetMap = new HashMap<Integer, ClassDefinition>();
        fxComponentsOffsetMap.put(componentTagStart, fxComponentClassDefinition);

        return fxComponentClassDefinition;
    }

    /**
     * Generates a class name for a class defined by an anonymous
     * <code>&lt;fx:Component&gt;</code> tag.
     * <p>
     * If the main class of the MXML document is, for example,
     * <code>com.whatever.Whatever</code>, then the 3rd anonymous component
     * class will be named <code>com_whatever_Whatever_component2</code>.
     *
     * @return The generated class name.
     */
    private String generateComponentClassName(String mainClassQName)
    {
        // The counter is the number of components registered so far (0-based suffix).
        int currentComponentCount = fxComponentsOffsetMap != null ? fxComponentsOffsetMap.size() : 0;
        return mainClassQName.replace('.', '_') + "_component" + String.valueOf(currentComponentCount);
    }

    /**
     * Gets the {@code ClassDefinition} for a class defined by a
     * <code>&lt;fx:Component&gt;</code> tag.
     *
     * @param componentTag The {@code MXMLTagData} for the
     * <code>&lt;fx:Component&gt;</code> tag.
     * @return The {@code ClassDefinition} associated with the
     * <code>&lt;fx:Component&gt;</code> tag.
     */
    public ClassDefinition getClassDefinitionForComponentTag(IMXMLTagData componentTag)
    {
        return fxComponentsOffsetMap != null ?
               fxComponentsOffsetMap.get(componentTag.getAbsoluteStart()) :
               null;
    }

    /**
     * Creates a new class definition for an &lt;fx:Definition&gt; tag and adds
     * it to this scope.
     *
     * @param mainClassQName The fully-qualified class name of the main class
     * for the entire MXML document (e.g., <code>"MyApp"</code>).
     * @param definitionTag the MXMLTagData representing the
     * &lt;fx:Definition&gt; tag
     * @param definitionName The definition name as specified by the
     * <code>name</code> attribute on the &lt;fx:Definition&gt; tag.
     * @param definitionBaseClassQName The fully-qualified class name of the base
     * class for the definition class.
     * @return The newly-added {@code ClassDefinition} for the definition class.
     */
    public ClassDefinition addFXDefinition(String mainClassQName,
                                           IMXMLTagData definitionTag,
                                           String definitionName,
                                           String definitionBaseClassQName)
    {
        // Generate a unique class name for the new <fx:Definition> class,
        // such as "com_whatever_Whatever_definition2"
        // for the 3rd <fx:Definition> inside com.whatever.Whatever.
        String className = generateDefinitionClassName(mainClassQName);

        XMLName definitionXMLName = new XMLName(definitionTag.getURI(), definitionName);

        // Create a ClassDefinition for the definition class,
        // and add it to this file scope.
        ClassDefinition fxDefinitionClassDefinition =
                new ClassDefinition(className, getFilePrivateNamespaceReference());
        fxDefinitionClassDefinition.setBaseClassReference(
                ReferenceFactory.packageQualifiedReference(getWorkspace(), definitionBaseClassQName));
        fxDefinitionClassDefinition.setMetaTags(new IMetaTag[0]);
        addDefinition(fxDefinitionClassDefinition);

        // Create a class scope for the definition class.
        TypeScope classScope = new TypeScope(this, fxDefinitionClassDefinition);
        classScope.setContainingDefinition(fxDefinitionClassDefinition);
        fxDefinitionClassDefinition.setContainedScope(classScope);
        fxDefinitionClassDefinition.setupThisAndSuper();

        // Keep track of the tag-name-to-class-definition mapping so that we can
        // resolve a tag like <fx:MyDefinition>.
        if (fxDefinitionsMap == null)
            fxDefinitionsMap = new HashMap<XMLName, ClassDefinition>();
        fxDefinitionsMap.put(definitionXMLName, fxDefinitionClassDefinition);

        // Keep track of the starting-offset-of-definition-tag-to-definition-class-definition
        // mapping so that we can find the class defined by an <fx:Definition> tag
        // later when we build the MXML tree.
        if (fxDefinitionsOffsetMap == null)
            fxDefinitionsOffsetMap = new HashMap<Integer, ClassDefinition>();
        fxDefinitionsOffsetMap.put(definitionTag.getAbsoluteStart(), fxDefinitionClassDefinition);

        return fxDefinitionClassDefinition;
    }

    /**
     * Generates a class name for a class defined by an
     * <code>&lt;fx:Definition&gt;</code> tag.
     * <p>
     * If the main class of the MXML document is, for example,
     * <code>com.whatever.Whatever</code>, then the 3rd definition class will be
     * named <code>com_whatever_Whatever_definition2</code>.
     *
     * @return The generated class name.
     */
    private String generateDefinitionClassName(String mainClassQName)
    {
        // TODO when http://bugs.adobe.com/jira/browse/CMP-403 is fixed,
        // make the name of the definition classes, just the name specified on
        // the tag and the namespace reference a private implementation namespace.
        int currentDefinitionCount = fxDefinitionsMap != null ? fxDefinitionsMap.size() : 0;
        return mainClassQName.replace('.', '_') + "_definition" + String.valueOf(currentDefinitionCount);
    }

    /**
     * Resolves an MXMLTagData to the fully qualified AS3 class name the tag
     * refers to.
     * <p>
     * TODO This method should return a name object instead of a string.
     *
     * @param tag An MXMLTagData whose name potentially refers to a AS3 class
     * name via manifest or &lt;fx:Definition&gt; tags.
     * @return Fully qualified AS3 class name the specified tag refers to.
     */
    public String resolveTagToQualifiedName(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        return resolveXMLNameToQualifiedName(tagName, tag.getParent().getMXMLDialect());
    }

    @Override
    public String resolveXMLNameToQualifiedName(XMLName tagName, MXMLDialect mxmlDialect)
    {
        // Classes created by <fx:Definition> tags take precedence over project resolution.
        ClassDefinition classDef = getClassDefinitionForDefinitionTagName(tagName);
        if (classDef != null)
            return classDef.getQualifiedName();

        return project.resolveXMLNameToQualifiedName(tagName, mxmlDialect);
    }

    /**
     * Resolves an MXMLTagData to the IReference class the tag refers to.
     *
     * @param tag An MXMLTagData whose name potentially refers to a AS3 class
     * name via manifest or &lt;fx:Definition&gt; tags.
     * @return IReference to the specified tag refers to.
     */
    public IReference resolveTagToReference(IMXMLTagData tag)
    {
        String qname = resolveTagToQualifiedName(tag);
        if (qname != null)
            return ReferenceFactory.packageQualifiedReference(getWorkspace(), qname);
        return null;
    }

    /**
     * Resolves an MXML tag such as <s:Button> to a class definition that the
     * manifest information has associated with the tag.
     * <p>
     * This method handles both manifest namespaces (such as in the above
     * example) and package namespaces such as <d:Sprite
     * xmlns:d="flash.display.*">.
     *
     * @param tag An MXML tag.
     * @return The definition of the ActionScript class, or <code>null</code> if
     * the tag has a manifest namespace and isn't found in the
     * MXMLManifestManager.
     */
    public IDefinition resolveTagToDefinition(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        return resolveXMLNameToDefinition(tagName, tag.getParent().getMXMLDialect());
    }

    /**
     * Resolves an {@link XMLName} such as <s:Button> to a class definition that
     * the manifest information has associated with the tag.
     * <p>
     * This method handles both manifest namespaces (such as in the above
     * example) and package namespaces such as <d:Sprite
     * xmlns:d="flash.display.*">.
     *
     * @param tagXMLName {@link XMLName} of a tag.
     * @param mxmlDialect Knowledge about dialect-specific resolution
     * strategies.
     * @return The definition of the ActionScript class, or <code>null</code> if
     * the tag has a manifest namespace and isn't found in the
     * MXMLManifestManager.
     */
    @Override
    public IDefinition resolveXMLNameToDefinition(XMLName tagXMLName, MXMLDialect mxmlDialect)
    {
        // See if there is a class defined by a <Component> tag.
        ClassDefinition componentTagClassDef = getClassDefinitionForComponentTagName(tagXMLName);
        if (componentTagClassDef != null)
            return componentTagClassDef;

        // See if there is a class defined by a <Definition> tag.
        ClassDefinition definitionTagClassDef = getClassDefinitionForDefinitionTagName(tagXMLName);
        if (definitionTagClassDef != null)
            return definitionTagClassDef;

        return project.resolveXMLNameToDefinition(tagXMLName, mxmlDialect);
    }

    /**
     * Gets the {@link ClassDefinition} for a class defined by a
     * &lt;fx:Component&gt; tag.
     *
     * @param componentTagName {@link XMLName} that refers to the
     * &lt;fx:Component&gt;. The name of the tag is determined by the className
     * attribute of the &lt;fx:Component&gt; tag.
     * @return The matching {@link ClassDefinition}, or <code>null</code> if none.
     */
    public ClassDefinition getClassDefinitionForComponentTagName(XMLName componentTagName)
    {
        return fxComponentsMap != null ?
               fxComponentsMap.get(componentTagName) :
               null;
    }

    /**
     * Gets the {@code ClassDefinition} for a class defined by a
     * <code>&lt;fx:Definition&gt;</code> tag.
     *
     * @param definitionTag The {@code MXMLTagData} for the
     * <code>&lt;fx:Definition&gt;</code> tag.
     * @return The {@code ClassDefinition} associated with the
     * <code>&lt;fx:Definition&gt;</code> tag.
     */
    public ClassDefinition getClassDefinitionForDefinitionTag(IMXMLTagData definitionTag)
    {
        return fxDefinitionsOffsetMap != null ?
               fxDefinitionsOffsetMap.get(definitionTag.getAbsoluteStart()) :
               null;
    }

    /**
     * Gets the {@link ClassDefinition} for a class defined by a
     * &lt;fx:Definition&gt; tag.
     *
     * @param definitionTagName {@link XMLName} that refers to the
     * &lt;fx:Definition&gt;. The name of the tag is determined by the name
     * attribute of the &lt;fx:Definition&gt; tag.
     * @return The matching {@link ClassDefinition}, or <code>null</code> if none.
     */
    public ClassDefinition getClassDefinitionForDefinitionTagName(XMLName definitionTagName)
    {
        return fxDefinitionsMap != null ?
               fxDefinitionsMap.get(definitionTagName) :
               null;
    }

    /**
     * Gets the class definitions for all the &lt;fx:Definition&gt; tags in this
     * scope.
     *
     * @return The class definitions for all the &lt;fx:Definition&gt; tags in
     * this scope.
     */
    public IClassDefinition[] getLibraryDefinitions()
    {
        return fxDefinitionsMap != null ?
               fxDefinitionsMap.values().toArray(new IClassDefinition[0]) :
               new IClassDefinition[0];
    }

    /**
     * Returns all the {@link XMLName}'s that refer to &lt;fx:Definition&gt;'s
     * in this file scope.
     *
     * @return All the {@link XMLName}'s that refer to &lt;fx:Definition&gt;'s
     * in this file scope.
     */
    public XMLName[] getLibraryDefinitionTagNames()
    {
        return fxDefinitionsMap != null ?
               fxDefinitionsMap.keySet().toArray(new XMLName[0]) :
               new XMLName[0];
    }

    // The is*Tag() methods below all compare a tag's XMLName against the
    // dialect-specific name resolved by this scope's MXMLDialect.

    public boolean isBindingTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName bindingTagName = mxmlDialect.resolveBinding();
        return tagName.equals(bindingTagName);
    }

    public boolean isComponentTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName componentTagName = mxmlDialect.resolveComponent();
        return tagName.equals(componentTagName);
    }

    public boolean isDeclarationsTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName declarationsTagName = mxmlDialect.resolveDeclarations();
        return tagName.equals(declarationsTagName);
    }

    public boolean isDefinitionTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName definitionTagName = mxmlDialect.resolveDefinition();
        return tagName.equals(definitionTagName);
    }

    public boolean isLibraryTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName libraryTagName = mxmlDialect.resolveLibrary();
        return tagName.equals(libraryTagName);
    }

    public boolean isMetadataTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName metadataTagName = mxmlDialect.resolveMetadata();
        return tagName.equals(metadataTagName);
    }

    public boolean isModelTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName modelTagName = mxmlDialect.resolveModel();
        return tagName.equals(modelTagName);
    }

    public boolean isPrivateTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName privateTagName = mxmlDialect.resolvePrivate();
        return tagName.equals(privateTagName);
    }

    public boolean isReparentTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName reparentTagName = mxmlDialect.resolveReparent();
        return tagName.equals(reparentTagName);
    }

    public boolean isScriptTag(IMXMLUnitData unitData)
    {
        // Convenience overload: only tag units can be script tags.
        if (unitData instanceof IMXMLTagData)
            return isScriptTag((IMXMLTagData)unitData);
        return false;
    }

    public boolean isScriptTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName scriptTagName = mxmlDialect.resolveScript();
        return tagName.equals(scriptTagName);
    }

    public boolean isStringTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName stringTagName = mxmlDialect.resolveString();
        return tagName.equals(stringTagName);
    }

    public boolean isStyleTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName styleTagName = mxmlDialect.resolveStyle();
        return tagName.equals(styleTagName);
    }

    public boolean isXMLTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName xmlTagName = mxmlDialect.resolveXML();
        return tagName.equals(xmlTagName);
    }

    public boolean isXMLListTag(IMXMLTagData tag)
    {
        XMLName tagName = tag.getXMLName();
        XMLName xmlListTagName = mxmlDialect.resolveXMLList();
        return tagName.equals(xmlListTagName);
    }
}
package com.github.mikephil.charting.data;

import android.content.Context;
import android.graphics.Color;
import android.graphics.DashPathEffect;
import android.graphics.Typeface;

import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.components.YAxis;
import com.github.mikephil.charting.formatter.DefaultValueFormatter;
import com.github.mikephil.charting.formatter.IValueFormatter;
import com.github.mikephil.charting.interfaces.datasets.IDataSet;
import com.github.mikephil.charting.utils.ColorTemplate;
import com.github.mikephil.charting.utils.MPPointF;
import com.github.mikephil.charting.utils.Utils;

import java.lang.annotation.Documented;
import java.lang.annotation.Inherited;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by Philipp Jahoda on 21/10/15.
 * This is the base dataset of all DataSets. Its purpose is to implement critical methods
 * provided by the IDataSet interface: styling (colors, value text, legend form), visibility
 * flags, and generic entry-removal/lookup helpers built on the abstract entry accessors.
 */
public abstract class BaseDataSet<T extends Entry> implements IDataSet<T> {

    /**
     * List representing all colors that are used for this DataSet.
     * Colors are cycled modulo the list size when more entries than colors exist.
     */
    protected List<Integer> mColors = null;

    /**
     * List representing all colors that are used for drawing the actual values for this DataSet
     */
    protected List<Integer> mValueColors = null;

    /**
     * label that describes the DataSet or the data the DataSet represents
     */
    private String mLabel = "DataSet";

    /**
     * this specifies which axis this DataSet should be plotted against
     */
    protected YAxis.AxisDependency mAxisDependency = YAxis.AxisDependency.LEFT;

    /**
     * if true, value highlightning is enabled
     */
    protected boolean mHighlightEnabled = true;

    /**
     * custom formatter that is used instead of the auto-formatter if set
     * (transient: formatters are not serialized with the dataset)
     */
    protected transient IValueFormatter mValueFormatter;

    /**
     * the typeface used for the value text
     */
    protected Typeface mValueTypeface;

    // Legend appearance for this dataset; NaN sizes mean "use the Legend's default".
    private Legend.LegendForm mForm = Legend.LegendForm.DEFAULT;
    private float mFormSize = Float.NaN;
    private float mFormLineWidth = Float.NaN;
    private DashPathEffect mFormLineDashEffect = null;

    /**
     * if true, y-values are drawn on the chart
     */
    protected boolean mDrawValues = true;

    /**
     * if true, y-icons are drawn on the chart
     */
    protected boolean mDrawIcons = true;

    /**
     * the offset for drawing icons (in dp)
     */
    protected MPPointF mIconsOffset = new MPPointF();

    /**
     * the size of the value-text labels
     * NOTE(review): this default is stored as-is (17f), while setValueTextSize() converts
     * dp to pixels — presumably the default is already in pixels; confirm against renderers.
     */
    protected float mValueTextSize = 17f;

    /**
     * flag that indicates if the DataSet is visible or not
     */
    protected boolean mVisible = true;

    /**
     * Default constructor. Seeds one default chart color (light blue) and black value text,
     * so the color getters never see an empty list.
     */
    public BaseDataSet() {
        mColors = new ArrayList<Integer>();
        mValueColors = new ArrayList<Integer>();

        // default color
        mColors.add(Color.rgb(140, 234, 255));
        mValueColors.add(Color.BLACK);
    }

    /**
     * Constructor with label.
     *
     * @param label the label describing this DataSet
     */
    public BaseDataSet(String label) {
        this();
        this.mLabel = label;
    }

    /**
     * Use this method to tell the data set that the underlying data has changed.
     */
    public void notifyDataSetChanged() {
        calcMinMax();
    }


    /**
     * ###### ###### COLOR GETTING RELATED METHODS ##### ######
     */

    @Override
    public List<Integer> getColors() {
        return mColors;
    }

    /** Returns the list of colors used for drawing value labels. */
    public List<Integer> getValueColors() {
        return mValueColors;
    }

    @Override
    public int getColor() {
        // first color is the "primary" color of the dataset
        return mColors.get(0);
    }

    @Override
    public int getColor(int index) {
        // cycle through the color list when index exceeds its size
        return mColors.get(index % mColors.size());
    }

    /**
     * ###### ###### COLOR SETTING RELATED METHODS ##### ######
     */

    /**
     * Sets the colors that should be used for this DataSet. Colors are reused
     * as soon as the number of Entries the DataSet represents is higher than
     * the size of the colors array. If you are using colors from the resources,
     * make sure that the colors are already prepared (by calling
     * getResources().getColor(...)) before adding them to the DataSet.
     *
     * @param colors resolved ARGB color ints (the list is adopted, not copied)
     */
    public void setColors(List<Integer> colors) {
        this.mColors = colors;
    }

    /**
     * Sets the colors that should be used for this DataSet. Colors are reused
     * as soon as the number of Entries the DataSet represents is higher than
     * the size of the colors array. If you are using colors from the resources,
     * make sure that the colors are already prepared (by calling
     * getResources().getColor(...)) before adding them to the DataSet.
     *
     * @param colors resolved ARGB color ints
     */
    public void setColors(int... colors) {
        this.mColors = ColorTemplate.createColors(colors);
    }

    /**
     * Sets the colors that should be used for this DataSet. Colors are reused
     * as soon as the number of Entries the DataSet represents is higher than
     * the size of the colors array. You can use
     * "new int[] { R.color.red, R.color.green, ... }" to provide colors for
     * this method. Internally, the colors are resolved using
     * getResources().getColor(...)
     *
     * @param colors color *resource ids* (not ARGB values)
     * @param c      context used to resolve the resource ids
     */
    public void setColors(int[] colors, Context c) {

        if(mColors == null){
            mColors = new ArrayList<>();
        }

        mColors.clear();

        for (int color : colors) {
            mColors.add(c.getResources().getColor(color));
        }
    }

    /**
     * Adds a new color to the colors array of the DataSet.
     *
     * @param color an ARGB color int
     */
    public void addColor(int color) {
        if (mColors == null)
            mColors = new ArrayList<Integer>();
        mColors.add(color);
    }

    /**
     * Sets the one and ONLY color that should be used for this DataSet.
     * Internally, this recreates the colors array and adds the specified color.
     *
     * @param color an ARGB color int
     */
    public void setColor(int color) {
        resetColors();
        mColors.add(color);
    }

    /**
     * Sets a color with a specific alpha value.
     *
     * @param color an ARGB color int (its alpha channel is replaced)
     * @param alpha from 0-255
     */
    public void setColor(int color, int alpha) {
        setColor(Color.argb(alpha, Color.red(color), Color.green(color), Color.blue(color)));
    }

    /**
     * Sets colors with a specific alpha value.
     *
     * @param colors ARGB color ints (alpha channels are replaced)
     * @param alpha  from 0-255, applied to every color
     */
    public void setColors(int[] colors, int alpha) {
        resetColors();
        for (int color : colors) {
            addColor(Color.argb(alpha, Color.red(color), Color.green(color), Color.blue(color)));
        }
    }

    /**
     * Resets all colors of this DataSet and recreates the colors array.
     */
    public void resetColors() {
        if(mColors == null) {
            mColors = new ArrayList<Integer>();
        }
        mColors.clear();
    }

    /**
     * ###### ###### OTHER STYLING RELATED METHODS ##### ######
     */

    @Override
    public void setLabel(String label) {
        mLabel = label;
    }

    @Override
    public String getLabel() {
        return mLabel;
    }

    @Override
    public void setHighlightEnabled(boolean enabled) {
        mHighlightEnabled = enabled;
    }

    @Override
    public boolean isHighlightEnabled() {
        return mHighlightEnabled;
    }

    @Override
    public void setValueFormatter(IValueFormatter f) {

        // null is silently ignored so the dataset never loses its formatter fallback
        if (f == null)
            return;
        else
            mValueFormatter = f;
    }

    @Override
    public IValueFormatter getValueFormatter() {
        // fall back to the library-wide default formatter when none was set
        if (needsFormatter())
            return Utils.getDefaultValueFormatter();
        return mValueFormatter;
    }

    @Override
    public boolean needsFormatter() {
        return mValueFormatter == null;
    }

    @Override
    public void setValueTextColor(int color) {
        // single color replaces all previously configured value-text colors
        mValueColors.clear();
        mValueColors.add(color);
    }

    @Override
    public void setValueTextColors(List<Integer> colors) {
        mValueColors = colors;
    }

    @Override
    public void setValueTypeface(Typeface tf) {
        mValueTypeface = tf;
    }

    @Override
    public void setValueTextSize(float size) {
        // caller passes dp; stored internally in pixels
        mValueTextSize = Utils.convertDpToPixel(size);
    }

    @Override
    public int getValueTextColor() {
        return mValueColors.get(0);
    }

    @Override
    public int getValueTextColor(int index) {
        return mValueColors.get(index % mValueColors.size());
    }

    @Override
    public Typeface getValueTypeface() {
        return mValueTypeface;
    }

    @Override
    public float getValueTextSize() {
        return mValueTextSize;
    }

    /** Sets the legend form (shape) used to represent this dataset in the Legend. */
    public void setForm(Legend.LegendForm form) {
        mForm = form;
    }

    @Override
    public Legend.LegendForm getForm() {
        return mForm;
    }

    /** Sets the legend form size; Float.NaN means "use the Legend default". */
    public void setFormSize(float formSize) {
        mFormSize = formSize;
    }

    @Override
    public float getFormSize() {
        return mFormSize;
    }

    /** Sets the legend form line width; Float.NaN means "use the Legend default". */
    public void setFormLineWidth(float formLineWidth) {
        mFormLineWidth = formLineWidth;
    }

    @Override
    public float getFormLineWidth() {
        return mFormLineWidth;
    }

    /** Sets the dash effect used when the legend form is a line. */
    public void setFormLineDashEffect(DashPathEffect dashPathEffect) {
        mFormLineDashEffect = dashPathEffect;
    }

    @Override
    public DashPathEffect getFormLineDashEffect() {
        return mFormLineDashEffect;
    }

    @Override
    public void setDrawValues(boolean enabled) {
        this.mDrawValues = enabled;
    }

    @Override
    public boolean isDrawValuesEnabled() {
        return mDrawValues;
    }

    @Override
    public void setDrawIcons(boolean enabled) {
        mDrawIcons = enabled;
    }

    @Override
    public boolean isDrawIconsEnabled() {
        return mDrawIcons;
    }

    @Override
    public void setIconsOffset(MPPointF offsetDp) {
        // copy the fields rather than the reference so callers can recycle their MPPointF
        mIconsOffset.x = offsetDp.x;
        mIconsOffset.y = offsetDp.y;
    }

    @Override
    public MPPointF getIconsOffset() {
        return mIconsOffset;
    }

    @Override
    public void setVisible(boolean visible) {
        mVisible = visible;
    }

    @Override
    public boolean isVisible() {
        return mVisible;
    }

    @Override
    public YAxis.AxisDependency getAxisDependency() {
        return mAxisDependency;
    }

    @Override
    public void setAxisDependency(YAxis.AxisDependency dependency) {
        mAxisDependency = dependency;
    }


    /**
     * ###### ###### DATA RELATED METHODS ###### ######
     */

    @Override
    public int getIndexInEntries(int xIndex) {

        // linear scan; returns the first entry whose x equals the given index, or -1
        // NOTE(review): compares an int against Entry.getX() with == — exact match only;
        // presumably x values are whole numbers here, confirm against callers.
        for (int i = 0; i < getEntryCount(); i++) {
            if (xIndex == getEntryForIndex(i).getX())
                return i;
        }

        return -1;
    }

    @Override
    public boolean removeFirst() {

        if (getEntryCount() > 0) {

            T entry = getEntryForIndex(0);
            return removeEntry(entry);
        } else
            return false;
    }

    @Override
    public boolean removeLast() {

        if (getEntryCount() > 0) {

            T e = getEntryForIndex(getEntryCount() - 1);
            return removeEntry(e);
        } else
            return false;
    }

    @Override
    public boolean removeEntryByXValue(float xValue) {

        // Float.NaN for the "closest y" hint: match on x only
        T e = getEntryForXValue(xValue, Float.NaN);
        return removeEntry(e);
    }

    @Override
    public boolean removeEntry(int index) {

        T e = getEntryForIndex(index);
        return removeEntry(e);
    }

    @Override
    public boolean contains(T e) {

        // linear scan using Entry.equals()
        for (int i = 0; i < getEntryCount(); i++) {
            if (getEntryForIndex(i).equals(e))
                return true;
        }

        return false;
    }
}
package mxh.kickassmenu.gestured.app;

import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup.LayoutParams;

import mxh.kickassmenu.R;
import mxh.kickassmenu.gestured.SlidingMenu;

/**
 * Helper that a host Activity delegates its lifecycle callbacks to in order to
 * gain a {@link SlidingMenu}. The activity must call {@link #onCreate(Bundle)},
 * {@link #onPostCreate(Bundle)} and {@link #onSaveInstanceState(Bundle)} from the
 * corresponding lifecycle methods, and register its content/behind views.
 */
public class SlidingActivityHelper {

    private Activity mActivity;

    private SlidingMenu mSlidingMenu;

    // the activity's main (above) content view
    private View mViewAbove;

    // the menu (behind) view
    private View mViewBehind;

    // guards registerAboveContentView(): once setContentView() has run, further
    // registration calls are ignored
    private boolean mBroadcasting = false;

    private boolean mOnPostCreateCalled = false;

    private boolean mEnableSlide = true;

    /**
     * Instantiates a new SlidingActivityHelper.
     *
     * @param activity the associated activity
     */
    public SlidingActivityHelper(Activity activity) {
        mActivity = activity;
    }

    /**
     * Sets mSlidingMenu as a newly inflated SlidingMenu. Should be called within the activitiy's onCreate()
     *
     * @param savedInstanceState the saved instance state (unused)
     */
    public void onCreate(Bundle savedInstanceState) {
        try {
            mSlidingMenu = (SlidingMenu) LayoutInflater.from(mActivity).inflate(R.layout.slidingmenumain, null);
        } catch (Exception e) {
            // NOTE(review): a failed inflate is only logged here, leaving mSlidingMenu
            // null — later calls (setBehindContentView, onPostCreate, toggle, ...) will
            // then throw NPE. Intentional best-effort in this fork; confirm before changing.
            e.printStackTrace();
        }
    }

    /**
     * Further SlidingMenu initialization. Should be called within the activitiy's onPostCreate()
     *
     * @param savedInstanceState the saved instance state (unused)
     * @throws IllegalStateException if setBehindContentView/setContentView were not both called
     */
    public void onPostCreate(Bundle savedInstanceState) {
        if (mViewBehind == null || mViewAbove == null) {
            throw new IllegalStateException("Both setBehindContentView must be called " +
                    "in onCreate in addition to setContentView.");
        }

        mOnPostCreateCalled = true;

        // attach either as a window decoration (slides the ActionBar too) or content-only
        mSlidingMenu.attachToActivity(mActivity,
                mEnableSlide ? SlidingMenu.SLIDING_WINDOW : SlidingMenu.SLIDING_CONTENT);

        final boolean open;
        final boolean secondary;
        if (savedInstanceState != null) {
            // restore the open/closed state saved in onSaveInstanceState()
            open = savedInstanceState.getBoolean("SlidingActivityHelper.open");
            secondary = savedInstanceState.getBoolean("SlidingActivityHelper.secondary");
        } else {
            open = false;
            secondary = false;
        }

        // posted so the restore runs after the view hierarchy has been laid out
        new Handler().post(new Runnable() {
            public void run() {
                if (open) {
                    if (secondary) {
                        mSlidingMenu.showSecondaryMenu(false);
                    } else {
                        mSlidingMenu.showMenu(false);
                    }
                } else {
                    mSlidingMenu.showContent(false);
                }
            }
        });
    }

    /**
     * Controls whether the ActionBar slides along with the above view when the menu is opened,
     * or if it stays in place.
     *
     * @param slidingActionBarEnabled True if you want the ActionBar to slide along with the SlidingMenu,
     *                                false if you want the ActionBar to stay in place
     * @throws IllegalStateException if called after onPostCreate()
     */
    public void setSlidingActionBarEnabled(boolean slidingActionBarEnabled) {
        if (mOnPostCreateCalled)
            throw new IllegalStateException("enableSlidingActionBar must be called in onCreate.");
        mEnableSlide = slidingActionBarEnabled;
    }

    /**
     * Finds a view that was identified by the id attribute from the XML that was processed in onCreate(Bundle).
     *
     * @param id the resource id of the desired view
     * @return The view if found or null otherwise.
     */
    public View findViewById(int id) {
        View v;
        if (mSlidingMenu != null) {
            v = mSlidingMenu.findViewById(id);
            if (v != null)
                return v;
        }
        return null;
    }

    /**
     * Called to retrieve per-instance state from an activity before being killed so that the state can be
     * restored in onCreate(Bundle) or onRestoreInstanceState(Bundle) (the Bundle populated by this method
     * will be passed to both).
     *
     * @param outState Bundle in which to place your saved state.
     */
    public void onSaveInstanceState(Bundle outState) {
        outState.putBoolean("SlidingActivityHelper.open", mSlidingMenu.isMenuShowing());
        outState.putBoolean("SlidingActivityHelper.secondary", mSlidingMenu.isSecondaryMenuShowing());
    }

    /**
     * Register the above content view.
     *
     * @param v the above content view to register
     * @param params LayoutParams for that view (unused)
     */
    public void registerAboveContentView(View v, LayoutParams params) {
        // only accept the view if it is not our own setContentView() broadcasting back
        if (!mBroadcasting)
            mViewAbove = v;
    }

    /**
     * Set the activity content to an explicit view. This view is placed directly into the activity's view
     * hierarchy. It can itself be a complex view hierarchy. When calling this method, the layout parameters
     * of the specified view are ignored. Both the width and the height of the view are set by default to
     * MATCH_PARENT. To use your own layout parameters, invoke setContentView(android.view.View,
     * android.view.ViewGroup.LayoutParams) instead.
     *
     * @param v The desired content to display.
     */
    public void setContentView(View v) {
        mBroadcasting = true;
        mActivity.setContentView(v);
    }

    /**
     * Set the behind view content to an explicit view. This view is placed directly into the behind view 's view hierarchy.
     * It can itself be a complex view hierarchy.
     *
     * @param view The desired content to display.
     * @param layoutParams Layout parameters for the view. (unused)
     */
    public void setBehindContentView(View view, LayoutParams layoutParams) {
        mViewBehind = view;
        mSlidingMenu.setMenu(mViewBehind);
    }

    /**
     * Gets the SlidingMenu associated with this activity.
     *
     * @return the SlidingMenu associated with this activity.
     */
    public SlidingMenu getSlidingMenu() {
        return mSlidingMenu;
    }

    /**
     * Toggle the SlidingMenu. If it is open, it will be closed, and vice versa.
     */
    public void toggle() {
        mSlidingMenu.toggle();
    }

    /**
     * Close the SlidingMenu and show the content view.
     */
    public void showContent() {
        mSlidingMenu.showContent();
    }

    /**
     * Open the SlidingMenu and show the menu view.
     */
    public void showMenu() {
        mSlidingMenu.showMenu();
    }

    /**
     * Open the SlidingMenu and show the secondary menu view. Will default to the regular menu
     * if there is only one.
     */
    public void showSecondaryMenu() {
        mSlidingMenu.showSecondaryMenu();
    }

    /**
     * On key up. Intercepts BACK while the menu is showing and closes it instead of
     * letting the activity handle the key.
     *
     * @param keyCode the key code
     * @param event the event
     * @return true, if successful
     */
    public boolean onKeyUp(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_BACK && mSlidingMenu.isMenuShowing()) {
            showContent();
            return true;
        }
        return false;
    }
}
/**
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.sakaiproject.nakamura.user.lite.servlet;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import junit.framework.Assert;

import org.apache.jackrabbit.api.security.user.AuthorizableExistsException;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.servlets.HtmlResponse;
import org.apache.sling.servlets.post.Modification;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Repository;
import org.sakaiproject.nakamura.api.lite.Session;
import org.sakaiproject.nakamura.api.lite.SessionAdaptable;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.api.lite.authorizable.Group;
import org.sakaiproject.nakamura.api.user.AuthorizableCountChanger;
import org.sakaiproject.nakamura.api.user.UserConstants;
import org.sakaiproject.nakamura.user.lite.resource.RepositoryHelper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Unit tests for {@code LiteAbstractSakaiGroupPostServlet}: verifies manager
 * add/delete handling ({@code updateOwnership}) and joinable/non-joinable
 * group membership updates ({@code updateGroupMembership}) against an
 * in-memory repository seeded with users "ieb", "jeff", "joe" and group
 * "g-course101".
 */
public class LiteAbstractSakaiGroupPostServletTest {

  @Mock
  private SlingHttpServletRequest request;

  @Mock
  private ResourceResolver resourceResolver;

  @Mock
  private AuthorizableCountChanger authorizableCountChanger;

  // subject under test: a concrete subclass with a no-op handleOperation()
  private LiteAbstractSakaiGroupPostServlet servlet;

  private Repository repository;

  private Session session;

  public LiteAbstractSakaiGroupPostServletTest() throws ClientPoolException, StorageClientException, AccessDeniedException, ClassNotFoundException, IOException {
    repository = RepositoryHelper.getRepository(new String[]{ "ieb","jeff","joe"}, new String[]{"g-course101"} );
    MockitoAnnotations.initMocks(this);
  }

  /**
   * Wires a mock JCR session (adaptable to the sparse session logged in as
   * "ieb") into the resource resolver, and builds the servlet under test.
   */
  @Before
  public void before() throws ClientPoolException, StorageClientException, AccessDeniedException {

    javax.jcr.Session jcrSession = Mockito.mock(javax.jcr.Session.class, Mockito.withSettings().extraInterfaces(SessionAdaptable.class));
    session = repository.loginAdministrative("ieb");
    Mockito.when(((SessionAdaptable)jcrSession).getSession()).thenReturn(session);
    Mockito.when(resourceResolver.adaptTo(javax.jcr.Session.class)).thenReturn(jcrSession);
    when(request.getRemoteUser()).thenReturn("ieb");
    when(request.getResourceResolver()).thenReturn(resourceResolver);
    servlet = new LiteAbstractSakaiGroupPostServlet() {

      /**
       *
       */
      private static final long serialVersionUID = 1L;

      @Override
      protected void handleOperation(SlingHttpServletRequest request,
          HtmlResponse htmlResponse, List<Modification> changes)
          throws StorageClientException, AccessDeniedException, AuthorizableExistsException {
        // no-op: tests exercise the protected helpers directly
      }
    };
    servlet.authorizableCountChanger = authorizableCountChanger;
    servlet.repository = repository;
  }

  /**
   * :manager parameters plus the extra "joe" principal must all end up in
   * PROP_GROUP_MANAGERS.
   */
  @Test
  public void testAddManager() throws Exception {
    when(request.getParameterValues(":manager")).thenReturn(new String[] { "jack", "john", "jeff" });
    when(request.getParameterValues(":manager@Delete")).thenReturn(null);
    Map<String, Object> props = Maps.newHashMap();
    props.put(Group.ID_FIELD,"g-foo");
    Group group = new Group(props);
    Map<String, Object> toSave = Maps.newLinkedHashMap();
    servlet.updateOwnership(request, group, new String[] { "joe" }, null, toSave);

    Set<String> values = ImmutableSet.copyOf((String[])group.getProperty(UserConstants.PROP_GROUP_MANAGERS));
    assertTrue(values.contains("jeff"));
    assertTrue(values.contains("jack"));
    assertTrue(values.contains("john"));
    assertTrue(values.contains("joe"));
    assertEquals(4, values.size());
  }

  /**
   * :manager@Delete must remove a manager while :manager adds one; only the
   * added manager remains.
   */
  @Test
  public void testDeleteManager() throws Exception {
    // Remove jeff, add jack
    when(request.getParameterValues(":manager")).thenReturn(new String[] { "jack" });
    when(request.getParameterValues(":manager@Delete")).thenReturn(new String[] { "jeff" });
    Map<String, Object> props = Maps.newHashMap();
    props.put(Group.ID_FIELD,"g-foo");
    Group group = new Group(props);
    Map<String, Object> toSave = Maps.newLinkedHashMap();
    servlet.updateOwnership(request, group, new String[0], null,toSave);

    Set<String> values = ImmutableSet.copyOf((String[])group.getProperty(UserConstants.PROP_GROUP_MANAGERS));
    assertTrue(values.contains("jack"));
    assertEquals(1, values.size());
  }

  /**
   * Joining a group whose PROP_JOINABLE_GROUP is "no" must be denied when the
   * changes are saved.
   */
  @Test
  public void testNonJoinableGroup() throws Exception {
    Session adminSession = repository.loginAdministrative();
    adminSession.getAuthorizableManager().createGroup("g-fooNoJoin", "FooNoJoin", ImmutableMap.of(UserConstants.PROP_JOINABLE_GROUP,(Object)"no"));
    adminSession.logout();

    Group group = (Group) session.getAuthorizableManager().findAuthorizable("g-fooNoJoin");

    when(request.getParameterValues(":member")).thenReturn(new String[] { "ieb" });
    Map<String, Object> toSave = Maps.newLinkedHashMap();
    ArrayList<Modification> changes = new ArrayList<Modification>();
    try {
      servlet.updateGroupMembership(request, session, group, changes, toSave);
      servlet.saveAll(session, toSave);
      Assert.fail("Should have thrown an exception");
    } catch ( AccessDeniedException e) {
      // expected: non-joinable group rejects the membership change
    }
  }

  /**
   * Joining a group whose PROP_JOINABLE_GROUP is "yes" must succeed and record
   * at least one modification.
   */
  @Test
  public void testJoinableGroup() throws Exception {
    Map<String, Object> props = Maps.newHashMap();
    props.put(UserConstants.PROP_JOINABLE_GROUP,"yes");
    Session adminSession = repository.loginAdministrative();
    adminSession.getAuthorizableManager().createGroup("g-foo2", "g-foo2", props);
    adminSession.logout();

    Group group = (Group) session.getAuthorizableManager().findAuthorizable("g-foo2");

    when(request.getRemoteUser()).thenReturn("ieb");
    when(request.getParameterValues(":member")).thenReturn(new String[] { "ieb" });
    ArrayList<Modification> changes = new ArrayList<Modification>();
    Map<String, Object> toSave = Maps.newLinkedHashMap();
    servlet.updateGroupMembership(request, session, group, changes, toSave);
    assertTrue(changes.size() > 0);
  }
}
/*
 * Copyright 2016-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.vpls;

import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.net.intf.Interface;
import org.onosproject.net.intf.InterfaceEvent;
import org.onosproject.net.intf.InterfaceListener;
import org.onosproject.net.intf.InterfaceService;
import org.onosproject.net.neighbour.NeighbourMessageContext;
import org.onosproject.net.neighbour.NeighbourMessageHandler;
import org.onosproject.net.neighbour.NeighbourResolutionService;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.Host;
import org.onosproject.net.config.NetworkConfigEvent;
import org.onosproject.net.config.NetworkConfigListener;
import org.onosproject.net.config.NetworkConfigService;
import org.onosproject.net.host.HostService;
import org.onosproject.vpls.api.VplsData;
import org.onosproject.vpls.api.VplsStore;
import org.slf4j.Logger;

import java.util.Collection;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static org.slf4j.LoggerFactory.getLogger;

/**
 * Handles neighbour messages on behalf of the VPLS application. Handlers
 * will be changed automatically by interface or network configuration events.
 */
@Component(immediate = true)
public class VplsNeighbourHandler {
    private static final String UNKNOWN_CONTEXT = "Unknown context type: {}";

    private static final String CAN_NOT_FIND_VPLS =
            "Cannot find VPLS for port {} with VLAN Id {}.";

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected InterfaceService interfaceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected NeighbourResolutionService neighbourService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected VplsStore vplsStore;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected NetworkConfigService configService;

    // re-registers the neighbour handler whenever interfaces change
    private VplsInterfaceListener interfaceListener = new VplsInterfaceListener();

    protected VplsNeighbourMessageHandler neighbourHandler =
            new VplsNeighbourMessageHandler();

    protected VplsConfigListener configListener = new VplsConfigListener();

    private final Logger log = getLogger(getClass());

    private ApplicationId appId;

    @Activate
    protected void activate() {
        appId = coreService.registerApplication(VplsManager.VPLS_APP);
        interfaceService.addListener(interfaceListener);
        configService.addListener(configListener);
        configNeighbourHandler();
    }

    @Deactivate
    protected void deactivate() {
        interfaceService.removeListener(interfaceListener);
        configService.removeListener(configListener);
        neighbourService.unregisterNeighbourHandlers(appId);
    }

    /**
     * Registers neighbour handler to all available interfaces.
     * Existing registrations are dropped first so the set stays in sync with
     * the current interface configuration.
     */
    protected void configNeighbourHandler() {
        neighbourService.unregisterNeighbourHandlers(appId);
        interfaceService
                .getInterfaces()
                .forEach(intf -> neighbourService.registerNeighbourHandler(intf,
                                                                           neighbourHandler,
                                                                           appId));
    }

    /**
     * Handler for neighbour messages.
     * Dispatches REQUEST and REPLY messages to the corresponding handler.
     */
    private class VplsNeighbourMessageHandler implements NeighbourMessageHandler {

        @Override
        public void handleMessage(NeighbourMessageContext context,
                                  HostService hostService) {
            switch (context.type()) {
                case REQUEST:
                    handleRequest(context);
                    break;
                case REPLY:
                    handleReply(context, hostService);
                    break;
                default:
                    log.warn(UNKNOWN_CONTEXT, context.type());
                    break;
            }
        }
    }

    /**
     * Handles request messages.
     *
     * @param context the message context
     */
    protected void handleRequest(NeighbourMessageContext context) {
        // Find target VPLS first, then broadcast to all interface of this VPLS
        VplsData vplsData = findVpls(context);
        if (vplsData != null) {
            vplsData.interfaces().stream()
                    // do not send the request back out of the port it arrived on
                    .filter(intf -> !context.inPort().equals(intf.connectPoint()))
                    .forEach(context::forward);
        } else {
            log.warn(CAN_NOT_FIND_VPLS, context.inPort(), context.vlan());
            context.drop();
        }
    }

    /**
     * Handles reply messages between VLAN tagged interfaces.
     *
     * @param context the message context
     * @param hostService the host service
     */
    protected void handleReply(NeighbourMessageContext context,
                               HostService hostService) {
        // Find target VPLS, then reply to the host
        VplsData vplsData = findVpls(context);
        if (vplsData != null) {
            MacAddress dstMac = context.dstMac();
            Set<Host> hosts = hostService.getHostsByMac(dstMac);
            // keep only destinations whose interface belongs to the same VPLS
            hosts = hosts.stream()
                    .filter(host -> vplsData.interfaces().contains(getHostInterface(host)))
                    .collect(Collectors.toSet());

            // reply to all host in same VPLS
            hosts.stream()
                    .map(this::getHostInterface)
                    .filter(Objects::nonNull)
                    .forEach(context::forward);
        } else {
            // this can happen when we remove an interface from a VPLS;
            // just ignore this message
            log.warn(CAN_NOT_FIND_VPLS, context.inPort(), context.vlan());
            context.drop();
        }
    }

    /**
     * Finds the VPLS with given neighbour message context.
     *
     * @param context the neighbour message context
     * @return the VPLS for specific neighbour message context,
     *         or null when no VPLS has an interface matching the message's
     *         ingress port and VLAN
     */
    private VplsData findVpls(NeighbourMessageContext context) {
        Collection<VplsData> vplses = vplsStore.getAllVpls();
        for (VplsData vplsData : vplses) {
            Set<Interface> interfaces = vplsData.interfaces();
            ConnectPoint port = context.inPort();
            VlanId vlanId = context.vlan();
            boolean match = interfaces.stream()
                    .anyMatch(iface -> iface.connectPoint().equals(port) &&
                            iface.vlan().equals(vlanId));
            if (match) {
                return vplsData;
            }
        }
        return null;
    }

    /**
     * Finds the network interface related to the host.
     *
     * @param host the host
     * @return the interface related to the host, or null when none matches
     *         the host's location and VLAN
     */
    private Interface getHostInterface(Host host) {
        Set<Interface> interfaces = interfaceService.getInterfaces();
        return interfaces.stream()
                .filter(iface -> iface.connectPoint().equals(host.location()) &&
                        iface.vlan().equals(host.vlan()))
                .findFirst()
                .orElse(null);
    }

    /**
     * Listener for interface configuration events.
     * Any interface change triggers a full re-registration of the handler.
     */
    private class VplsInterfaceListener implements InterfaceListener {

        @Override
        public void event(InterfaceEvent event) {
            configNeighbourHandler();
        }
    }

    /**
     * Listener for network configuration events.
     * Any network-config change triggers a full re-registration of the handler.
     */
    private class VplsConfigListener implements NetworkConfigListener {

        @Override
        public void event(NetworkConfigEvent event) {
            configNeighbourHandler();
        }
    }
}
package org.edx.mobile.user;

import android.content.Context;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.webkit.MimeTypeMap;

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.inject.Inject;
import com.google.inject.Singleton;

import org.edx.mobile.event.AccountDataLoadedEvent;
import org.edx.mobile.event.ProfilePhotoUpdatedEvent;
import org.edx.mobile.http.CallTrigger;
import org.edx.mobile.http.ErrorHandlingCallback;
import org.edx.mobile.http.HttpResponseStatusException;
import org.edx.mobile.http.cache.CacheManager;
import org.edx.mobile.logger.Logger;
import org.edx.mobile.model.api.EnrolledCoursesResponse;
import org.edx.mobile.module.prefs.LoginPrefs;
import org.edx.mobile.util.Config;
import org.edx.mobile.view.common.TaskMessageCallback;
import org.edx.mobile.view.common.TaskProgressCallback;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import de.greenrobot.event.EventBus;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.Response;

/**
 * User-related API facade: profile image upload, account update callbacks and
 * retrieval of the user's enrolled courses (network with cache fallback).
 */
@Singleton
public class UserAPI {
    private Logger logger = new Logger(UserAPI.class.getName());

    @Inject
    private UserService userService;

    @Inject
    private Config config;

    @Inject
    private CacheManager cache;

    @Inject
    private Gson gson;

    /**
     * Callback for account-data updates: broadcasts the refreshed {@link Account}
     * on the event bus and persists the user's profile image in login prefs.
     */
    public static class AccountDataUpdatedCallback extends ErrorHandlingCallback<Account> {
        @Inject
        private LoginPrefs loginPrefs;

        @NonNull
        private final String username;

        public AccountDataUpdatedCallback(@NonNull final Context context,
                                          @NonNull final String username,
                                          @NonNull final CallTrigger type) {
            super(context, type);
            this.username = username;
        }

        public AccountDataUpdatedCallback(@NonNull final Context context,
                                          @NonNull final String username,
                                          @NonNull final CallTrigger type,
                                          @Nullable final TaskProgressCallback progressCallback) {
            super(context, type, progressCallback);
            this.username = username;
        }

        public AccountDataUpdatedCallback(@NonNull final Context context,
                                          @NonNull final String username,
                                          @NonNull final CallTrigger type,
                                          @Nullable final TaskMessageCallback messageCallback) {
            super(context, type, messageCallback);
            this.username = username;
        }

        public AccountDataUpdatedCallback(@NonNull final Context context,
                                          @NonNull final String username,
                                          @NonNull final CallTrigger type,
                                          @Nullable final TaskProgressCallback progressCallback,
                                          @Nullable final TaskMessageCallback messageCallback) {
            super(context, type, progressCallback, messageCallback);
            this.username = username;
        }

        @Override
        protected void onResponse(@NonNull final Account account) {
            EventBus.getDefault().post(new AccountDataLoadedEvent(account));
            // Store the logged in user's ProfileImage
            loginPrefs.setProfileImage(username, account.getProfileImage());
        }
    }

    /**
     * Uploads a new profile image for the given user.
     *
     * @param username the account to update
     * @param file     JPEG image file to upload
     * @return the pending Retrofit call
     */
    public Call<ResponseBody> setProfileImage(@NonNull String username, @NonNull final File file) {
        final String mimeType = "image/jpeg";
        return userService.setProfileImage(
                username,
                "attachment;filename=filename." + MimeTypeMap.getSingleton().getExtensionFromMimeType(mimeType),
                RequestBody.create(MediaType.parse(mimeType), file));
    }

    /**
     * Callback for profile-image uploads/removals: broadcasts the new image URI
     * on the event bus and clears the stored image when it was removed.
     */
    public static class ProfileImageUpdatedCallback extends ErrorHandlingCallback<ResponseBody> {
        @Inject
        private LoginPrefs loginPrefs;

        @NonNull
        private final String username;

        @Nullable
        private final Uri profileImageUri;

        public ProfileImageUpdatedCallback(@NonNull final Context context,
                                           @NonNull final String username,
                                           @Nullable final File profileImageFile,
                                           @NonNull final CallTrigger type) {
            super(context, type);
            this.username = username;
            // A null file means the profile image is being removed.
            profileImageUri = profileImageFile == null ? null : Uri.fromFile(profileImageFile);
        }

        public ProfileImageUpdatedCallback(@NonNull final Context context,
                                           @NonNull final String username,
                                           @Nullable final File profileImageFile,
                                           @NonNull final CallTrigger type,
                                           @Nullable final TaskProgressCallback progressCallback) {
            super(context, type, progressCallback);
            this.username = username;
            profileImageUri = profileImageFile == null ? null : Uri.fromFile(profileImageFile);
        }

        public ProfileImageUpdatedCallback(@NonNull final Context context,
                                           @NonNull final String username,
                                           @Nullable final File profileImageFile,
                                           @NonNull final CallTrigger type,
                                           @Nullable final TaskMessageCallback messageCallback) {
            super(context, type, messageCallback);
            this.username = username;
            profileImageUri = profileImageFile == null ? null : Uri.fromFile(profileImageFile);
        }

        public ProfileImageUpdatedCallback(@NonNull final Context context,
                                           @NonNull final String username,
                                           @Nullable final File profileImageFile,
                                           @NonNull final CallTrigger type,
                                           @Nullable final TaskProgressCallback progressCallback,
                                           @Nullable final TaskMessageCallback messageCallback) {
            super(context, type, progressCallback, messageCallback);
            this.username = username;
            profileImageUri = profileImageFile == null ? null : Uri.fromFile(profileImageFile);
        }

        @Override
        protected void onResponse(@NonNull final ResponseBody response) {
            EventBus.getDefault().post(new ProfilePhotoUpdatedEvent(username, profileImageUri));
            if (profileImageUri == null) {
                // Delete the logged in user's ProfileImage
                loginPrefs.setProfileImage(username, null);
            }
        }
    }

    /**
     * Builds the enrolled-courses endpoint URL for a user; also used as the
     * cache key in {@link #getUserEnrolledCourses(String, boolean)}.
     */
    public @NonNull String getUserEnrolledCoursesURL(@NonNull String username) {
        return config.getApiHostURL() + "/api/mobile/v0.5/users/" + username + "/course_enrollments";
    }

    /**
     * Fetches the user's enrolled courses, optionally trying the cache first
     * and falling back to the cache on network failure.
     *
     * @param username the user whose enrollments to fetch
     * @param tryCache whether to consult the cache before hitting the network
     * @return the parsed list of enrollments
     * @throws HttpResponseStatusException on an unsuccessful response with no usable cache entry
     * @throws Exception                   on network or parsing failures
     */
    public @NonNull List<EnrolledCoursesResponse> getUserEnrolledCourses(@NonNull String username, boolean tryCache) throws Exception {
        String json = null;
        final String cacheKey = getUserEnrolledCoursesURL(username);

        // try to get from cache if we should
        if (tryCache) {
            try {
                json = cache.get(cacheKey);
            } catch (IOException e) {
                logger.debug(e.toString());
            }
        }

        // if we don't have a json yet, get it from userService
        if (json == null) {
            Response<ResponseBody> response = userService.getUserEnrolledCourses(username).execute();
            if (response.isSuccessful()) {
                // FIX: read the body of the response we already executed, instead of
                // issuing and executing a second, identical network request.
                json = response.body().string();
                // cache result
                try {
                    cache.put(cacheKey, json);
                } catch (IOException e) {
                    logger.debug(e.toString());
                }
            } else {
                // Cache has already been checked, and connectivity
                // can't be established, so throw an exception.
                if (tryCache) throw new HttpResponseStatusException(response.code());
                // Otherwise fall back to fetching from the cache
                try {
                    json = cache.get(cacheKey);
                } catch (IOException e) {
                    logger.debug(e.toString());
                    throw new HttpResponseStatusException(response.code());
                }
                // If the cache is empty, then throw an exception.
                if (json == null) throw new HttpResponseStatusException(response.code());
            }
        }

        // We aren't using TypeToken here because it throws NoClassDefFoundError
        final JsonArray ary = gson.fromJson(json, JsonArray.class);
        final List<EnrolledCoursesResponse> ret = new ArrayList<>(ary.size());
        for (int cnt = 0; cnt < ary.size(); ++cnt) {
            ret.add(gson.fromJson(ary.get(cnt), EnrolledCoursesResponse.class));
        }
        return ret;
    }
}
/*
 * ARX: Powerful Data Anonymization
 * Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.deidentifier.arx.aggregates;

import java.io.Serializable;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Date;

import org.deidentifier.arx.DataType;
import org.deidentifier.arx.DataType.ARXDate;
import org.deidentifier.arx.DataType.ARXDecimal;
import org.deidentifier.arx.DataType.ARXInteger;

/**
 * This abstract class represents an aggregate function.
 *
 * @author Fabian Prasser
 * @param <T>
 */
public abstract class AggregateFunction<T> implements Serializable {

    /**
     * A builder for aggregate functions.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class AggregateFunctionBuilder<T> {

        /** The data type the built functions operate on. */
        private DataType<T> type;

        /**
         * Creates a new instance.
         *
         * @param type
         */
        private AggregateFunctionBuilder(DataType<T> type) {
            this.type = type;
        }

        /**
         * An aggregate function that returns the arithmetic mean, if it can be computed, NULL otherwise.
         *
         * @return
         */
        public final AggregateFunction<T> createArithmeticMeanFunction() {
            return new GenericArithmeticMean<T>(type);
        }

        /**
         * An aggregate function that returns the arithmetic mean of min & max, if it can be computed, NULL otherwise.
         *
         * @return
         */
        public final AggregateFunction<T> createArithmeticMeanOfBoundsFunction() {
            return new GenericArithmeticMeanOfBounds<T>(type);
        }

        /**
         * An aggregate function that returns an interval consisting of the
         * first and the last element following the predefined order.
         *
         * @return
         */
        public final AggregateFunction<T> createBoundsFunction() {
            return new GenericBounds<T>(type);
        }

        /**
         * An aggregate function that returns a constant value.
         *
         * @param value
         * @return
         */
        public final AggregateFunction<T> createConstantFunction(String value) {
            return new GenericConstant<T>(type, value);
        }

        /**
         * An aggregate function that returns the geometric mean, if it can be computed, NULL otherwise.
         *
         * @return
         */
        public final AggregateFunction<T> createGeometricMeanFunction() {
            return new GenericGeometricMean<T>(type);
        }

        /**
         * An aggregate function that returns the geometric mean of min & max, if it can be computed, NULL otherwise.
         *
         * @return
         */
        public final AggregateFunction<T> createGeometricMeanOfBoundsFunction() {
            return new GenericGeometricMeanOfBounds<T>(type);
        }

        /**
         * An aggregate function that returns an interval [min, max].
         *
         * @return
         */
        public final AggregateFunction<T> createIntervalFunction() {
            return new GenericInterval<T>(type, true, true);
        }

        /**
         * An aggregate function that returns an interval [min, max].
         *
         * @param lowerIncluded
         * @param upperIncluded
         * @return
         */
        public final AggregateFunction<T> createIntervalFunction(boolean lowerIncluded, boolean upperIncluded) {
            return new GenericInterval<T>(type, lowerIncluded, upperIncluded);
        }

        /**
         * An aggregate function that returns a common prefix.
         *
         * @return
         */
        public final AggregateFunction<T> createPrefixFunction() {
            return new GenericCommonPrefix<T>(type, null);
        }

        /**
         * An aggregate function that returns a common prefix. The remaining characters will be redacted with
         * the given character
         *
         * @param redaction
         * @return
         */
        public final AggregateFunction<T> createPrefixFunction(Character redaction) {
            return new GenericCommonPrefix<T>(type, redaction);
        }

        /**
         * An aggregate function that returns a set of all data values.
         *
         * @return
         */
        public final AggregateFunction<T> createSetFunction() {
            return new GenericSet<T>(type);
        }

        /**
         * An aggregate function that returns a set of the prefixes of the data values. Length is 1
         *
         * @return
         */
        public final AggregateFunction<T> createSetOfPrefixesFunction() {
            return new GenericSetOfPrefixes<T>(type, 1);
        }

        /**
         * An aggregate function that returns a set of the prefixes of the data values.
         *
         * @param length
         * @return
         */
        public final AggregateFunction<T> createSetOfPrefixesFunction(int length) {
            return new GenericSetOfPrefixes<T>(type, length);
        }
    }

    /**
     * An aggregate function that has a parameter.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static abstract class AggregateFunctionWithParameter<T> extends AggregateFunction<T> {

        /** SVUID */
        private static final long serialVersionUID = 1L;

        /**
         * Creates a new instance.
         *
         * @param type
         */
        protected AggregateFunctionWithParameter(DataType<T> type) {
            super(type);
        }

        /**
         * Returns whether the function accepts this parameter.
         *
         * @param parameter
         * @return
         */
        public abstract boolean acceptsParameter(String parameter);

        /**
         * Returns the parameter.
         *
         * @return
         */
        public abstract String getParameter();

        /**
         * Creates a new instance with the given parameter.
         *
         * @param parameter
         * @return
         */
        public abstract AggregateFunctionWithParameter<T> newInstance(String parameter);
    }

    /**
     * An aggregate function that returns the arithmetic mean, if it may be computed, "NULL"
     * otherwise.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericArithmeticMean<T> extends AggregateFunction<T> {

        /** SVUID */
        private static final long serialVersionUID = -901667129625212217L;

        /**
         * Creates a new instance.
         *
         * @param type
         */
        private GenericArithmeticMean(DataType<T> type) {
            super(type);
        }

        @Override
        public String aggregate(String[] values) {

            // Count the number of non-null values
            double count = 0;
            for (String value : values) {
                count += value != null && !DataType.isNull(value) ? 1 : 0;
            }

            // Data-type specific implementation
            if (super.type.getDescription().getWrappedClass() == Date.class) {
                double result = 0d;
                for (String value : values) {
                    Date date = ((ARXDate) type).parse(value);
                    result += date != null ? (double) date.getTime() / count : 0d;
                }
                return ((ARXDate) type).format(new Date((long) result));

            // Data-type specific implementation
            } else if (super.type.getDescription().getWrappedClass() == Long.class) {
                double result = 0d;
                for (String value : values) {
                    Long longValue = ((ARXInteger) type).parse(value);
                    result += longValue != null ? (double) longValue / count : 0d;
                }
                return ((ARXInteger) type).format((long) result);

            // Data-type specific implementation
            } else if (super.type.getDescription().getWrappedClass() == Double.class) {
                double result = 0d;
                for (String value : values) {
                    Double doubleValue = ((ARXDecimal) type).parse(value);
                    result += doubleValue != null ? doubleValue / count : 0d;
                }
                return ((ARXDecimal) type).format(result);

            // Data-type specific implementation
            } else {
                return DataType.NULL_VALUE;
            }
        }

        @Override
        public String toLabel() {
            return "Arithmetic mean";
        }

        @Override
        public String toString() {
            return "ArithmeticMean";
        }
    }

    /**
     * An aggregate function that returns the arithmetic mean of min & max, if it may be computed, "NULL"
     * otherwise.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericArithmeticMeanOfBounds<T> extends AggregateFunction<T> {

        /** SVUID */
        private static final long serialVersionUID = 5067728720270473715L;

        /**
         * Creates a new instance.
         *
         * @param type
         */
        private GenericArithmeticMeanOfBounds(DataType<T> type) {
            super(type);
        }

        @Override
        public String aggregate(String[] values) {
            String min = null;
            String max = null;
            for (String value : values) {
                try {
                    if (value != null && !DataType.isNull(value) && (min == null || type.compare(min, value) > 0)) {
                        min = value;
                    }
                    if (value != null && !DataType.isNull(value) && (max == null || type.compare(max, value) < 0)) {
                        max = value;
                    }
                } catch (Exception e) {
                    // Unparsable value: the mean of bounds is undefined.
                    return DataType.NULL_VALUE;
                }
            }
            if (min == null || max == null) {
                return DataType.NULL_VALUE;
            } else {
                return new GenericArithmeticMean<T>(this.type).aggregate(new String[] { min, max });
            }
        }

        @Override
        public String toLabel() {
            return "Arithmetic mean of bounds";
        }

        @Override
        public String toString() {
            return "ArithmeticMeanOfBounds";
        }
    }

    /**
     * An aggregate function that returns an interval consisting of the
     * first and the last element following the predefined order.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericBounds<T> extends AggregateFunction<T> {

        /** SVUID */
        private static final long serialVersionUID = -8884657842545379206L;

        /**
         * Creates a new instance.
         *
         * @param type
         */
        private GenericBounds(DataType<T> type) {
            super(type);
        }

        @Override
        public String aggregate(String[] values) {
            // Assumes values follow the predefined order; brackets the first and last element.
            return new StringBuilder().append("[")
                                      .append(values[0])
                                      .append(", ")
                                      .append(values[values.length - 1])
                                      .append("]")
                                      .toString();
        }

        @Override
        public String toLabel() {
            return "Bounding values";
        }

        @Override
        public String toString() {
            return "Bounds";
        }
    }

    /**
     * An aggregate function that returns a common prefix.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericCommonPrefix<T> extends AggregateFunctionWithParameter<T> {

        /** SVUID */
        private static final long serialVersionUID = 526809670467390820L;

        /** Optional character used to redact the non-common suffix; null disables redaction. */
        private Character redaction;

        /**
         * Creates a new instance.
         *
         * @param type
         * @param redaction
         */
        private GenericCommonPrefix(DataType<T> type, final Character redaction) {
            super(type);
            this.redaction = redaction;
        }

        @Override
        public boolean acceptsParameter(String parameter) {
            return parameter == null || parameter.length() <= 1;
        }

        @Override
        public String aggregate(String[] values) {

            // Determine length of the longest value (only needed for redaction)
            int length = Integer.MIN_VALUE;
            if (redaction != null) {
                for (String s : values) {
                    length = Math.max(length, s.length());
                }
            }

            // Determine largest common prefix
            int position = 0;
            outer: while (true) {
                if (values[0].length() == position) break outer;
                char c = values[0].charAt(position);
                for (int i = 1; i < values.length; i++) {
                    // FIX: guard against values shorter than values[0]. Previously this
                    // threw StringIndexOutOfBoundsException for inputs of differing
                    // lengths (e.g. {"abc", "ab"}); a shorter value simply ends the
                    // common prefix.
                    if (position >= values[i].length() || values[i].charAt(position) != c) {
                        break outer;
                    }
                }
                position++;
            }
            position--;
            char[] result;
            if (redaction != null) {
                result = new char[length];
                Arrays.fill(result, position + 1, length, redaction);
            } else {
                result = new char[position + 1];
            }
            for (int i = 0; i <= position; i++) {
                result[i] = values[0].charAt(i);
            }
            return new String(result);
        }

        @Override
        public String getParameter() {
            if (redaction == null) return null;
            else return String.valueOf(redaction);
        }

        @Override
        public AggregateFunctionWithParameter<T> newInstance(String parameter) {
            if (parameter == null || parameter.length() == 0) return new GenericCommonPrefix<T>(this.type, null);
            else return new GenericCommonPrefix<T>(this.type, parameter.toCharArray()[0]);
        }

        @Override
        public String toLabel() {
            return "Common prefix";
        }

        @Override
        public String toString() {
            if (redaction == null) {
                return "CommonPrefix";
            } else {
                return "CommonPrefix[redaction=" + redaction + "]";
            }
        }
    }

    /**
     * An aggregate function that returns a constant value.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericConstant<T> extends AggregateFunctionWithParameter<T> {

        /** SVUID */
        private static final long serialVersionUID = -8995068916108125096L;

        /** The constant value returned by this function. */
        private String value;

        /**
         * Creates a new instance.
         *
         * @param type
         * @param value
         */
        private GenericConstant(DataType<T> type, String value) {
            super(type);
            this.value = value;
        }

        @Override
        public boolean acceptsParameter(String parameter) {
            return parameter != null;
        }

        @Override
        public String aggregate(String[] values) {
            return value;
        }

        @Override
        public String getParameter() {
            return value;
        }

        @Override
        public AggregateFunctionWithParameter<T> newInstance(String parameter) {
            return new GenericConstant<T>(this.type, parameter);
        }

        @Override
        public String toLabel() {
            return "Constant value";
        }

        @Override
        public String toString() {
            return "Constant[value=" + value + "]";
        }
    }

    /**
     * An aggregate function that returns the geometric mean, if it may be computed, "NULL"
     * otherwise.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericGeometricMean<T> extends AggregateFunction<T> {

        /** SVUID */
        private static final long serialVersionUID = -1756610766270481335L;

        /**
         * Creates a new instance.
         *
         * @param type
         */
        private GenericGeometricMean(DataType<T> type) {
            super(type);
        }

        @Override
        public String aggregate(String[] values) {

            // Count the number of non-null values
            double count = 0;
            for (String value : values) {
                count += value != null && !DataType.isNull(value) ? 1 : 0;
            }

            // Data-type specific implementation; the geometric mean is computed
            // as 10^(mean of log10 of the values).
            if (super.type.getDescription().getWrappedClass() == Date.class) {
                double result = 0d;
                for (String value : values) {
                    Date date = ((ARXDate) type).parse(value);
                    result += date != null ? Math.log10((double) date.getTime()) / count : 0d;
                }
                return ((ARXDate) type).format(new Date((long) Math.pow(10d, result)));

            // Data-type specific implementation
            } else if (super.type.getDescription().getWrappedClass() == Long.class) {
                double result = 0d;
                for (String value : values) {
                    Long longValue = ((ARXInteger) type).parse(value);
                    result += longValue != null ? Math.log10((double) longValue) / count : 0d;
                }
                return ((ARXInteger) type).format((long) Math.pow(10d, result));

            // Data-type specific implementation
            } else if (super.type.getDescription().getWrappedClass() == Double.class) {
                double result = 0d;
                for (String value : values) {
                    Double doubleValue = ((ARXDecimal) type).parse(value);
                    result += doubleValue != null ? Math.log10(doubleValue) / count : 0d;
                }
                return ((ARXDecimal) type).format(Math.pow(10d, result));

            // Data-type specific implementation
            } else {
                return DataType.NULL_VALUE;
            }
        }

        @Override
        public String toLabel() {
            return "Geometric mean";
        }

        @Override
        public String toString() {
            return "GeometricMean";
        }
    }

    /**
     * An aggregate function that returns the geometric mean of min & max, if it may be computed, "NULL"
     * otherwise.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericGeometricMeanOfBounds<T> extends AggregateFunction<T> {

        /** SVUID */
        private static final long serialVersionUID = 8155390779775522723L;

        /**
         * Creates a new instance.
         *
         * @param type
         */
        private GenericGeometricMeanOfBounds(DataType<T> type) {
            super(type);
        }

        @Override
        public String aggregate(String[] values) {
            String min = null;
            String max = null;
            for (String value : values) {
                try {
                    if (value != null && !DataType.isNull(value) && (min == null || type.compare(min, value) > 0)) {
                        min = value;
                    }
                    if (value != null && !DataType.isNull(value) && (max == null || type.compare(max, value) < 0)) {
                        max = value;
                    }
                } catch (Exception e) {
                    // Unparsable value: the mean of bounds is undefined.
                    return DataType.NULL_VALUE;
                }
            }
            if (min == null || max == null) {
                return DataType.NULL_VALUE;
            } else {
                return new GenericGeometricMean<T>(this.type).aggregate(new String[] { min, max });
            }
        }

        @Override
        public String toLabel() {
            return "Geometric mean of bounds";
        }

        @Override
        public String toString() {
            return "GeometricMeanOfBounds";
        }
    }

    /**
     * An aggregate function that returns an interval [min, max].
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericInterval<T> extends AggregateFunction<T> {

        /** SVUID */
        private static final long serialVersionUID = -5182521036467379023L;

        /** Whether the lower bound is included, i.e. rendered with '['. */
        private final boolean lowerIncluded;

        /** Whether the upper bound is included, i.e. rendered with ']'. */
        private final boolean upperIncluded;

        /**
         * Creates a new instance.
         *
         * @param type
         * @param lowerIncluded
         * @param upperIncluded
         */
        private GenericInterval(DataType<T> type, boolean lowerIncluded, boolean upperIncluded) {
            super(type);
            this.lowerIncluded = lowerIncluded;
            this.upperIncluded = upperIncluded;
        }

        @Override
        public String aggregate(String[] values) {
            String min = null;
            String max = null;
            for (String value : values) {
                try {
                    if (min == null || type.compare(min, value) > 0) {
                        min = value;
                    }
                    if (max == null || type.compare(max, value) < 0) {
                        max = value;
                    }
                } catch (NumberFormatException | ParseException e) {
                    throw new RuntimeException(e);
                }
            }
            return new StringBuilder().append(lowerIncluded ? "[" : "]")
                                      .append(min)
                                      .append(", ")
                                      .append(max)
                                      .append(upperIncluded ? "]" : "[")
                                      .toString();
        }

        @Override
        public String toLabel() {
            return "Interval";
        }

        @Override
        public String toString() {
            return "Interval";
        }
    }

    /**
     * An aggregate function that returns a set of all data values.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericSet<T> extends AggregateFunction<T> {

        /** SVUID */
        private static final long serialVersionUID = -4029191421720743653L;

        /**
         * Creates a new instance.
         *
         * @param type
         */
        private GenericSet(DataType<T> type) {
            super(type);
        }

        @Override
        public String aggregate(String[] values) {
            StringBuilder b = new StringBuilder();
            b.append("{");
            for (int i = 0; i < values.length; i++) {
                b.append(values[i]);
                if (i < values.length - 1) {
                    b.append(", ");
                }
            }
            b.append("}");
            return b.toString();
        }

        @Override
        public String toLabel() {
            return "Set of values";
        }

        @Override
        public String toString() {
            return "Set";
        }
    };

    /**
     * An aggregate function that returns a set of the prefixes of the data values.
     *
     * @author Fabian Prasser
     * @param <T>
     */
    public static class GenericSetOfPrefixes<T> extends AggregateFunctionWithParameter<T> {

        /** SVUID */
        private static final long serialVersionUID = -4164142474804296433L;

        /** Number of leading characters kept from each value. */
        private int length;

        /**
         * Creates a new instance.
         *
         * @param type
         * @param length
         */
        private GenericSetOfPrefixes(DataType<T> type, int length) {
            super(type);
            this.length = length;
        }

        @Override
        public boolean acceptsParameter(String parameter) {
            try {
                return Integer.parseInt(parameter) > 0;
            } catch (Exception e) {
                return false;
            }
        }

        @Override
        public String aggregate(String[] values) {
            StringBuilder b = new StringBuilder();
            for (int i = 0; i < values.length; i++) {
                // Prefix is clamped to the value's own length.
                int size = Math.min(length, values[i].length());
                b.append(values[i].substring(0, size));
                if (i < values.length - 1) {
                    b.append("-");
                }
            }
            return b.toString();
        }

        @Override
        public String getParameter() {
            return String.valueOf(length);
        }

        @Override
        public AggregateFunctionWithParameter<T> newInstance(String parameter) {
            return new GenericSetOfPrefixes<T>(this.type, Integer.parseInt(parameter));
        }

        @Override
        public String toLabel() {
            return "Set of prefixes";
        }

        @Override
        public String toString() {
            return "SetOfPrefixes[length=" + length + "]";
        }
    };

    /** SVUID */
    private static final long serialVersionUID = 3803318906010996154L;

    /**
     * Returns a builder for the given data type.
     *
     * @param <T>
     * @param type
     * @return
     */
    public static <T> AggregateFunctionBuilder<T> forType(DataType<T> type) {
        return new AggregateFunctionBuilder<T>(type);
    }

    /** The data type. */
    protected DataType<T> type;

    /**
     * Constructor.
     *
     * @param type
     */
    protected AggregateFunction(DataType<T> type) {
        this.type = type;
    }

    /**
     * This function returns an aggregate value.
     *
     * @param values
     * @return
     */
    public abstract String aggregate(String[] values);

    /**
     * Returns whether the function accepts a parameter.
     *
     * @return
     */
    public boolean hasParameter() {
        return (this instanceof AggregateFunctionWithParameter);
    }

    /**
     * Returns a label.
     *
     * @return
     */
    public abstract String toLabel();

    @Override
    public abstract String toString();
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Collections;
import java.util.Objects;

/**
 * A significance heuristic whose score is computed by a user-supplied script.
 * The script sees the subset/superset frequencies and sizes through four
 * mutable {@link LongAccessor} holders registered as script variables.
 */
public class ScriptHeuristic extends SignificanceHeuristic {

    public static final String NAME = "script_heuristic";

    // Mutable holders exposed to the script; getScore() writes into them
    // before each script invocation.
    private final LongAccessor subsetSizeHolder;
    private final LongAccessor supersetSizeHolder;
    private final LongAccessor subsetDfHolder;
    private final LongAccessor supersetDfHolder;
    private final Script script;

    // Compiled executable; null until initialize(...) has been called.
    ExecutableScript searchScript = null;

    public ScriptHeuristic(Script script) {
        subsetSizeHolder = new LongAccessor();
        supersetSizeHolder = new LongAccessor();
        subsetDfHolder = new LongAccessor();
        supersetDfHolder = new LongAccessor();
        this.script = script;
    }

    /**
     * Read from a stream.
     */
    public ScriptHeuristic(StreamInput in) throws IOException {
        this(new Script(in));
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        script.writeTo(out);
    }

    @Override
    public void initialize(InternalAggregation.ReduceContext context) {
        initialize(context.scriptService());
    }

    @Override
    public void initialize(SearchContext context) {
        initialize(context.scriptService());
    }

    /**
     * Compiles the script and binds the four frequency/size holders as
     * script variables. Must run before {@link #getScore}.
     */
    public void initialize(ScriptService scriptService) {
        searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, Collections.emptyMap());
        searchScript.setNextVar("_subset_freq", subsetDfHolder);
        searchScript.setNextVar("_subset_size", subsetSizeHolder);
        searchScript.setNextVar("_superset_freq", supersetDfHolder);
        searchScript.setNextVar("_superset_size", supersetSizeHolder);
    }

    /**
     * Calculates score with a script
     *
     * @param subsetFreq The frequency of the term in the selected sample
     * @param subsetSize The size of the selected sample (typically number of docs)
     * @param supersetFreq The frequency of the term in the superset from which the sample was taken
     * @param supersetSize The size of the superset from which the sample was taken (typically number of docs)
     * @return a "significance" score
     */
    @Override
    public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) {
        if (searchScript == null) {
            //In tests, when calling assertSearchResponse(..) the response is streamed one additional time with an arbitrary version, see assertVersionSerializable(..).
            // Now, for version before 1.5.0 the score is computed after streaming the response but for scripts the script does not exists yet.
            // assertSearchResponse() might therefore fail although there is no problem.
            // This should be replaced by an exception in 2.0.
            ESLoggerFactory.getLogger("script heuristic").warn("cannot compute score - script has not been initialized yet.");
            return 0;
        }
        // Publish the inputs through the bound holders, then run the script.
        subsetSizeHolder.value = subsetSize;
        supersetSizeHolder.value = supersetSize;
        subsetDfHolder.value = subsetFreq;
        supersetDfHolder.value = supersetFreq;
        return ((Number) searchScript.run()).doubleValue();
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
        builder.startObject(NAME);
        builder.field(ScriptField.SCRIPT.getPreferredName());
        script.toXContent(builder, builderParams);
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(script);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        ScriptHeuristic other = (ScriptHeuristic) obj;
        return Objects.equals(script, other.script);
    }

    /**
     * Parses a {@code script_heuristic} definition: expects exactly one
     * {@code script} field inside the heuristic object.
     *
     * @throws ElasticsearchParseException on unknown fields or a missing script
     */
    public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
            throws IOException, QueryShardException {
        String heuristicName = parser.currentName();
        Script script = null;
        XContentParser.Token token;
        String currentFieldName = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token.equals(XContentParser.Token.FIELD_NAME)) {
                currentFieldName = parser.currentName();
            } else {
                if (parseFieldMatcher.match(currentFieldName, ScriptField.SCRIPT)) {
                    script = Script.parse(parser, parseFieldMatcher);
                } else {
                    throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName);
                }
            }
        }
        if (script == null) {
            throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. no script found in script_heuristic", heuristicName);
        }
        return new ScriptHeuristic(script);
    }

    /**
     * Builder that serializes a {@code script_heuristic} clause.
     */
    public static class ScriptHeuristicBuilder implements SignificanceHeuristicBuilder {

        private Script script = null;

        public ScriptHeuristicBuilder setScript(Script script) {
            this.script = script;
            return this;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
            builder.startObject(NAME);
            builder.field(ScriptField.SCRIPT.getPreferredName());
            script.toXContent(builder, builderParams);
            builder.endObject();
            return builder;
        }
    }

    /**
     * Mutable boxed long exposed to the script as a {@link Number}, so a
     * single object can be rebound between script invocations.
     */
    public final class LongAccessor extends Number {
        public long value;

        @Override
        public int intValue() {
            return (int) value;
        }

        @Override
        public long longValue() {
            return value;
        }

        @Override
        public float floatValue() {
            return value;
        }

        @Override
        public double doubleValue() {
            return value;
        }

        @Override
        public String toString() {
            return Long.toString(value);
        }
    }
}
// SPDX-License-Identifier: BSD-3-Clause
package org.xbill.DNS;

import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.Test;

/**
 * Tests for {@link SVCBRecord}: SvcParam construction, master-file (presentation format)
 * parsing, wire-format round trips, and rejection of malformed text and wire input.
 */
public class SVCBRecordTest {
  @Test
  @SuppressWarnings("deprecation")
  void createParams() throws UnknownHostException, TextParseException {
    List<Integer> mandatoryList = Arrays.asList(SVCBRecord.ALPN, SVCBRecord.IPV4HINT);
    SVCBRecord.ParameterMandatory mandatory = new SVCBBase.ParameterMandatory(mandatoryList);
    assertEquals(SVCBRecord.MANDATORY, mandatory.getKey());
    assertEquals(mandatoryList, mandatory.getValues());

    List<String> alpnList = Arrays.asList("h2", "h3");
    SVCBRecord.ParameterAlpn alpn = new SVCBRecord.ParameterAlpn(alpnList);
    assertEquals(SVCBRecord.ALPN, alpn.getKey());
    assertEquals(alpnList, alpn.getValues());

    SVCBRecord.ParameterPort port = new SVCBBase.ParameterPort(8443);
    assertEquals(SVCBRecord.PORT, port.getKey());
    assertEquals(8443, port.getPort());

    List<Inet4Address> ipv4List =
        Collections.singletonList((Inet4Address) InetAddress.getByName("1.2.3.4"));
    SVCBRecord.ParameterIpv4Hint ipv4hint = new SVCBRecord.ParameterIpv4Hint(ipv4List);
    assertEquals(SVCBRecord.IPV4HINT, ipv4hint.getKey());
    assertEquals(ipv4List, ipv4hint.getAddresses());

    byte[] data = {'a', 'b', 'c'};
    SVCBBase.ParameterEch ech = new SVCBBase.ParameterEch(data);
    assertEquals(SVCBRecord.ECH, ech.getKey());
    // assertArrayEquals compares contents; assertEquals(Object, Object) on arrays would only
    // pass by reference identity, which is not what these tests intend to verify.
    assertArrayEquals(data, ech.getData());

    SVCBRecord.ParameterEchConfig echconfig = new SVCBRecord.ParameterEchConfig(data);
    assertEquals(SVCBRecord.ECHCONFIG, echconfig.getKey());
    assertArrayEquals(data, echconfig.getData());

    List<Inet6Address> ipv6List =
        Collections.singletonList((Inet6Address) InetAddress.getByName("2001:db8::1"));
    SVCBRecord.ParameterIpv6Hint ipv6hint = new SVCBRecord.ParameterIpv6Hint(ipv6List);
    assertEquals(SVCBRecord.IPV6HINT, ipv6hint.getKey());
    assertEquals(ipv6List, ipv6hint.getAddresses());

    byte[] value = {0, 1, 2, 3};
    SVCBRecord.ParameterUnknown unknown = new SVCBRecord.ParameterUnknown(33, value);
    assertEquals(33, unknown.getKey());
    assertArrayEquals(value, unknown.getValue());
  }

  @Test
  void createRecord() throws IOException {
    Name label = Name.fromString("test.com.");
    int svcPriority = 5;
    Name svcDomain = Name.fromString("svc.test.com.");
    SVCBRecord.ParameterMandatory mandatory = new SVCBRecord.ParameterMandatory();
    mandatory.fromString("alpn");
    SVCBRecord.ParameterAlpn alpn = new SVCBRecord.ParameterAlpn();
    alpn.fromString("h1,h2");
    SVCBRecord.ParameterIpv4Hint ipv4 = new SVCBRecord.ParameterIpv4Hint();
    ipv4.fromString("1.2.3.4,5.6.7.8");
    List<SVCBRecord.ParameterBase> params = Arrays.asList(mandatory, ipv4, alpn);
    SVCBRecord record = new SVCBRecord(label, DClass.IN, 300, svcPriority, svcDomain, params);
    assertEquals(Type.SVCB, record.getType());
    assertEquals(label, record.getName());
    assertEquals(svcPriority, record.getSvcPriority());
    assertEquals(svcDomain, record.getTargetName());
    // Param keys are rendered in ascending key order regardless of insertion order.
    assertEquals(
        Arrays.asList(SVCBRecord.MANDATORY, SVCBRecord.ALPN, SVCBRecord.IPV4HINT).toString(),
        record.getSvcParamKeys().toString());
    assertEquals("alpn", record.getSvcParamValue(SVCBRecord.MANDATORY).toString());
    assertEquals("h1,h2", record.getSvcParamValue(SVCBRecord.ALPN).toString());
    assertNull(record.getSvcParamValue(1234));
    Options.unset("BINDTTL");
    Options.unset("noPrintIN");
    assertEquals(
        "test.com.\t\t300\tIN\tSVCB\t5 svc.test.com. mandatory=alpn alpn=h1,h2 ipv4hint=1.2.3.4,5.6.7.8",
        record.toString());
  }

  @Test
  void createRecordDuplicateParam() throws IOException {
    Name label = Name.fromString("test.com.");
    Name svcDomain = Name.fromString("svc.test.com.");
    SVCBRecord.ParameterAlpn alpn = new SVCBRecord.ParameterAlpn();
    alpn.fromString("h1,h2");
    SVCBRecord.ParameterIpv4Hint ipv4 = new SVCBRecord.ParameterIpv4Hint();
    ipv4.fromString("1.2.3.4,5.6.7.8");
    List<SVCBRecord.ParameterBase> params = Arrays.asList(alpn, ipv4, alpn);
    assertThrows(
        IllegalArgumentException.class,
        () -> new SVCBRecord(label, DClass.IN, 300, 5, svcDomain, params));
  }

  @Test
  void aliasMode() throws IOException {
    String str = "0 a.b.c.";
    byte[] bytes = stringToWire(str);
    byte[] expected = new byte[] {0, 0, 1, 'a', 1, 'b', 1, 'c', 0};
    assertArrayEquals(expected, bytes);
    assertEquals(str, wireToString(bytes));
  }

  @Test
  void serviceModeWithoutParameters() throws IOException {
    String str = "1 .";
    byte[] bytes = stringToWire(str);
    byte[] expected = new byte[] {0, 1, 0};
    assertArrayEquals(expected, bytes);
    assertEquals(str, wireToString(bytes));
  }

  @Test
  void serviceModePort() throws IOException {
    String str = "1 . port=8443";
    byte[] bytes = stringToWire(str);
    byte[] expected = new byte[] {0, 1, 0, 0, 3, 0, 2, 0x20, (byte) 0xFB};
    assertArrayEquals(expected, bytes);
    assertEquals(str, wireToString(bytes));
  }

  @Test
  void serviceModeAlpn() throws IOException {
    String str = "1 . alpn=h3";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeNoDefaultAlpn() throws IOException {
    String str = "1 . no-default-alpn";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeMultiKey() throws IOException {
    String str = "1 . alpn=h3 no-default-alpn";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeIntKey() throws IOException {
    // A numeric key is canonicalized to its mnemonic on output.
    String str = "1 . 1=h3";
    assertEquals("1 . alpn=h3", stringToWireToString(str));
  }

  @Test
  void serviceModeMultiValue() throws IOException {
    String str = "1 . alpn=h2,h3";
    byte[] bytes = stringToWire(str);
    byte[] expected = new byte[] {0, 1, 0, 0, 1, 0, 6, 2, 'h', '2', 2, 'h', '3'};
    assertArrayEquals(expected, bytes);
    assertEquals(str, wireToString(bytes));
  }

  @Test
  void serviceModeQuotedValue() throws IOException {
    String str = "1 . alpn=\"h2,h3\"";
    assertEquals("1 . alpn=h2,h3", stringToWireToString(str));
  }

  @Test
  void serviceModeQuotedEscapedValue() throws IOException {
    String str = "1 . alpn=\"h2\\,h3,h\\\\4\"";
    String expectedStr = "1 . alpn=h2\\,h3,h\\\\4";
    byte[] bytes = stringToWire(str);
    byte[] expectedBytes =
        new byte[] {0, 1, 0, 0, 1, 0, 10, 5, 104, 50, 44, 104, 51, 3, 104, '\\', 52};
    // expected value goes first; a failure now reports expected/actual correctly.
    assertArrayEquals(expectedBytes, bytes);
    assertEquals(expectedStr, wireToString(bytes));
  }

  @Test
  void serviceModeAlpnEscapedBytes() throws IOException {
    String str = "1 . alpn=http/1.1,\\001aa\\003\\b,h2";
    String expectedStr = "1 . alpn=http/1.1,\\001aa\\003b,h2";
    byte[] bytes = stringToWire(str);
    byte[] expectedBytes =
        new byte[] {
          0, 1, 0, 0, 1, 0, 18, 8, 104, 116, 116, 112, 47, 49, 46, 49, 5, 1, 97, 97, 3, 98, 2,
          104, 50
        };
    // expected value goes first; a failure now reports expected/actual correctly.
    assertArrayEquals(expectedBytes, bytes);
    assertEquals(expectedStr, wireToString(bytes));
  }

  @Test
  void serviceModeMandatoryAndOutOfOrder() throws IOException {
    String str = "1 . alpn=h3 no-default-alpn mandatory=alpn";
    assertEquals("1 . mandatory=alpn alpn=h3 no-default-alpn", stringToWireToString(str));
  }

  @Test
  void serviceModeEscapedDomain() throws IOException {
    String str = "1 dotty\\.lotty.example.com. no-default-alpn";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeEch() throws IOException {
    String str = "1 h3pool. ech=1234";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeEchMulti() throws IOException {
    String str = "1 h3pool. alpn=h2,h3 ech=1234";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeEchOutOfOrder() throws IOException {
    String str = "1 h3pool. ech=1234 alpn=h2,h3";
    assertEquals("1 h3pool. alpn=h2,h3 ech=1234", stringToWireToString(str));
  }

  @Test
  void serviceModeEchQuoted() throws IOException {
    String str = "1 h3pool. alpn=h2,h3 ech=\"1234\"";
    assertEquals("1 h3pool. alpn=h2,h3 ech=1234", stringToWireToString(str));
  }

  @Test
  void serviceModeObsoleteEchConfigName() throws IOException {
    String str = "1 . echconfig=1234";
    assertEquals("1 . ech=1234", stringToWireToString(str));
  }

  @Test
  void serviceModeIpv4Hint() throws IOException {
    String str = "3 . ipv4hint=4.5.6.7";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeIpv4HintList() throws IOException {
    String str = "5 . ipv4hint=4.5.6.7,8.9.1.2";
    byte[] bytes = stringToWire(str);
    byte[] expected = new byte[] {0, 5, 0, 0, 4, 0, 8, 4, 5, 6, 7, 8, 9, 1, 2};
    assertArrayEquals(expected, bytes);
    assertEquals(str, wireToString(bytes));
  }

  @Test
  void serviceModeIpv4HintQuoted() throws IOException {
    String str = "5 . ipv4hint=\"4.5.6.7,8.9.1.2\"";
    assertEquals("5 . ipv4hint=4.5.6.7,8.9.1.2", stringToWireToString(str));
  }

  @Test
  void serviceModeIpv4HintMultiKey() throws IOException {
    String str = "7 . alpn=h2 ipv4hint=4.5.6.7";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeIpv6Hint() throws IOException {
    String str = "9 . ipv6hint=2001:db8::1";
    assertEquals("9 . ipv6hint=2001:db8:0:0:0:0:0:1", stringToWireToString(str));
  }

  @Test
  void serviceModeIpv6HintMulti() throws IOException {
    String str = "2 . alpn=h2 ipv6hint=2001:db8::1,2001:db8::2";
    assertEquals(
        "2 . alpn=h2 ipv6hint=2001:db8:0:0:0:0:0:1,2001:db8:0:0:0:0:0:2",
        stringToWireToString(str));
  }

  @Test
  void serviceModeUnknownKey() throws IOException {
    String str = "6 . key12345=abcdefg\\012";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeUnknownKeyBytes() throws IOException {
    String str = "8 . key23456=\\000\\001\\002\\003";
    byte[] bytes = stringToWire(str);
    byte[] expected = new byte[] {0, 8, 0, 0x5B, (byte) 0xA0, 0, 4, 0, 1, 2, 3};
    assertArrayEquals(expected, bytes);
    assertEquals(str, wireToString(bytes));
  }

  @Test
  void serviceModeUnknownKeyEscapedChars() throws IOException {
    String str = "1 . key29=a\\b\\c";
    assertEquals("1 . key29=abc", stringToWireToString(str));
  }

  @Test
  void serviceModeUnknownKeyEscapedSlash() throws IOException {
    String str = "65535 . key29=a\\\\b\\\\c";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeUnknownHighKey() throws IOException {
    String str = "65535 . key65535=abcdefg";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void serviceModeUnknownKeyNoValue() throws IOException {
    String str = "65535 . key65535";
    assertEquals(str, stringToWireToString(str));
  }

  @Test
  void masterFormatParsing() throws IOException {
    String str =
        "test.net. 86400 IN SOA test.net. test.net. 2020100900 3600 600 604800 300\n"
            + "test.net. 86400 IN NS ns1.test.net.\n"
            + "test.net. 300 IN HTTPS 0 www.test.net.\n"
            + "test.net. 300 IN SVCB 1 . alpn=h2\n"
            + "test.net. 300 IN HTTPS 1 .\n"
            + "www.test.net. 300 IN A 1.2.3.4\n";
    Master m = new Master(new ByteArrayInputStream(str.getBytes()));
    Record r = m.nextRecord();
    assertEquals(Type.SOA, r.getType());
    r = m.nextRecord();
    assertEquals(Type.NS, r.getType());
    r = m.nextRecord();
    assertEquals(Type.HTTPS, r.getType());
    assertEquals("0 www.test.net.", r.rdataToString());
    r = m.nextRecord();
    assertEquals(Type.SVCB, r.getType());
    assertEquals("1 . alpn=h2", r.rdataToString());
    r = m.nextRecord();
    assertEquals(Type.HTTPS, r.getType());
    assertEquals("1 .", r.rdataToString());
    r = m.nextRecord();
    assertEquals(Type.A, r.getType());
    assertEquals("1.2.3.4", r.rdataToString());
    r = m.nextRecord();
    assertNull(r);
  }

  @Test
  void invalidText() {
    String str = "these are all garbage strings that should fail";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void extraQuotesInParamValues() {
    String str = "5 . ipv4hint=\"4.5.6.7\",\"8.9.1.2\"";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void aliasModeWithParameters() {
    String str = "0 . alpn=h3";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void zeroLengthMandatory() {
    String str = "1 . mandatory";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void zeroLengthAlpnValue() {
    String str = "1 . alpn";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void zeroLengthPortValue() {
    String str = "1 . port";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void zeroLengthIpv4Hint() {
    String str = "1 . ipv4hint";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void zeroLengthEch() {
    String str = "1 . ech";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void zeroLengthIpv6Hint() {
    String str = "1 . ipv6hint";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void emptyKey() {
    String str = "1 . =1234";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void emptyValue() {
    String str = "1 . alpn=";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void emptyKeyAndValue() {
    String str = "1 . =";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void unknownKey() {
    String str = "1 . sport=8443";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void mandatoryListWithSelf() {
    String str = "1 . mandatory=alpn,mandatory alpn=h1";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void mandatoryListWithDuplicate() {
    String str = "1 . mandatory=alpn,ipv4hint,alpn alpn=h1 ipv4hint=1.2.3.4";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void mandatoryListWithMissingParam() {
    String str = "1 . mandatory=alpn,ipv4hint alpn=h1";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void portValueTooLarge() {
    String str = "1 . port=84438";
    assertThrows(IllegalArgumentException.class, () -> stringToWire(str));
  }

  @Test
  void portValueCharAfterInt() {
    String str = "1 . port=443a";
    assertThrows(IllegalArgumentException.class, () -> stringToWire(str));
  }

  @Test
  void noDefaultAlpnWithValue() {
    String str = "1 . no-default-alpn=true";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void emptyString() {
    String str = "";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void svcPriorityTooHigh() {
    String str = "65536 . port=443";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void invalidPortKey() {
    String str = "1 . port<5";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void invalidSvcDomain() {
    String str = "1 fred..harvey port=80";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void duplicateParamKey() {
    String str = "1 . alpn=h2 alpn=h3";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void invalidIpv4Hint() {
    String str = "1 . ipv4hint=2001:db8::1";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void invalidIpv6Hint() {
    String str = "1 . ipv6hint=1.2.3.4";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void negativeSvcPriority() {
    String str = "-1 . port=80";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void svcParamUnknownKeyTooHigh() {
    String str = "65535 . key65536=abcdefg";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void svcParamUnknownKeyCharAfterInt() {
    String str = "65535 . key123a=abcdefg";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void invalidSvcParamKey() {
    String str = "65535 . keyBlooie=abcdefg";
    assertThrows(TextParseException.class, () -> stringToWire(str));
  }

  @Test
  void wireFormatTooShort() {
    byte[] wire = new byte[] {0, 1, 0, 0, 1, 0, 10};
    assertThrows(WireParseException.class, () -> wireToString(wire));
  }

  @Test
  void wireFormatTooLong() {
    byte[] wire = new byte[] {0, 0, 0, 1};
    assertThrows(WireParseException.class, () -> wireToString(wire));
  }

  @Test
  void wireFormatMandatoryTooLong() {
    byte[] wire = new byte[] {0, 1, 0, 0, 0, 0, 3, 0, 1, 55};
    assertThrows(WireParseException.class, () -> wireToString(wire));
  }

  @Test
  void wireFormatAlpnTooShort() {
    byte[] wire = new byte[] {0, 1, 0, 0, 1, 0, 3, 10, 1, 55};
    assertThrows(WireParseException.class, () -> wireToString(wire));
  }

  @Test
  void wireFormatNoDefaultAlpnTooLong() {
    byte[] wire = new byte[] {0, 1, 0, 0, 2, 0, 1, 0};
    assertThrows(WireParseException.class, () -> wireToString(wire));
  }

  @Test
  void wireFormatPortTooLong() {
    byte[] wire = new byte[] {0, 1, 0, 0, 3, 0, 4, 0, 0, 0, 0};
    assertThrows(WireParseException.class, () -> wireToString(wire));
  }

  @Test
  void wireFormatIpv4HintTooLong() {
    byte[] wire = new byte[] {0, 1, 0, 0, 4, 0, 5, 1, 2, 3, 4, 5};
    assertThrows(WireParseException.class, () -> wireToString(wire));
  }

  @Test
  void wireFormatIpv6HintTooShort() {
    byte[] wire = new byte[] {0, 1, 0, 0, 6, 0, 2, 1, 2};
    assertThrows(WireParseException.class, () -> wireToString(wire));
  }

  /** Parses presentation-format rdata text into wire format. */
  public static byte[] stringToWire(String str) throws IOException {
    Tokenizer t = new Tokenizer(str);
    SVCBRecord record = new SVCBRecord();
    record.rdataFromString(t, null);
    DNSOutput out = new DNSOutput();
    record.rrToWire(out, null, true);
    return out.toByteArray();
  }

  /** Parses wire-format rdata bytes and renders them back as presentation text. */
  public static String wireToString(byte[] bytes) throws IOException {
    DNSInput in = new DNSInput(bytes);
    SVCBRecord record = new SVCBRecord();
    record.rrFromWire(in);
    return record.rdataToString();
  }

  /** Round-trips text → wire → text; used to verify canonicalization of param order/format. */
  public static String stringToWireToString(String str) throws IOException {
    return wireToString(stringToWire(str));
  }
}
package org.jenkinsci.plugins.workflow.cps;

import com.cloudbees.groovy.cps.Continuable;
import com.cloudbees.groovy.cps.Continuation;
import com.cloudbees.groovy.cps.Next;
import com.cloudbees.groovy.cps.Outcome;
import com.cloudbees.groovy.cps.impl.CpsCallableInvocation;
import com.cloudbees.groovy.cps.impl.FunctionCallEnv;
import com.cloudbees.groovy.cps.impl.SourceLocation;
import com.cloudbees.groovy.cps.impl.TryBlockEnv;
import com.cloudbees.groovy.cps.sandbox.SandboxInvoker;
import com.google.common.util.concurrent.FutureCallback;
import hudson.model.Action;
import hudson.model.Result;
import jenkins.model.CauseOfInterruption;
import org.jenkinsci.plugins.workflow.actions.BodyInvocationAction;
import org.jenkinsci.plugins.workflow.actions.ErrorAction;
import org.jenkinsci.plugins.workflow.cps.nodes.StepEndNode;
import org.jenkinsci.plugins.workflow.cps.nodes.StepStartNode;
import org.jenkinsci.plugins.workflow.cps.persistence.PersistIn;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.jenkinsci.plugins.workflow.steps.BodyExecution;
import org.jenkinsci.plugins.workflow.steps.BodyExecutionCallback;
import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException;
import org.jenkinsci.plugins.workflow.steps.StepContext;
import org.jenkinsci.plugins.workflow.steps.StepExecution;

import javax.annotation.CheckForNull;
import javax.annotation.concurrent.GuardedBy;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Logger;

import static java.util.logging.Level.*;
import static org.jenkinsci.plugins.workflow.cps.persistence.PersistenceContext.*;

/**
 * {@link BodyExecution} impl for CPS.
 *
 * Instantiated when {@linkplain CpsBodyInvoker#start() the execution is scheduled},
 * and {@link CpsThreadGroup} gets updated with the new thread in the {@link #launch(CpsBodyInvoker, CpsThread, FlowHead)}
 * method, and this is the point in which the actual execution gets under way.
 *
 * <p>
 * This object is serializable while {@link CpsBodyInvoker} isn't.
 *
 * @author Kohsuke Kawaguchi
 * @see CpsBodyInvoker#start()
 */
@PersistIn(PROGRAM)
class CpsBodyExecution extends BodyExecution {
    /**
     * Thread that's executing the body.
     */
    @GuardedBy("this") // 'thread' and 'stopped' needs to be compared & set atomically
    private CpsThread thread;

    /**
     * Set to non-null if the body execution is stopped.
     */
    @GuardedBy("this")
    private FlowInterruptedException stopped;

    // Notified on start/success/failure of the body; never null (may be empty).
    private final List<BodyExecutionCallback> callbacks;

    /**
     * Context for the step who invoked its body.
     */
    private final CpsStepContext context;

    // Flow-node id of the StepStartNode created in addBodyStartFlowNode; null until then
    // (and stays null when createBodyBlockNode is false).
    private String startNodeId;

    private final Continuation onSuccess = new SuccessAdapter();

    /**
     * Unlike {@link #onSuccess} that can only happen after {@link #launch(CpsBodyInvoker, CpsThread, FlowHead)},
     * a failure can happen right after {@link CpsBodyInvoker#start()} before we get a chance to be launched.
     */
    /*package*/ final Continuation onFailure = new FailureAdapter();

    // Final result of the body: set exactly once by setOutcome(); null while still running.
    @GuardedBy("this")
    private Outcome outcome;

    /**
     * @see CpsBodyInvoker#createBodyBlockNode
     */
    private final boolean createBodyBlockNode;

    public CpsBodyExecution(CpsStepContext context, List<BodyExecutionCallback> callbacks, boolean createBodyBlockNode) {
        this.context = context;
        this.callbacks = callbacks;
        this.createBodyBlockNode = createBodyBlockNode;
    }

    /**
     * Starts evaluating the body.
     *
     * If the body is a synchronous closure, this method evaluates the closure synchronously.
     * Otherwise, the body is asynchronous and the method schedules another thread to evaluate the body.
     *
     * @param currentThread
     *      The thread whose context the new thread will inherit.
     */
    @CpsVmThreadOnly
    /*package*/ void launch(CpsBodyInvoker params, CpsThread currentThread, FlowHead head) {
        if (isLaunched())
            throw new IllegalStateException("Already launched");

        // NOTE(review): sn is null when createBodyBlockNode is false; assumes startNodeActions
        // is empty in that configuration — confirm with CpsBodyInvoker.
        StepStartNode sn = addBodyStartFlowNode(head);
        for (Action a : params.startNodeActions) {
            if (a!=null)
                sn.addAction(a);
        }

        StepContext sc = subContext(sn);
        for (BodyExecutionCallback c : callbacks) {
            c.onStart(sc);
        }

        try {
            // TODO: handle arguments to closure
            Object x = params.body.getBody(currentThread).call();

            // body has completed synchronously. mark this done after the fact
            // pointless synchronization to make findbugs happy. This is already done, so there's no cancelling this anyway.
            synchronized (this) {
                this.thread = currentThread;
            }
            onSuccess.receive(x);
        } catch (CpsCallableInvocation e) {
            // execute this closure asynchronously
            // TODO: does it make sense that the new thread shares the same head?
            CpsThread t = currentThread.group.addThread(createContinuable(currentThread, e), head,
                    ContextVariableSet.from(currentThread.getContextVariables(), params.contextOverrides));
            // let the new CpsThread run. Either get the new thread going normally with (null,null), or abort from the beginning
            // due to earlier cancellation
            synchronized (this) {
                t.resume(new Outcome(null, stopped));
                assert this.thread==null;
                this.thread = t;
            }
        } catch (Throwable t) {
            // body has completed synchronously and abnormally
            onFailure.receive(t);
        }
    }

    /**
     * Creates {@link Continuable} that executes the given invocation and pass its result to {@link FutureCallback}.
     *
     * The {@link Continuable} itself will just yield null. {@link CpsThreadGroup} considers the whole
     * execution a failure if any of the threads fail, so this behaviour ensures that a problem in the closure
     * body won't terminate the workflow.
     */
    private Continuable createContinuable(CpsThread currentThread, CpsCallableInvocation inv) {
        // we need FunctionCallEnv that acts as the back drop of try/catch block.
        // TODO: we need to capture the surrounding calling context to capture variables, and switch to ClosureCallEnv
        FunctionCallEnv caller = new FunctionCallEnv(null, onSuccess, null, null);
        if (currentThread.getExecution().isSandbox())
            caller.setInvoker(new SandboxInvoker());

        // catch an exception thrown from body and treat that as a failure
        TryBlockEnv env = new TryBlockEnv(caller, null);
        env.addHandler(Throwable.class, onFailure);

        return new Continuable(
            // this source location is a place holder for the step implementation.
            // perhaps at some point in the future we'll let the Step implementation control this.
            inv.invoke(env, SourceLocation.UNKNOWN, onSuccess));
    }

    // Returns the single StepExecution currently running in the body's thread, if any.
    @Override
    public synchronized Collection<StepExecution> getCurrentExecutions() {
        if (thread==null)   return Collections.emptyList();

        StepExecution s = thread.getStep();
        if (s!=null)        return Collections.singleton(s);
        else                return Collections.emptyList();
    }

    /**
     * Requests that the body be aborted. Records the interruption under the lock, then (if the
     * body already has a thread) asks the running step to stop on the CPS VM thread. Returns
     * false only if the body already completed.
     */
    @Override
    public boolean cancel(final CauseOfInterruption... causes) {
        // 'stopped' and 'thread' are updated atomically
        final CpsThread t;
        synchronized (this) {
            if (isDone())  return false;   // already complete
            stopped = new FlowInterruptedException(Result.ABORTED, causes); // TODO: the fact that I'm hard-coding exception seems to indicate an abstraction leak. Come back and think about this.
            t = this.thread;
        }

        if (t!=null) {
            t.getExecution().runInCpsVmThread(new FutureCallback<CpsThreadGroup>() {
                @Override
                public void onSuccess(CpsThreadGroup g) {
                    StepExecution s = t.getStep();  // this is the part that should run in CpsVmThread
                    if (s == null) {
                        // TODO: if it's not running inside a StepExecution, we need to set an interrupt flag
                        // and interrupt at an earliest convenience
                        return;
                    }

                    try {
                        s.stop(stopped);
                    } catch (Exception e) {
                        LOGGER.log(WARNING, "Failed to stop " + s, e);
                    }
                }

                @Override
                public void onFailure(Throwable t) {
                    // couldn't cancel
                }
            });
        } else {
            // if it hasn't begun executing, we'll stop it when
            // it begins.
        }
        return true;
    }

    // Cancelled only counts once the body has actually completed with the interruption.
    @Override
    public synchronized boolean isCancelled() {
        return stopped!=null && isDone();
    }

    /**
     * Is the execution under way? True after {@link #launch(CpsBodyInvoker, CpsThread, FlowHead)}
     */
    public synchronized boolean isLaunched() {
        return thread!=null;
    }

    // Blocks until setOutcome() is called (wait/notifyAll on this object's monitor).
    @Override
    public synchronized Object get() throws InterruptedException, ExecutionException {
        while (outcome==null) {
            wait();
        }
        if (outcome.isSuccess())    return outcome.getNormal();
        else                        throw new ExecutionException(outcome.getAbnormal());
    }

    // Timed variant of get(); re-computes the remaining wait to guard against spurious wakeups.
    @Override
    public synchronized Object get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        long endTime = System.currentTimeMillis() + unit.toMillis(timeout);
        long remaining;
        while (outcome==null && (remaining=endTime-System.currentTimeMillis()) > 0) {
            wait(remaining);
        }
        if (outcome==null)
            throw new TimeoutException();

        if (outcome.isSuccess())    return outcome.getNormal();
        else                        throw new ExecutionException(outcome.getAbnormal());
    }

    // Records the final result exactly once, wakes all get() waiters, then persists the state.
    // saveState() is deliberately outside the synchronized block.
    private void setOutcome(Outcome o) {
        synchronized (this) {
            if (outcome!=null)
                throw new IllegalStateException("Outcome is already set");
            this.outcome = o;
            notifyAll();    // wake up everyone waiting for the outcome.
        }
        context.saveState();
    }

    public synchronized boolean isDone() {
        return outcome!=null;
    }

    /**
     * Continuation invoked when the body throws: records the failure on a StepEndNode,
     * sets the abnormal outcome, and notifies all callbacks.
     */
    private class FailureAdapter implements Continuation {
        @Override
        public Next receive(Object o) {
            if (!isLaunched()) {
                // failed before we even started. fake the start node that start() would have created.
                addBodyStartFlowNode(CpsThread.current().head);
            }
            StepEndNode en = addBodyEndFlowNode();
            Throwable t = (Throwable)o;
            en.addAction(new ErrorAction(t));
            setOutcome(new Outcome(null,t));
            StepContext sc = subContext(en);
            for (BodyExecutionCallback c : callbacks) {
                c.onFailure(sc, t);
            }
            return Next.terminate(null);
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Continuation invoked when the body completes normally: closes the body block with a
     * StepEndNode, sets the normal outcome, and notifies all callbacks.
     */
    private class SuccessAdapter implements Continuation {
        @Override
        public Next receive(Object o) {
            StepEndNode en = addBodyEndFlowNode();
            setOutcome(new Outcome(o,null));
            StepContext sc = subContext(en);
            for (BodyExecutionCallback c : callbacks) {
                c.onSuccess(sc, o);
            }
            return Next.terminate(null);
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Creates a sub-context to call {@link BodyExecutionCallback}.
     * If {@link #createBodyBlockNode} is false, then we don't have distinctive
     * {@link FlowNode}, so we just hand out the master context.
     */
    private StepContext subContext(FlowNode n) {
        if (n==null)    return context;
        else            return new CpsBodySubContext(context,n);
    }

    /**
     * Inserts the flow node that indicates the beginning of the body invocation.
     *
     * @see #addBodyEndFlowNode()
     */
    private @CheckForNull StepStartNode addBodyStartFlowNode(FlowHead head) {
        if (createBodyBlockNode) {
            StepStartNode start = new StepStartNode(head.getExecution(),
                    context.getStepDescriptor(), head.get());
            this.startNodeId = start.getId();
            start.addAction(new BodyInvocationAction());
            head.setNewHead(start);
            return start;
        } else {
            return null;
        }
    }

    /**
     * Inserts the flow node that indicates the beginning of the body invocation.
     *
     * @see #addBodyStartFlowNode(FlowHead)
     */
    private @CheckForNull StepEndNode addBodyEndFlowNode() {
        if (createBodyBlockNode) {
            try {
                FlowHead head = CpsThread.current().head;

                StepEndNode end = new StepEndNode(head.getExecution(),
                        getBodyStartNode(), head.get());
                end.addAction(new BodyInvocationAction());
                head.setNewHead(end);

                return end;
            } catch (IOException e) {
                // a failure to update the flow graph is unrecoverable for this build
                LOGGER.log(WARNING, "Failed to grow the flow graph", e);
                throw new Error(e);
            }
        } else {
            return null;
        }
    }

    // Resolves the start node by id via the execution; requires launch to have happened.
    public StepStartNode getBodyStartNode() throws IOException {
        if (startNodeId==null)
            throw new IllegalStateException("StepStartNode is not yet created");
        CpsThread t;
        synchronized (this) {// to make findbugs happy
            t = thread;
        }
        return (StepStartNode) t.getExecution().getNode(startNodeId);
    }


    private static final long serialVersionUID = 1L;

    private static final Logger LOGGER = Logger.getLogger(CpsBodyExecution.class.getName());
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import com.google.common.primitives.Ints;

import org.apache.cassandra.cache.IMeasurableMemory;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.utils.ObjectSizes;

/**
 * A row index entry: at minimum the position of a row in the data file. Rows with more than
 * one column-index block are instead represented by the {@link IndexedEntry} subclass, which
 * additionally carries the row-level tombstone and the column index summary ("promoted" index).
 *
 * Serialized layout (see {@link Serializer}): position (long), promoted-size (int), then — only
 * when promoted-size &gt; 0 — deletion time, entry count (int), and the index entries.
 */
public class RowIndexEntry implements IMeasurableMemory
{
    public static final Serializer serializer = new Serializer();

    // Offset of the row in the sstable data file.
    public final long position;

    public RowIndexEntry(long position)
    {
        this.position = position;
    }

    /**
     * Number of bytes {@link Serializer#serialize} will write for this entry.
     * Must be kept in sync with the write order in serialize().
     */
    public int serializedSize()
    {
        int size = TypeSizes.NATIVE.sizeof(position) + TypeSizes.NATIVE.sizeof(promotedSize());
        if (isIndexed())
        {
            List<IndexHelper.IndexInfo> index = columnsIndex();

            size += DeletionTime.serializer.serializedSize(deletionTime(), TypeSizes.NATIVE);
            size += TypeSizes.NATIVE.sizeof(index.size());

            for (IndexHelper.IndexInfo info : index)
                size += info.serializedSize(TypeSizes.NATIVE);
        }
        return size;
    }

    // Size of the promoted (deletion time + column index) portion; 0 for a non-indexed entry,
    // overridden by IndexedEntry.
    protected int promotedSize()
    {
        return 0;
    }

    public static RowIndexEntry create(long position, DeletionTime deletionTime, ColumnIndex index)
    {
        assert index != null;
        assert deletionTime != null;

        // we only consider the columns summary when determining whether to create an IndexedEntry,
        // since if there are insufficient columns to be worth indexing we're going to seek to
        // the beginning of the row anyway, so we might as well read the tombstone there as well.
        if (index.columnsIndex.size() > 1)
            return new IndexedEntry(position, deletionTime, index.columnsIndex);
        else
            return new RowIndexEntry(position);
    }

    /**
     * @return true if this index entry contains the row-level tombstone and column summary. Otherwise,
     * caller should fetch these from the row header.
     */
    public boolean isIndexed()
    {
        return !columnsIndex().isEmpty();
    }

    // Only meaningful on IndexedEntry; the base entry carries no tombstone.
    public DeletionTime deletionTime()
    {
        throw new UnsupportedOperationException();
    }

    // Empty for a non-indexed entry; overridden by IndexedEntry.
    public List<IndexHelper.IndexInfo> columnsIndex()
    {
        return Collections.emptyList();
    }

    public long memorySize()
    {
        return ObjectSizes.getFieldSize(TypeSizes.NATIVE.sizeof(position));
    }

    /**
     * Reads and writes entries in the layout documented on {@link RowIndexEntry}. The promoted-size
     * prefix lets readers skip (or detect the absence of) the promoted index without parsing it.
     */
    public static class Serializer
    {
        public void serialize(RowIndexEntry rie, DataOutput out) throws IOException
        {
            out.writeLong(rie.position);
            out.writeInt(rie.promotedSize());

            if (rie.isIndexed())
            {
                DeletionTime.serializer.serialize(rie.deletionTime(), out);
                out.writeInt(rie.columnsIndex().size());
                for (IndexHelper.IndexInfo info : rie.columnsIndex())
                    info.serialize(out);
            }
        }

        public RowIndexEntry deserialize(DataInput in, Descriptor.Version version) throws IOException
        {
            long position = in.readLong();

            // promoted-size > 0 means an IndexedEntry follows.
            int size = in.readInt();
            if (size > 0)
            {
                DeletionTime deletionTime = DeletionTime.serializer.deserialize(in);

                int entries = in.readInt();
                List<IndexHelper.IndexInfo> columnsIndex = new ArrayList<IndexHelper.IndexInfo>(entries);
                for (int i = 0; i < entries; i++)
                    columnsIndex.add(IndexHelper.IndexInfo.deserialize(in));

                return new IndexedEntry(position, deletionTime, columnsIndex);
            }
            else
            {
                return new RowIndexEntry(position);
            }
        }

        // Skips one full entry: the position long plus the promoted index (if any).
        public void skip(DataInput in) throws IOException
        {
            in.readLong();
            skipPromotedIndex(in);
        }

        // Skips the promoted portion using its size prefix; no-op when there is none.
        public void skipPromotedIndex(DataInput in) throws IOException
        {
            int size = in.readInt();
            if (size <= 0)
                return;

            FileUtils.skipBytesFully(in, size);
        }
    }

    /**
     * An entry in the row index for a row whose columns are indexed.
     */
    private static class IndexedEntry extends RowIndexEntry
    {
        private final DeletionTime deletionTime;
        private final List<IndexHelper.IndexInfo> columnsIndex;

        private IndexedEntry(long position, DeletionTime deletionTime, List<IndexHelper.IndexInfo> columnsIndex)
        {
            super(position);
            assert deletionTime != null;
            // create() only builds an IndexedEntry when there is more than one index block.
            assert columnsIndex != null && columnsIndex.size() > 1;
            this.deletionTime = deletionTime;
            this.columnsIndex = columnsIndex;
        }

        @Override
        public DeletionTime deletionTime()
        {
            return deletionTime;
        }

        @Override
        public List<IndexHelper.IndexInfo> columnsIndex()
        {
            return columnsIndex;
        }

        @Override
        public int promotedSize()
        {
            TypeSizes typeSizes = TypeSizes.NATIVE;
            long size = DeletionTime.serializer.serializedSize(deletionTime, typeSizes);
            size += typeSizes.sizeof(columnsIndex.size()); // number of entries
            for (IndexHelper.IndexInfo info : columnsIndex)
                size += info.serializedSize(typeSizes);

            // overflow here would mean a corrupt promoted index; checkedCast fails fast.
            return Ints.checkedCast(size);
        }

        @Override
        public long memorySize()
        {
            long entrySize = 0;
            for (IndexHelper.IndexInfo idx : columnsIndex)
                entrySize += idx.memorySize();

            return ObjectSizes.getSuperClassFieldSize(TypeSizes.NATIVE.sizeof(position))
                 + ObjectSizes.getFieldSize(// deletionTime
                                            ObjectSizes.getReferenceSize() +
                                            // columnsIndex
                                            ObjectSizes.getReferenceSize())
                 + deletionTime.memorySize()
                 + ObjectSizes.getArraySize(columnsIndex.size(), ObjectSizes.getReferenceSize()) + entrySize + 4;
        }
    }
}
package net.mcft.copy.betterstorage.item;

import java.util.List;

import net.mcft.copy.betterstorage.BetterStorage;
import net.mcft.copy.betterstorage.client.model.ModelBackpack;
import net.mcft.copy.betterstorage.client.model.ModelBackpackArmor;
import net.mcft.copy.betterstorage.config.GlobalConfig;
import net.mcft.copy.betterstorage.container.ContainerBetterStorage;
import net.mcft.copy.betterstorage.container.SlotArmorBackpack;
import net.mcft.copy.betterstorage.content.BetterStorageTiles;
import net.mcft.copy.betterstorage.inventory.InventoryBackpackEquipped;
import net.mcft.copy.betterstorage.inventory.InventoryStacks;
import net.mcft.copy.betterstorage.misc.Constants;
import net.mcft.copy.betterstorage.misc.EquipmentSlot;
import net.mcft.copy.betterstorage.misc.PropertiesBackpack;
import net.mcft.copy.betterstorage.misc.Resources;
import net.mcft.copy.betterstorage.misc.handlers.KeyBindingHandler;
import net.mcft.copy.betterstorage.network.packet.PacketBackpackHasItems;
import net.mcft.copy.betterstorage.tile.TileBackpack;
import net.mcft.copy.betterstorage.tile.entity.TileEntityBackpack;
import net.mcft.copy.betterstorage.utils.DirectionUtils;
import net.mcft.copy.betterstorage.utils.EntityUtils;
import net.mcft.copy.betterstorage.utils.LanguageUtils;
import net.mcft.copy.betterstorage.utils.PlayerUtils;
import net.mcft.copy.betterstorage.utils.RandomUtils;
import net.mcft.copy.betterstorage.utils.StackUtils;
import net.mcft.copy.betterstorage.utils.WorldUtils;
import net.minecraft.block.Block;
import net.minecraft.client.model.ModelBiped;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.client.settings.GameSettings;
import net.minecraft.enchantment.Enchantment;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.init.Items;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
import net.minecraft.network.play.server.S2FPacketSetSlot;
import net.minecraft.util.DamageSource;
import net.minecraft.world.World;
import net.minecraftforge.common.ISpecialArmor;
import net.minecraftforge.common.util.EnumHelper;
import net.minecraftforge.common.util.ForgeDirection;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;

/**
 * The backpack item: an armor piece worn in the chest slot (or stored in a
 * per-entity property when the chestplate-slot option is disabled) that carries
 * its own inventory. Also acts as the item form of the backpack block, so it
 * can be placed into the world and picked back up.
 */
public class ItemBackpack extends ItemArmorBetterStorage implements ISpecialArmor, IDyeableItem {

	public static final ArmorMaterial material = EnumHelper.addArmorMaterial(
			"backpack", 14, new int[]{ 0, 2, 0, 0 }, 15);

	static { material.customCraftingMaterial = Items.leather; }

	protected ItemBackpack(ArmorMaterial material) { super(material, 0, 1); }
	public ItemBackpack() { this(material); }

	/** Returns the container name used for this backpack's GUI. */
	public String getBackpackName() { return Constants.containerBackpack; }

	/** Returns the number of columns this backpack has. */
	public int getBackpackColumns() { return 9; }
	/** Returns the number of rows this backpack has. */
	public int getBackpackRows() { return BetterStorage.globalConfig.getInteger(GlobalConfig.backpackRows); }

	/** Default dye color (leather-brown). A negative value would mean "not dyeable". */
	protected int getDefaultColor() { return 0x805038; }

	protected IInventory getBackpackItemsInternal(EntityLivingBase carrier, EntityPlayer player) {
		PropertiesBackpack backpackData = getBackpackData(carrier);
		int size = (getBackpackColumns() * getBackpackRows());
		if (backpackData.contents == null)
			backpackData.contents = new ItemStack[size];
		// In case the backpack size got changed in
		// the configuration file, update it here.
		else if (backpackData.contents.length != size) {
			ItemStack[] newContents = new ItemStack[size];
			System.arraycopy(backpackData.contents, 0, newContents, 0,
					Math.min(size, backpackData.contents.length));
			backpackData.contents = newContents;
		}
		return new InventoryStacks(getBackpackName(), backpackData.contents);
	}

	/** Returns whether the backpack holds any items (cached flag or actual contents). */
	public boolean containsItems(PropertiesBackpack backpackData) {
		return (backpackData.hasItems ||
				((backpackData.contents != null) && !StackUtils.isEmpty(backpackData.contents)));
	}

	// Model and texture

	@SideOnly(Side.CLIENT)
	private ModelBackpack model;
	@SideOnly(Side.CLIENT)
	private ModelBackpackArmor modelArmor;

	/** Returns the model class of the backpack. */
	@SideOnly(Side.CLIENT)
	public Class<? extends ModelBackpack> getModelClass() { return ModelBackpack.class; }

	/** Lazily instantiates (via reflection) and caches the block model. */
	@SideOnly(Side.CLIENT)
	public ModelBackpack getModel() {
		if (model == null) {
			try { model = getModelClass().getConstructor(boolean.class).newInstance(true); }
			catch (Exception e) { e.printStackTrace(); }
		}
		return model;
	}

	/** Lazily instantiates and caches the worn-armor model wrapper. */
	@Override
	@SideOnly(Side.CLIENT)
	public ModelBiped getArmorModel(EntityLivingBase entity, ItemStack stack, int slot) {
		if (modelArmor == null) {
			try {
				ModelBackpack model = getModelClass().getConstructor(boolean.class).newInstance(false);
				modelArmor = new ModelBackpackArmor(model);
			} catch (Exception e) { e.printStackTrace(); }
		}
		return modelArmor;
	}

	@Override
	public String getArmorTexture(ItemStack stack, Entity entity, int slot, String type) {
		// FIX: compare string contents, not references. The original used (type == "overlay"),
		// which only works while the caller passes an interned literal; equals() is also
		// null-safe when called on the constant.
		return ("overlay".equals(type) ? Resources.textureBackpackOverlay
		                               : Resources.textureBackpack).toString();
	}

	// Item stuff

	// No item sprite is registered; the item presumably renders through the
	// block model instead — TODO(review): confirm against the render handlers.
	@Override
	@SideOnly(Side.CLIENT)
	public int getSpriteNumber() { return 0; }

	@Override
	@SideOnly(Side.CLIENT)
	public void registerIcons(IIconRegister iconRegister) {  }

	@Override
	public String getUnlocalizedName() { return getBlockType().getUnlocalizedName(); }
	@Override
	public String getUnlocalizedName(ItemStack stack) { return getUnlocalizedName(); }

	/** Returns the backpack block this item places. */
	public TileBackpack getBlockType() { return BetterStorageTiles.backpack; }

	// Not equippable through vanilla mechanics (dispensers / right-click);
	// equipping happens by breaking a placed backpack while sneaking.
	@Override
	public boolean isValidArmor(ItemStack stack, int armorType, Entity entity) { return false; }

	@Override
	public int getColor(ItemStack stack) {
		int color = getDefaultColor();
		return ((color >= 0) ? StackUtils.get(stack, color, "display", "color") : color);
	}

	// Two render passes (base + dye overlay) when the backpack is dyeable.
	@Override
	public int getRenderPasses(int metadata) { return ((getDefaultColor() >= 0) ? 2 : 1); }

	@Override
	@SideOnly(Side.CLIENT)
	public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean advancedTooltips) {
		boolean enableHelpTooltips = BetterStorage.globalConfig.getBoolean(GlobalConfig.enableHelpTooltips);
		if (getBackpack(player) == stack) {
			String info = LanguageUtils.translateTooltip(getAdditionalInfo(stack, player));

			// Tell players if someone's using their backpack when they hover over it in the GUI.
			// This is because if the backpack is used by another player it can't be placed down.
			if (ItemBackpack.isBackpackOpen(player)) {
				if (info != null) list.add(info);
				LanguageUtils.translateTooltip(list, "backpack.used");

			// If the backpack can't be removed from its slot (only placed down),
			// tell the player why, like "Contains items" or "Bound backpack".
			} else if (enableHelpTooltips)
				LanguageUtils.translateTooltip(list,
						(info != null) ? "backpack.unequipHint.extended" : "backpack.unequipHint",
						(info != null) ? new String[]{ "%INFO%", info } : new String[0]);
			else if (info != null) list.add(info);

			// If the backpack can be opened by pressing a key, let the player know.
			if (BetterStorage.globalConfig.getBoolean(GlobalConfig.enableBackpackOpen)) {
				String str = GameSettings.getKeyDisplayString(KeyBindingHandler.backpackOpen.getKeyCode());
				LanguageUtils.translateTooltip(list, "backpack.openHint", "%KEY%", str);
			}

		// Tell the player to place down and break a backpack to equip it.
		} else if (enableHelpTooltips) {
			boolean chestplate = BetterStorage.globalConfig.getBoolean(GlobalConfig.backpackChestplate);
			LanguageUtils.translateTooltip(list,
					(chestplate ? "backpack.equipHint" : "backpack.equipHint.extended"));
			// If the backpack doesn't get equipped to the chestplate slot,
			// let players know they can open it in the regular item tooltip.
			if (!chestplate && BetterStorage.globalConfig.getBoolean(GlobalConfig.enableBackpackOpen)) {
				String str = GameSettings.getKeyDisplayString(KeyBindingHandler.backpackOpen.getKeyCode());
				LanguageUtils.translateTooltip(list, "backpack.openHint", "%KEY%", str);
			}
		}
	}

	/** Returns additional info (a string to be translated) of the backpack. */
	protected String getAdditionalInfo(ItemStack stack, EntityPlayer player) {
		return (containsItems(getBackpackData(player)) ? "backpack.containsItems" : null);
	}

	@Override
	public void onArmorTick(World world, EntityPlayer player, ItemStack itemStack) {
		// Replace the armor slot with a custom one, so the player
		// can't unequip the backpack when there's items inside.
		int index = 5 + armorType;
		Slot slotBefore = player.inventoryContainer.getSlot(index);
		if (slotBefore instanceof SlotArmorBackpack) return;
		int slotIndex = player.inventory.getSizeInventory() - getChestSlotOffset(player) - armorType;
		SlotArmorBackpack slot = new SlotArmorBackpack(player.inventory, slotIndex, 8, 8 + armorType * 18);
		slot.slotNumber = index;
		player.inventoryContainer.inventorySlots.set(index, slot);
	}

	// For compatibility with Galacticraft: its player inventory inserts extra
	// slots, shifting the armor slot indices by 5.
	private int getChestSlotOffset(EntityPlayer player) {
		return isExact(player.inventory,
				"micdoodle8.mods.galacticraft.core.inventory.GCCoreInventoryPlayer") ? 6 : 1;
	}

	/** Returns whether obj's runtime class name exactly matches str (avoids a hard class dependency). */
	private static boolean isExact(Object obj, String str) {
		try { return obj.getClass().getName().equals(str); }
		catch (Exception e) { return false; }
	}

	// Disable vanilla armor equipping on right-click.
	@Override
	public ItemStack onItemRightClick(ItemStack stack, World world, EntityPlayer player) { return stack; }

	@Override
	public boolean onItemUse(ItemStack stack, EntityPlayer player, World world,
	                         int x, int y, int z, int side,
	                         float hitX, float hitY, float hitZ) {
		// Place the backpack facing the player.
		ForgeDirection orientation = DirectionUtils.getOrientation(player).getOpposite();
		return placeBackpack(player, player, stack, x, y, z, side, orientation, false, false);
	}

	/** Called every tick regardless of whether the
	 *  backpack is equipped in an armor slot or not. */
	public void onEquippedUpdate(EntityLivingBase player, ItemStack backpack) {  }

	// ISpecialArmor implementation

	@Override
	public ArmorProperties getProperties(EntityLivingBase entity, ItemStack armor,
	                                     DamageSource source, double damage, int slot) {
		// Absorbs 2/25 of incoming damage, capped by remaining durability.
		return new ArmorProperties(0, 2 / 25.0, armor.getMaxDamage() + 1 - armor.getItemDamage());
	}

	@Override
	public int getArmorDisplay(EntityPlayer player, ItemStack armor, int slot) { return 2; }

	@Override
	public void damageArmor(EntityLivingBase entity, ItemStack stack,
	                        DamageSource source, int damage, int slot) {
		if (!takesDamage(stack, source)) return;
		stack.damageItem(damage, entity);
		// If the backpack broke, drop its contents.
		if (stack.stackSize > 0) return;
		PropertiesBackpack backpackData = ItemBackpack.getBackpackData(entity);
		if (backpackData.contents != null)
			for (ItemStack s : backpackData.contents)
				WorldUtils.dropStackFromEntity(entity, s, 2.0F);
		entity.renderBrokenItemStack(stack);
	}

	private static final String[] immuneToDamageType = {
		"inWall", "drown", "starve", "cactus", "fall", "outOfWorld",
		"generic", "wither", "anvil", "fallingBlock", "thrown" };

	protected boolean takesDamage(ItemStack stack, DamageSource source) {
		// Backpacks don't get damaged from certain
		// damage types (see above) and magic damage.
		if (source.isMagicDamage()) return false;
		for (String immune : immuneToDamageType)
			if (immune.equals(source.getDamageType())) return false;
		// Protection enchantments protect the backpack
		// from taking damage from that damage type.
		return (!enchantmentProtection(stack, Enchantment.protection, 0.3, 0.35, 0.4, 0.45) &&
		        !(source.isProjectile() && enchantmentProtection(stack, Enchantment.projectileProtection, 0.4, 0.5, 0.6, 0.7)) &&
		        !(source.isFireDamage() && enchantmentProtection(stack, Enchantment.fireProtection, 0.55, 0.65, 0.75, 0.85)) &&
		        !(source.isExplosion() && enchantmentProtection(stack, Enchantment.blastProtection, 0.65, 0.75, 0.85, 0.95)));
	}

	/** Rolls whether the given enchantment (at its level on the stack) blocks this damage;
	 *  chance[i] is the block probability at enchantment level i+1. */
	private boolean enchantmentProtection(ItemStack stack, Enchantment ench, double... chance) {
		int level = EnchantmentHelper.getEnchantmentLevel(ench.effectId, stack);
		level = Math.min(level - 1, chance.length - 1);
		return ((level >= 0) && RandomUtils.getBoolean(chance[level]));
	}

	// IDyeableItem implementation

	@Override
	public boolean canDye(ItemStack stack) { return (getDefaultColor() >= 0); }

	// Helper functions

	/** Returns the entity's equipped backpack: the chest armor slot if it holds one,
	 *  otherwise the stack stashed in the entity's backpack properties. */
	public static ItemStack getBackpack(EntityLivingBase entity) {
		ItemStack backpack = entity.getEquipmentInSlot(EquipmentSlot.CHEST);
		if ((backpack != null) && (backpack.getItem() instanceof ItemBackpack)) return backpack;
		return getBackpackData(entity).backpack;
	}

	/** Equips (or clears, when backpack is null) the entity's backpack and its contents. */
	public static void setBackpack(EntityLivingBase entity, ItemStack backpack, ItemStack[] contents) {
		boolean setChestplate = (BetterStorage.globalConfig.getBoolean(GlobalConfig.backpackChestplate) ||
				!(entity instanceof EntityPlayer) || hasChestplateBackpackEquipped(entity));
		PropertiesBackpack backpackData = getBackpackData(entity);
		if (!setChestplate) backpackData.backpack = backpack;
		else entity.setCurrentItemOrArmor(EquipmentSlot.CHEST, backpack);
		backpackData.contents = contents;
		ItemBackpack.updateHasItems(entity, backpackData);
	}

	/** Returns whether the entity's backpack sits in the actual chest armor slot. */
	public static boolean hasChestplateBackpackEquipped(EntityLivingBase entity) {
		ItemStack backpack = getBackpack(entity);
		return ((backpack != null)
				? (backpack == entity.getEquipmentInSlot(EquipmentSlot.CHEST)) : false);
	}

	/** Returns whether the player can equip a backpack right now. */
	public static boolean canEquipBackpack(EntityPlayer player) {
		return ((getBackpack(player) == null) &&
				!(BetterStorage.globalConfig.getBoolean(GlobalConfig.backpackChestplate) &&
				  (player.getEquipmentInSlot(EquipmentSlot.CHEST) != null)));
	}

	/** Returns the carrier's backpack inventory, or null if no backpack is equipped. */
	public static IInventory getBackpackItems(EntityLivingBase carrier, EntityPlayer player) {
		ItemStack backpack = getBackpack(carrier);
		if (backpack == null) return null;
		return ((ItemBackpack)backpack.getItem()).getBackpackItemsInternal(carrier, player);
	}
	public static IInventory getBackpackItems(EntityLivingBase carrier) {
		return getBackpackItems(carrier, null);
	}

	public static void initBackpackData(EntityLivingBase entity) {
		EntityUtils.createProperties(entity, PropertiesBackpack.class);
	}

	/** Returns (creating and lazily initializing if needed) the entity's backpack properties. */
	public static PropertiesBackpack getBackpackData(EntityLivingBase entity) {
		PropertiesBackpack backpackData = EntityUtils.getOrCreateProperties(entity, PropertiesBackpack.class);
		if (!backpackData.initialized) {
			updateHasItems(entity, backpackData);
			backpackData.initialized = true;
		}
		return backpackData;
	}

	/** Server-side: recomputes the has-items flag and syncs it to the client when it changed. */
	public static void updateHasItems(EntityLivingBase entity, PropertiesBackpack backpackData) {
		if (entity.worldObj.isRemote || !(entity instanceof EntityPlayer)) return;
		EntityPlayer player = (EntityPlayer)entity;
		boolean hasItems = ((backpackData.contents != null) && !StackUtils.isEmpty(backpackData.contents));
		if (backpackData.hasItems == hasItems) return;
		BetterStorage.networkChannel.sendTo(new PacketBackpackHasItems(hasItems), player);
		backpackData.hasItems = hasItems;
	}

	public static boolean isBackpackOpen(EntityLivingBase entity) {
		return (getBackpackData(entity).playersUsing > 0);
	}

	/** Opens the carrier's equipped backpack for the player.
	 *  Returns if it was successfully opened. */
	public static boolean openBackpack(EntityPlayer player, EntityLivingBase carrier) {
		ItemStack backpack = ItemBackpack.getBackpack(carrier);
		if (backpack == null) return false;
		ItemBackpack backpackType = (ItemBackpack)backpack.getItem();
		IInventory inventory = ItemBackpack.getBackpackItems(carrier, player);
		inventory = new InventoryBackpackEquipped(carrier, player, inventory);
		if (!inventory.isUseableByPlayer(player)) return false;
		int columns = backpackType.getBackpackColumns();
		int rows = backpackType.getBackpackRows();
		Container container = new ContainerBetterStorage(player, inventory, columns, rows);
		String title = StackUtils.get(backpack, "", "display", "Name");
		PlayerUtils.openGui(player, inventory.getInventoryName(), columns, rows, title, container);
		return true;
	}

	/** Places an equipped backpack when the player right clicks
	 *  on the ground while sneaking and holding nothing. */
	public static boolean onPlaceBackpack(EntityPlayer player, int x, int y, int z, int side) {
		if (player.getCurrentEquippedItem() != null || !player.isSneaking()) return false;
		ItemStack backpack = ItemBackpack.getBackpack(player);
		if (backpack == null) return false;
		boolean success = false;
		if (!ItemBackpack.isBackpackOpen(player)) {
			// Try to place the backpack as if it was being held and used by the player.
			success = backpack.getItem().onItemUse(backpack, player, player.worldObj, x, y, z, side, 0, 0, 0);
			if (backpack.stackSize <= 0) {
				ItemBackpack.setBackpack(player, null, null);
				backpack = null;
			}
		}
		// Make sure the client has the same information as the server. It does not sync when
		// backpackChestplate is disabled because there are no changes to the slot in that case.
		if (!player.worldObj.isRemote && success && player instanceof EntityPlayerMP &&
				BetterStorage.globalConfig.getBoolean(GlobalConfig.backpackChestplate)) {
			((EntityPlayerMP)player).playerNetServerHandler.sendPacket(new S2FPacketSetSlot(0, 6, backpack));
		}
		if (success) player.swingItem();
		return success;
	}

	/** Place a backpack down on a block.
	 * @param carrier The carrier of the backpack (non-null).
	 * @param player The player placing the backpack, if any.
	 *               Used to check if they're allowed to place it.
	 * @param backpack The backpack stack.
	 *                 Stack size is decreased if placed successfully.
	 * @param side The side of block the backpack is placed on.
	 *             Anything other than top usually doesn't place it.
	 * @param orientation The orientation the backpack will be placed in.
	 * @param despawn If the backpack should despawn after a while.
	 *                True for mobs, unless hit recently.
	 * @param deathDrop True if the backpack is dropped on death.
	 *                  Will not check for block solidity or entities.
	 * @return If the backpack was placed successfully. */
	public static boolean placeBackpack(EntityLivingBase carrier, EntityPlayer player, ItemStack backpack,
	                                    int x, int y, int z, int side, ForgeDirection orientation,
	                                    boolean despawn, boolean deathDrop) {
		if (backpack.stackSize == 0) return false;
		World world = carrier.worldObj;
		Block blockBackpack = ((ItemBackpack)backpack.getItem()).getBlockType();

		// Return false if the block is too low or too high.
		if ((y <= 0) || (y >= world.getHeight() - 1)) return false;

		// If a replaceable block was clicked, move on.
		// Otherwise, check if the top side was clicked and adjust the position.
		if (!world.getBlock(x, y, z).isReplaceable(world, x, y, z)) {
			if (side != 1) return false;
			y++;
		}

		// If the backpack is dropped on death, return false
		// if it's placed on a non-replaceable block. Otherwise,
		// return false if the block isn't solid on top.
		Block blockBelow = world.getBlock(x, y - 1, z);
		if ((deathDrop ? blockBelow.isReplaceable(world, x, y - 1, z)
		               : !world.isSideSolid(x, y - 1, z, ForgeDirection.UP))) return false;

		// Return false if there's an entity blocking the placement.
		if (!world.canPlaceEntityOnSide(blockBackpack, x, y, z, deathDrop, side, carrier, backpack))
			return false;

		// Return false if the player can't edit the block.
		if ((player != null) && (!world.canMineBlock(player, x, y, z) ||
		                         !player.canPlayerEdit(x, y, z, side, backpack))) return false;

		// Do not actually place the backpack on the client.
		if (world.isRemote) return true;

		// Actually place the block in the world,
		// play place sound and decrease stack size if successful.
		if (!world.setBlock(x, y, z, blockBackpack, orientation.ordinal(), 3)) return false;
		if (world.getBlock(x, y, z) != blockBackpack) return false;

		blockBackpack.onBlockPlacedBy(world, x, y, z, carrier, backpack);
		blockBackpack.onPostBlockPlaced(world, x, y, z, orientation.ordinal());

		TileEntityBackpack te = WorldUtils.get(world, x, y, z, TileEntityBackpack.class);
		te.stack = backpack.copy();
		if (ItemBackpack.getBackpack(carrier) == backpack)
			te.unequip(carrier, despawn);

		String sound = blockBackpack.stepSound.func_150496_b();
		float volume = (blockBackpack.stepSound.getVolume() + 1.0F) / 2.0F;
		float pitch = blockBackpack.stepSound.getPitch() * 0.8F;
		world.playSoundEffect(x + 0.5, y + 0.5, z + 0.5F, sound, volume, pitch);

		backpack.stackSize--;
		return true;
	}
}
/*
 * Copyright 2011 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import static com.google.common.base.Preconditions.checkState;

import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;

/**
 * A pass that looks for assignments to properties of an object or array
 * immediately following its creation using the abbreviated syntax.
 * <p>
 * E.g. {@code var a = [];a[0] = 0} is optimized to {@code var a = [0]} and
 * similarly for the object constructor.
 *
 * @author msamuel@google.com (Mike Samuel)
 */
final class PeepholeCollectPropertyAssignments extends AbstractPeepholeOptimization {

  @Override
  Node optimizeSubtree(Node subtree) {
    // Only statement lists (scripts/blocks) can contain the
    // declaration-followed-by-assignments pattern we collapse.
    if (!subtree.isScript() && !subtree.isBlock()) {
      return subtree;
    }

    boolean codeChanged = false;

    // Look for variable declarations or simple assignments
    // and start processing there.
    for (Node child = subtree.getFirstChild();
        child != null; child = child.getNext()) {
      if (!NodeUtil.isNameDeclaration(child) && !NodeUtil.isExprAssign(child)) {
        continue;
      }
      if (!isPropertyAssignmentToName(child.getNext())) {
        // Quick check to see if there's anything to collapse.
        continue;
      }

      checkState(child.hasOneChild());
      Node name = getName(child);
      if (!name.isName()) {
        // The assignment target is not a simple name.
        continue;
      }
      Node value = getValue(child);
      if (value == null || !isInterestingValue(value)) {
        // No initializer or not an Object or Array literal.
        continue;
      }

      // Fold each following property assignment into the literal until one
      // can't be folded (or there are no more following statements).
      Node propertyCandidate;
      while ((propertyCandidate = child.getNext()) != null) {
        // This does not infinitely loop because collectProperty always
        // removes propertyCandidate from its parent when it returns true.
        if (!collectProperty(propertyCandidate, name.getString(), value)) {
          break;
        }
        codeChanged = true;
      }
    }
    if (codeChanged) {
      reportChangeToEnclosingScope(subtree);
    }
    return subtree;
  }

  /** Returns the assignment-target node of a declaration or expression-assign statement. */
  private static Node getName(Node n) {
    if (NodeUtil.isNameDeclaration(n)) {
      return n.getFirstChild();
    } else if (NodeUtil.isExprAssign(n)) {
      return n.getFirstFirstChild();
    }
    throw new IllegalStateException();
  }

  /** Returns the assigned value (initializer/RHS) of a declaration or expression-assign statement. */
  private static Node getValue(Node n) {
    if (NodeUtil.isNameDeclaration(n)) {
      return n.getFirstFirstChild();
    } else if (NodeUtil.isExprAssign(n)) {
      return n.getFirstChild().getLastChild();
    }
    throw new IllegalStateException();
  }

  /** True for literals this pass can fold properties into: object and array literals. */
  static boolean isInterestingValue(Node n) {
    return n.isObjectLit() || n.isArrayLit();
  }

  /** Returns whether the statement has the shape {@code someName.prop = ...} / {@code someName[e] = ...}. */
  private static boolean isPropertyAssignmentToName(Node propertyCandidate) {
    if (propertyCandidate == null) {
      return false;
    }
    // Must be an assignment...
    if (!NodeUtil.isExprAssign(propertyCandidate)) {
      return false;
    }

    Node expr = propertyCandidate.getFirstChild();

    // to a property...
    Node lhs = expr.getFirstChild();
    if (!NodeUtil.isGet(lhs)) {
      return false;
    }

    // of a variable.
    Node obj = lhs.getFirstChild();
    return obj.isName();
  }

  /**
   * Tries to fold one property assignment into the literal {@code value}.
   * On success the statement is detached from the tree and true is returned.
   */
  private boolean collectProperty(Node propertyCandidate, String name, Node value) {
    if (!isPropertyAssignmentToName(propertyCandidate)) {
      return false;
    }

    Node lhs = propertyCandidate.getFirstFirstChild();
    // Must be an assignment to the recent variable...
    if (!name.equals(lhs.getFirstChild().getString())) {
      return false;
    }

    Node rhs = lhs.getNext();
    // with a value that cannot change the values of the variables,
    if (mayHaveSideEffects(rhs)
        || NodeUtil.canBeSideEffected(rhs)) {
      return false;
    }
    // and does not have a reference to a variable initialized after it.
    if (!NodeUtil.isLiteralValue(rhs, true)
        && mightContainForwardReference(rhs, name)) {
      return false;
    }

    switch (value.getToken()) {
      case ARRAYLIT:
        if (!collectArrayProperty(value, propertyCandidate)) {
          return false;
        }
        break;
      case OBJECTLIT:
        if (!collectObjectProperty(value, propertyCandidate)) {
          return false;
        }
        break;
      default:
        throw new IllegalStateException();
    }
    return true;
  }

  /**
   * Folds {@code name[i] = rhs} into the array literal when the index is a valid,
   * nearly-dense integer; detaches the statement on success.
   */
  private static boolean collectArrayProperty(Node arrayLiteral, Node propertyCandidate) {
    Node assignment = propertyCandidate.getFirstChild();
    final int sizeOfArrayAtStart = arrayLiteral.getChildCount();
    int maxIndexAssigned = sizeOfArrayAtStart - 1;

    Node lhs = assignment.getFirstChild();
    Node rhs = lhs.getNext();
    if (!lhs.isGetElem()) {
      return false;
    }
    Node obj = lhs.getFirstChild();
    Node property = obj.getNext();
    // The left hand side must have a numeric index
    if (!property.isNumber()) {
      return false;
    }
    // that is a valid array index
    double dindex = property.getDouble();
    if (!(dindex >= 0)  // Handles NaN and negatives.
        || Double.isInfinite(dindex) || dindex > 0x7fffffffL) {
      return false;
    }
    int index = (int) dindex;
    if (dindex != index) {
      return false;
    }
    // that would not make the array so sparse that they take more space
    // when rendered than x[9]=1.
    if (maxIndexAssigned + 4 < index) {
      return false;
    }
    if (index > maxIndexAssigned) {
      while (maxIndexAssigned < index - 1) {
        // Pad the array if it is sparse.
        // So if array is [0] and integer 3 is assigned at index is 2, then
        // we want to produce [0,,2].
        Node emptyNode = IR.empty().srcref(arrayLiteral);
        arrayLiteral.addChildToBack(emptyNode);
        ++maxIndexAssigned;
      }
      arrayLiteral.addChildToBack(rhs.detach());
    } else {
      // An out of order assignment.  Allow it if it's a hole.
      Node currentValue = arrayLiteral.getChildAtIndex(index);
      if (!currentValue.isEmpty()) {
        // We've already collected a value for this index.
        return false;
      }
      arrayLiteral.replaceChild(currentValue, rhs.detach());
    }

    propertyCandidate.detach();
    return true;
  }

  /**
   * Folds {@code name.key = rhs} / {@code name["key"] = rhs} into the object literal
   * when the key is statically known; detaches the statement on success.
   */
  private boolean collectObjectProperty(Node objectLiteral, Node propertyCandidate) {
    Node assignment = propertyCandidate.getFirstChild();
    Node lhs = assignment.getFirstChild();
    Node rhs = lhs.getNext();
    Node obj = lhs.getFirstChild();
    Node property = obj.getNext();

    // The property must be statically known.
    if (lhs.isGetElem() && !property.isString() && !property.isNumber()) {
      return false;
    }

    String propertyName;
    if (property.isNumber()) {
      propertyName = getSideEffectFreeStringValue(property);
    } else {
      propertyName = property.getString();
    }

    // Check if the new property already exists in the object literal
    // Note: Duplicate keys are invalid in strict mode
    Node existingProperty = null;
    for (Node currentProperty : objectLiteral.children()) {
      if (currentProperty.isStringKey() || currentProperty.isMemberFunctionDef()) {
        // Get the name of the current property
        String currentPropertyName = currentProperty.getString();
        // Get the value of the property
        Node currentValue = currentProperty.getFirstChild();
        // Compare the current property name with the new property name
        if (currentPropertyName.equals(propertyName)) {
          existingProperty = currentProperty;
          // Check if the current value and the new value are side-effect
          boolean isCurrentValueSideEffect = NodeUtil.canBeSideEffected(currentValue);
          boolean isNewValueSideEffect = NodeUtil.canBeSideEffected(rhs);
          // If they are side-effect free then replace the current value with the new one
          if (isCurrentValueSideEffect || isNewValueSideEffect) {
            return false;
          }
          // Break the loop if the property exists
          break;
        }
      } else if (currentProperty.isGetterDef() || currentProperty.isSetterDef()) {
        // Never merge onto an accessor — assignment semantics would differ.
        String currentPropertyName = currentProperty.getString();
        if (currentPropertyName.equals(propertyName)) {
          return false;
        }
      }
    }

    Node newProperty = IR.stringKey(propertyName)
        .useSourceInfoIfMissingFrom(property);
    // Preserve the quotedness of a property reference
    if (lhs.isGetElem()) {
      newProperty.setQuotedString();
    }
    Node newValue = rhs.detach();
    newProperty.addChildToBack(newValue);

    if (existingProperty != null) {
      deleteNode(existingProperty);
    }
    // If the property does not already exist we can safely add it
    objectLiteral.addChildToBack(newProperty);
    propertyCandidate.detach();
    return true;
  }

  /** Recursively checks whether {@code node} references the variable {@code varName}. */
  private static boolean mightContainForwardReference(Node node, String varName) {
    if (node.isName()) {
      return varName.equals(node.getString());
    }
    for (Node child = node.getFirstChild(); child != null;
         child = child.getNext()) {
      if (mightContainForwardReference(child, varName)) {
        return true;
      }
    }
    return false;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.aws.s3;

// NOTE(review): EndpointConfiguration appears unused in the visible portion of this
// file; it is presumably used by a test further down — confirm before removing.
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.regions.Regions;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;

/**
 * Unit tests for the aws-s3 component's endpoint URI parsing: verifies that
 * bucket name, credentials, client reference and the various query options
 * end up on the endpoint configuration (or fail fast when required ones are
 * missing). This chunk is cut off mid-way through the last test method.
 */
public class S3ComponentConfigurationTest extends CamelTestSupport {

    // Endpoint with a registry-bound client plus explicit credentials;
    // everything else should fall back to documented defaults.
    @Test
    public void createEndpointWithMinimalS3ClientConfiguration() throws Exception {
        AmazonS3ClientMock clientMock = new AmazonS3ClientMock();
        context.getRegistry().bind("amazonS3Client", clientMock);
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?amazonS3Client=#amazonS3Client&accessKey=xxx&secretKey=yyy");

        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertNotNull(endpoint.getConfiguration().getAmazonS3Client());
        assertNull(endpoint.getConfiguration().getRegion());
        assertTrue(endpoint.getConfiguration().isDeleteAfterRead());
        assertEquals(10, endpoint.getMaxMessagesPerPoll());
        assertNull(endpoint.getConfiguration().getPolicy());
        assertNull(endpoint.getConfiguration().getPrefix());
        assertTrue(endpoint.getConfiguration().isIncludeBody());
    }

    // Credentials only — the component should build its own client.
    @Test
    public void createEndpointWithMinimalCredentialsConfiguration() throws Exception {
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?accessKey=xxx&secretKey=yyy");

        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertNull(endpoint.getConfiguration().getRegion());
        assertTrue(endpoint.getConfiguration().isDeleteAfterRead());
        assertEquals(10, endpoint.getMaxMessagesPerPoll());
        assertNull(endpoint.getConfiguration().getPolicy());
        assertNull(endpoint.getConfiguration().getPrefix());
        assertTrue(endpoint.getConfiguration().isIncludeBody());
    }

    // A full bucket ARN in the URI should be reduced to the plain bucket name.
    @Test
    public void createEndpointWithMinimalArnConfiguration() throws Exception {
        AmazonS3ClientMock clientMock = new AmazonS3ClientMock();
        context.getRegistry().bind("amazonS3Client", clientMock);
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://arn:aws:s3:::MyBucket?amazonS3Client=#amazonS3Client&accessKey=xxx&secretKey=yyy");

        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
    }

    // Client reference alone suffices; no credentials should be required or set.
    @Test
    public void createEndpointWithMinimalConfigurationAndProvidedClient() throws Exception {
        AmazonS3ClientMock clientMock = new AmazonS3ClientMock();
        context.getRegistry().bind("amazonS3Client", clientMock);
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?amazonS3Client=#amazonS3Client");

        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertNull(endpoint.getConfiguration().getAccessKey());
        assertNull(endpoint.getConfiguration().getSecretKey());
        assertSame(clientMock, endpoint.getConfiguration().getAmazonS3Client());
        assertNull(endpoint.getConfiguration().getRegion());
        assertTrue(endpoint.getConfiguration().isDeleteAfterRead());
        assertEquals(10, endpoint.getMaxMessagesPerPoll());
        assertNull(endpoint.getConfiguration().getPolicy());
        assertNull(endpoint.getConfiguration().getPrefix());
        assertTrue(endpoint.getConfiguration().isIncludeBody());
    }

    // Every supported query option at once; the policy is URL-encoded JSON
    // and must round-trip back to the decoded document.
    @Test
    public void createEndpointWithMaximalConfiguration() throws Exception {
        AmazonS3ClientMock clientMock = new AmazonS3ClientMock();
        context.getRegistry().bind("amazonS3Client", clientMock);
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        S3Endpoint endpoint = (S3Endpoint)component
            .createEndpoint("aws-s3://MyBucket?amazonS3Client=#amazonS3Client"
                + "&accessKey=xxx&secretKey=yyy&region=us-west-1&deleteAfterRead=false&maxMessagesPerPoll=1&policy=%7B%22Version%22%3A%222008-10-17%22,%22Id%22%3A%22Policy4324355464%22,"
                + "%22Statement%22%3A%5B%7B%22Sid%22%3A%22Stmt456464646477%22,%22Action%22%3A%5B%22s3%3AGetObject%22%5D,%22Effect%22%3A%22Allow%22,"
                + "%22Resource%22%3A%5B%22arn%3Aaws%3As3%3A%3A%3Amybucket/some/path/*%22%5D,%22Principal%22%3A%7B%22AWS%22%3A%5B%22*%22%5D%7D%7D%5D%7D&storageClass=REDUCED_REDUNDANCY"
                + "&prefix=confidential&includeBody=false");

        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("xxx", endpoint.getConfiguration().getAccessKey());
        assertEquals("yyy", endpoint.getConfiguration().getSecretKey());
        assertNotNull(endpoint.getConfiguration().getAmazonS3Client());
        assertEquals("us-west-1", endpoint.getConfiguration().getRegion());
        assertFalse(endpoint.getConfiguration().isDeleteAfterRead());
        assertEquals(1, endpoint.getMaxMessagesPerPoll());
        assertEquals("{\"Version\":\"2008-10-17\",\"Id\":\"Policy4324355464\",\"Statement\":[{\"Sid\":\"Stmt456464646477\",\"Action\":[\"s3:GetObject\"],\"Effect\":\"Allow\",\"Resource\":"
            + "[\"arn:aws:s3:::mybucket/some/path/*\"],\"Principal\":{\"AWS\":[\"*\"]}}]}", endpoint.getConfiguration().getPolicy());
        assertEquals("REDUCED_REDUNDANCY", endpoint.getConfiguration().getStorageClass());
        assertEquals("confidential", endpoint.getConfiguration().getPrefix());
        assertFalse(endpoint.getConfiguration().isIncludeBody());
    }

    // Missing/blank bucket name must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void createEndpointWithoutBucketName() throws Exception {
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        component.createEndpoint("aws-s3:// ");
    }

    // A secret key without an access key must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void createEndpointWithoutAccessKeyConfiguration() throws Exception {
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        component.createEndpoint("aws-s3://MyTopic?secretKey=yyy");
    }

    // An access key without a secret key must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void createEndpointWithoutSecretKeyConfiguration() throws Exception {
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        component.createEndpoint("aws-s3://MyTopic?accessKey=xxx");
    }

    // Component-level credentials should flow into endpoints created without URI options.
    @Test
    public void createEndpointWithComponentElements() throws Exception {
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        component.getConfiguration().setAccessKey("XXX");
        component.getConfiguration().setSecretKey("YYY");
        S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket");

        assertEquals("MyBucket", endpoint.getConfiguration().getBucketName());
        assertEquals("XXX", endpoint.getConfiguration().getAccessKey());
        assertEquals("YYY", endpoint.getConfiguration().getSecretKey());
    }

    // NOTE(review): this method is truncated at the end of the visible chunk;
    // its body continues past the last line shown here.
    @Test
    public void createEndpointWithComponentAndEndpointElements() throws Exception {
        S3Component component = context.getComponent("aws-s3", S3Component.class);
        component.getConfiguration().setAccessKey("XXX");
        component.getConfiguration().setSecretKey("YYY");
        component.getConfiguration().setRegion(Regions.US_WEST_1.toString());
        S3Endpoint endpoint =
(S3Endpoint)component.createEndpoint("aws-s3://MyBucket?accessKey=xxxxxx&secretKey=yyyyy&region=US_EAST_1"); assertEquals("MyBucket", endpoint.getConfiguration().getBucketName()); assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey()); assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey()); assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion()); } @Test public void createEndpointWithChunkedEncoding() throws Exception { S3Component component = context.getComponent("aws-s3", S3Component.class); S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?chunkedEncodingDisabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1"); assertEquals("MyBucket", endpoint.getConfiguration().getBucketName()); assertEquals("xxx", endpoint.getConfiguration().getAccessKey()); assertEquals("yyy", endpoint.getConfiguration().getSecretKey()); assertTrue(endpoint.getConfiguration().isChunkedEncodingDisabled()); } @Test public void createEndpointWithAccelerateMode() throws Exception { S3Component component = context.getComponent("aws-s3", S3Component.class); S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?accelerateModeEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1"); assertEquals("MyBucket", endpoint.getConfiguration().getBucketName()); assertEquals("xxx", endpoint.getConfiguration().getAccessKey()); assertEquals("yyy", endpoint.getConfiguration().getSecretKey()); assertTrue(endpoint.getConfiguration().isAccelerateModeEnabled()); } @Test public void createEndpointWithDualstack() throws Exception { S3Component component = context.getComponent("aws-s3", S3Component.class); S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?dualstackEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1"); assertEquals("MyBucket", endpoint.getConfiguration().getBucketName()); assertEquals("xxx", endpoint.getConfiguration().getAccessKey()); assertEquals("yyy", 
endpoint.getConfiguration().getSecretKey()); assertTrue(endpoint.getConfiguration().isDualstackEnabled()); } @Test public void createEndpointWithPayloadSigning() throws Exception { S3Component component = context.getComponent("aws-s3", S3Component.class); S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?payloadSigningEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1"); assertEquals("MyBucket", endpoint.getConfiguration().getBucketName()); assertEquals("xxx", endpoint.getConfiguration().getAccessKey()); assertEquals("yyy", endpoint.getConfiguration().getSecretKey()); assertTrue(endpoint.getConfiguration().isPayloadSigningEnabled()); } @Test public void createEndpointWithForceGlobalBucketAccess() throws Exception { S3Component component = context.getComponent("aws-s3", S3Component.class); S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?forceGlobalBucketAccessEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1"); assertEquals("MyBucket", endpoint.getConfiguration().getBucketName()); assertEquals("xxx", endpoint.getConfiguration().getAccessKey()); assertEquals("yyy", endpoint.getConfiguration().getSecretKey()); assertTrue(endpoint.getConfiguration().isForceGlobalBucketAccessEnabled()); } @Test public void createEndpointWithAutocreateOption() throws Exception { S3Component component = context.getComponent("aws-s3", S3Component.class); S3Endpoint endpoint = (S3Endpoint)component .createEndpoint("aws-s3://MyBucket?forceGlobalBucketAccessEnabled=true&accessKey=xxx&secretKey=yyy&region=US_WEST_1&autoCreateBucket=false"); assertEquals("MyBucket", endpoint.getConfiguration().getBucketName()); assertEquals("xxx", endpoint.getConfiguration().getAccessKey()); assertEquals("yyy", endpoint.getConfiguration().getSecretKey()); assertTrue(endpoint.getConfiguration().isForceGlobalBucketAccessEnabled()); assertFalse(endpoint.getConfiguration().isAutoCreateBucket()); } @Test public void 
createEndpointWithoutSecretKeyAndAccessKeyConfiguration() throws Exception { AmazonS3ClientMock clientMock = new AmazonS3ClientMock(); context.getRegistry().bind("amazonS3Client", clientMock); S3Component component = context.getComponent("aws-s3", S3Component.class); component.createEndpoint("aws-s3://MyTopic?amazonS3Client=#amazonS3Client"); } @Test public void createEndpointWithEndpointConfiguration() throws Exception { EndpointConfiguration endpointConfiguration = new EndpointConfiguration("localhost", Regions.US_EAST_1.toString()); context.getRegistry().bind("endpointConfiguration", endpointConfiguration); S3Component component = context.getComponent("aws-s3", S3Component.class); S3Endpoint endpoint = (S3Endpoint)component.createEndpoint("aws-s3://MyBucket?endpointConfiguration=#endpointConfiguration&accessKey=xxx&secretKey=yyy&region=US_WEST_1"); assertEquals("MyBucket", endpoint.getConfiguration().getBucketName()); assertEquals("xxx", endpoint.getConfiguration().getAccessKey()); assertEquals("yyy", endpoint.getConfiguration().getSecretKey()); assertNotNull(endpoint.getConfiguration().getEndpointConfiguration()); } }
/* * * Copyright 2012 Luca Molino (molino.luca--AT--gmail.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.object.enhancement; import java.lang.annotation.Annotation; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import javassist.util.proxy.Proxy; import javassist.util.proxy.ProxyObject; import javax.persistence.CascadeType; import javax.persistence.ManyToMany; import javax.persistence.OneToMany; import javax.persistence.OneToOne; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.reflection.OReflectionHelper; import com.orientechnologies.orient.core.Orient; import com.orientechnologies.orient.core.annotation.OAccess; import com.orientechnologies.orient.core.annotation.OAfterDeserialization; import com.orientechnologies.orient.core.annotation.OAfterSerialization; import com.orientechnologies.orient.core.annotation.OBeforeDeserialization; import com.orientechnologies.orient.core.annotation.OBeforeSerialization; import com.orientechnologies.orient.core.annotation.ODocumentInstance; import 
com.orientechnologies.orient.core.annotation.OId; import com.orientechnologies.orient.core.annotation.OVersion; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.db.object.ODatabaseObject; import com.orientechnologies.orient.core.db.record.ORecordLazyList; import com.orientechnologies.orient.core.db.record.ORecordLazyMap; import com.orientechnologies.orient.core.db.record.ORecordLazySet; import com.orientechnologies.orient.core.db.record.OTrackedList; import com.orientechnologies.orient.core.db.record.OTrackedMap; import com.orientechnologies.orient.core.db.record.OTrackedSet; import com.orientechnologies.orient.core.exception.OConfigurationException; import com.orientechnologies.orient.core.exception.OSerializationException; import com.orientechnologies.orient.core.exception.OTransactionException; import com.orientechnologies.orient.core.id.OClusterPositionFactory; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.metadata.schema.OClass; import com.orientechnologies.orient.core.metadata.schema.OProperty; import com.orientechnologies.orient.core.metadata.schema.OType; import com.orientechnologies.orient.core.record.ORecordAbstract; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.tx.OTransactionOptimistic; import com.orientechnologies.orient.object.db.OObjectLazyMap; import com.orientechnologies.orient.object.serialization.OObjectSerializationThreadLocal; import com.orientechnologies.orient.object.serialization.OObjectSerializerHelper; /** * @author luca.molino * */ public class OObjectEntitySerializer { private static final Set<Class<?>> classes = new HashSet<Class<?>>(); private static final HashMap<Class<?>, List<String>> allFields = new HashMap<Class<?>, List<String>>(); private static final HashMap<Class<?>, List<String>> embeddedFields = new 
HashMap<Class<?>, List<String>>(); private static final HashMap<Class<?>, List<String>> directAccessFields = new HashMap<Class<?>, List<String>>(); private static final HashMap<Class<?>, Field> boundDocumentFields = new HashMap<Class<?>, Field>(); private static final HashMap<Class<?>, List<String>> transientFields = new HashMap<Class<?>, List<String>>(); private static final HashMap<Class<?>, List<String>> cascadeDeleteFields = new HashMap<Class<?>, List<String>>(); private static final HashMap<Class<?>, Map<Field, Class<?>>> serializedFields = new HashMap<Class<?>, Map<Field, Class<?>>>(); private static final HashMap<Class<?>, Field> fieldIds = new HashMap<Class<?>, Field>(); private static final HashMap<Class<?>, Field> fieldVersions = new HashMap<Class<?>, Field>(); private static final HashMap<String, List<Method>> callbacks = new HashMap<String, List<Method>>(); /** * Method that given an object serialize it an creates a proxy entity, in case the object isn't generated using the * ODatabaseObject.newInstance() * * @param o * - the object to serialize * @return the proxied object */ public static <T> T serializeObject(T o, ODatabaseObject db) { if (o instanceof Proxy) { final ODocument iRecord = getDocument((Proxy) o); Class<?> pojoClass = o.getClass().getSuperclass(); invokeCallback(pojoClass, o, iRecord, OBeforeSerialization.class); invokeCallback(pojoClass, o, iRecord, OAfterSerialization.class); return o; } Proxy proxiedObject = (Proxy) db.newInstance(o.getClass()); try { return toStream(o, proxiedObject, db); } catch (IllegalArgumentException e) { throw new OSerializationException("Error serializing object of class " + o.getClass(), e); } catch (IllegalAccessException e) { throw new OSerializationException("Error serializing object of class " + o.getClass(), e); } } /** * Method that attaches all data contained in the object to the associated document * * @param <T> * @param o * :- the object to attach * @param db * :- the database instance * @return the 
object serialized or with attached data */ public static <T> T attach(T o, ODatabaseObject db) { if (o instanceof Proxy) { OObjectProxyMethodHandler handler = (OObjectProxyMethodHandler) ((ProxyObject) o).getHandler(); try { handler.attach(o); } catch (IllegalArgumentException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (IllegalAccessException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (NoSuchMethodException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (InvocationTargetException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } return o; } else return serializeObject(o, db); } /** * Method that detaches all fields contained in the document to the given object. It returns by default a proxied instance. To get * a detached non proxied instance @see {@link OObjectEntitySerializer.detach(T o, ODatabaseObject db, boolean * returnNonProxiedInstance)} * * @param <T> * @param o * :- the object to detach * @param db * :- the database instance * @return proxied instance: the object serialized or with detached data */ public static <T> T detach(T o, ODatabaseObject db) { return detach(o, db, false); } /** * Method that detaches all fields contained in the document to the given object. * * @param <T> * @param o * :- the object to detach * @param db * :- the database instance * @param returnNonProxiedInstance * :- defines if the return object will be a proxied instance or not. 
If set to TRUE and the object does not contains @Id * and @Version fields it could procude data replication * @return the object serialized or with detached data */ public static <T> T detach(T o, ODatabaseObject db, boolean returnNonProxiedInstance) { if (o instanceof Proxy) { OObjectProxyMethodHandler handler = (OObjectProxyMethodHandler) ((ProxyObject) o).getHandler(); try { if (returnNonProxiedInstance) { o = getNonProxiedInstance(o); } handler.detach(o, returnNonProxiedInstance); } catch (IllegalArgumentException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (IllegalAccessException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (NoSuchMethodException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (InvocationTargetException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } return o; } else if (!returnNonProxiedInstance) return serializeObject(o, db); return o; } /** * Method that detaches all fields contained in the document to the given object and recursively all object tree. This may throw a * {@link StackOverflowError} with big objects tree. To avoid it set the stack size with -Xss java option * * @param <T> * @param o * :- the object to detach * @param db * :- the database instance * @param returnNonProxiedInstance * :- defines if the return object will be a proxied instance or not. 
If set to TRUE and the object does not contains @Id * and @Version fields it could procude data replication * @return the object serialized or with detached data */ public static <T> T detachAll(T o, ODatabaseObject db, boolean returnNonProxiedInstance) { if (o instanceof Proxy) { OObjectProxyMethodHandler handler = (OObjectProxyMethodHandler) ((ProxyObject) o).getHandler(); try { if (returnNonProxiedInstance) { o = getNonProxiedInstance(o); } handler.detachAll(o, returnNonProxiedInstance); } catch (IllegalArgumentException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (IllegalAccessException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (NoSuchMethodException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } catch (InvocationTargetException e) { throw new OSerializationException("Error detaching object of class " + o.getClass(), e); } return o; } else if (!returnNonProxiedInstance) return serializeObject(o, db); return o; } /** * Method that given a proxied entity returns the associated ODocument * * @param proxiedObject * - the proxied entity object * @return The ODocument associated with the object */ public static ODocument getDocument(Proxy proxiedObject) { return ((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).getDoc(); } /** * Method that given a proxied entity returns the associated ODocument RID * * @param proxiedObject * - the proxied entity object * @return The ORID of associated ODocument */ public static ORID getRid(Proxy proxiedObject) { return getDocument(proxiedObject).getIdentity(); } /** * Method that given a proxied entity returns the associated ODocument version * * @param proxiedObject * - the proxied entity object * @return The version of associated ODocument */ public static int getVersion(Proxy proxiedObject) { return getDocument(proxiedObject).getVersion(); } 
// ---------------------------------------------------------------------------
// Field-metadata lookup helpers. Each walks the class hierarchy upwards
// (stopping at Object or ODocument) and consults the static registry maps
// populated during class registration.
// ---------------------------------------------------------------------------

  /**
   * Returns true if {@code iField} is a declared field of {@code iClass} or of any superclass,
   * according to the {@code allFields} registry. Registers the class first if needed.
   */
  public static boolean isClassField(Class<?> iClass, String iField) {
    checkClassRegistration(iClass);
    boolean isClassField = false;
    // Stop early once the field is found (loop condition includes !isClassField).
    for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
        && !currentClass.equals(ODocument.class) && !isClassField;) {
      List<String> allClassFields = allFields.get(currentClass);
      isClassField = allClassFields != null && allClassFields.contains(iField);
      currentClass = currentClass.getSuperclass();
    }
    return isClassField;
  }

  /**
   * Returns true if {@code iField} is recorded as transient (static/final/native/transient
   * modifier, synthetic {@code this$0}, or JPA {@code @Transient}) for {@code iClass} or a superclass.
   */
  public static boolean isTransientField(Class<?> iClass, String iField) {
    checkClassRegistration(iClass);
    boolean isTransientField = false;
    for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
        && !currentClass.equals(ODocument.class) && !isTransientField;) {
      // NOTE(review): local is named classCascadeDeleteFields but it reads the
      // transientFields registry — the behavior (transient lookup) is what counts here.
      List<String> classCascadeDeleteFields = transientFields.get(currentClass);
      isTransientField = classCascadeDeleteFields != null && classCascadeDeleteFields.contains(iField);
      currentClass = currentClass.getSuperclass();
    }
    return isTransientField;
  }

  /**
   * Collects the cascade-delete field names registered for {@code iClass} and all of its
   * superclasses into a single (possibly empty) list.
   */
  public static List<String> getCascadeDeleteFields(Class<?> iClass) {
    checkClassRegistration(iClass);
    List<String> classCascadeDeleteFields = new ArrayList<String>();
    for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
        && !currentClass.equals(ODocument.class);) {
      List<String> classDeleteFields = cascadeDeleteFields.get(currentClass);
      if (classDeleteFields != null)
        classCascadeDeleteFields.addAll(classDeleteFields);
      currentClass = currentClass.getSuperclass();
    }
    return classCascadeDeleteFields;
  }

  /**
   * Simple-name variant of {@link #getCascadeDeleteFields(Class)}: scans registered classes
   * for one whose simple name matches. Returns null when the name is null/empty or unknown.
   */
  public static List<String> getCascadeDeleteFields(String iClassName) {
    if (iClassName == null || iClassName.isEmpty())
      return null;
    for (Class<?> iClass : cascadeDeleteFields.keySet()) {
      if (iClass.getSimpleName().equals(iClassName))
        return getCascadeDeleteFields(iClass);
    }
    return null;
  }

  /**
   * Returns true if {@code iField} is registered as a cascade-delete field of {@code iClass}
   * or any superclass.
   */
  public static boolean isCascadeDeleteField(Class<?> iClass, String iField) {
    checkClassRegistration(iClass);
    // NOTE(review): local names (isTransientField / classEmbeddedFields) do not match the
    // cascadeDeleteFields registry actually consulted below — behavior is cascade-delete lookup.
    boolean isTransientField = false;
    for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
        && !currentClass.equals(ODocument.class) && !isTransientField;) {
      List<String> classEmbeddedFields = cascadeDeleteFields.get(currentClass);
      isTransientField = classEmbeddedFields != null && classEmbeddedFields.contains(iField);
      currentClass = currentClass.getSuperclass();
    }
    return isTransientField;
  }

  /**
   * Returns true if {@code iField} is registered as a JPA {@code @Embedded} field of
   * {@code iClass} or any superclass.
   */
  public static boolean isEmbeddedField(Class<?> iClass, String iField) {
    checkClassRegistration(iClass);
    boolean isEmbeddedField = false;
    for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class
        && !currentClass.equals(ODocument.class) && !isEmbeddedField;) {
      List<String> classEmbeddedFields = embeddedFields.get(currentClass);
      isEmbeddedField = classEmbeddedFields != null && classEmbeddedFields.contains(iField);
      currentClass = currentClass.getSuperclass();
    }
    return isEmbeddedField;
  }

  /**
   * Lazily registers {@code iClass} on first use; proxy classes are never registered directly.
   */
  protected static void checkClassRegistration(Class<?> iClass) {
    if (!classes.contains(iClass) && !(Proxy.class.isAssignableFrom(iClass)))
      registerClass(iClass);
  }

  /**
   * Registers the class informations that will be used in serialization, deserialization and lazy loading of it. If already
   * registered does nothing.
* * @param iClass * :- the Class<?> to register */ @SuppressWarnings("unchecked") public static synchronized void registerClass(final Class<?> iClass) { if (Proxy.class.isAssignableFrom(iClass) || classes.contains(iClass)) return; boolean reloadSchema = false; if (ODatabaseRecordThreadLocal.INSTANCE.isDefined() && !ODatabaseRecordThreadLocal.INSTANCE.get().isClosed() && !ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().existsClass(iClass.getSimpleName())) { ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().createClass(iClass.getSimpleName()); reloadSchema = true; } for (Class<?> currentClass = iClass; currentClass != Object.class;) { if (!classes.contains(currentClass)) { classes.add(currentClass); Class<?> fieldType; for (Field f : currentClass.getDeclaredFields()) { final String fieldName = f.getName(); final int fieldModifier = f.getModifiers(); List<String> allClassFields = allFields.get(currentClass); if (allClassFields == null) allClassFields = new ArrayList<String>(); allClassFields.add(fieldName); allFields.put(currentClass, allClassFields); if (Modifier.isStatic(fieldModifier) || Modifier.isFinal(fieldModifier) || Modifier.isNative(fieldModifier) || Modifier.isTransient(fieldModifier)) { List<String> classTransientFields = transientFields.get(currentClass); if (classTransientFields == null) classTransientFields = new ArrayList<String>(); classTransientFields.add(fieldName); transientFields.put(currentClass, classTransientFields); } if (fieldName.equals("this$0")) { List<String> classTransientFields = transientFields.get(currentClass); if (classTransientFields == null) classTransientFields = new ArrayList<String>(); classTransientFields.add(fieldName); transientFields.put(currentClass, classTransientFields); } if (OObjectSerializerHelper.jpaTransientClass != null) { Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaTransientClass); if (ann != null) { // @Transient DEFINED List<String> classTransientFields = 
transientFields.get(currentClass); if (classTransientFields == null) classTransientFields = new ArrayList<String>(); classTransientFields.add(fieldName); transientFields.put(currentClass, classTransientFields); } } if (OObjectSerializerHelper.jpaOneToOneClass != null) { Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaOneToOneClass); if (ann != null) { // @OneToOne DEFINED OneToOne oneToOne = ((OneToOne) ann); if (checkCascadeDelete(oneToOne)) { addCascadeDeleteField(currentClass, fieldName); } } } if (OObjectSerializerHelper.jpaOneToManyClass != null) { Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaOneToManyClass); if (ann != null) { // @OneToMany DEFINED OneToMany oneToMany = ((OneToMany) ann); if (checkCascadeDelete(oneToMany)) { addCascadeDeleteField(currentClass, fieldName); } } } if (OObjectSerializerHelper.jpaManyToManyClass != null) { Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaManyToManyClass); if (ann != null) { // @OneToMany DEFINED ManyToMany manyToMany = ((ManyToMany) ann); if (checkCascadeDelete(manyToMany)) { addCascadeDeleteField(currentClass, fieldName); } } } fieldType = f.getType(); if (Collection.class.isAssignableFrom(fieldType) || fieldType.isArray() || Map.class.isAssignableFrom(fieldType)) { fieldType = OReflectionHelper.getGenericMultivalueType(f); } if (isToSerialize(fieldType)) { Map<Field, Class<?>> serializeClass = serializedFields.get(currentClass); if (serializeClass == null) serializeClass = new HashMap<Field, Class<?>>(); serializeClass.put(f, fieldType); serializedFields.put(currentClass, serializeClass); } // CHECK FOR DIRECT-BINDING boolean directBinding = true; if (f.getAnnotation(OAccess.class) == null || f.getAnnotation(OAccess.class).value() == OAccess.OAccessType.PROPERTY) directBinding = true; // JPA 2+ AVAILABLE? 
else if (OObjectSerializerHelper.jpaAccessClass != null) { Annotation ann = f.getAnnotation(OObjectSerializerHelper.jpaAccessClass); if (ann != null) { directBinding = true; } } if (directBinding) { List<String> classDirectAccessFields = directAccessFields.get(currentClass); if (classDirectAccessFields == null) classDirectAccessFields = new ArrayList<String>(); classDirectAccessFields.add(fieldName); directAccessFields.put(currentClass, classDirectAccessFields); } if (f.getAnnotation(ODocumentInstance.class) != null) // BOUND DOCUMENT ON IT boundDocumentFields.put(currentClass, f); boolean idFound = false; if (f.getAnnotation(OId.class) != null) { // RECORD ID fieldIds.put(currentClass, f); idFound = true; } // JPA 1+ AVAILABLE? else if (OObjectSerializerHelper.jpaIdClass != null && f.getAnnotation(OObjectSerializerHelper.jpaIdClass) != null) { // RECORD ID fieldIds.put(currentClass, f); idFound = true; } if (idFound) { // CHECK FOR TYPE if (fieldType.isPrimitive()) OLogManager.instance().warn(OObjectSerializerHelper.class, "Field '%s' cannot be a literal to manage the Record Id", f.toString()); else if (!ORID.class.isAssignableFrom(fieldType) && fieldType != String.class && fieldType != Object.class && !Number.class.isAssignableFrom(fieldType)) OLogManager.instance().warn(OObjectSerializerHelper.class, "Field '%s' cannot be managed as type: %s", f.toString(), fieldType); } boolean vFound = false; if (f.getAnnotation(OVersion.class) != null) { // RECORD ID fieldVersions.put(currentClass, f); vFound = true; } // JPA 1+ AVAILABLE? 
else if (OObjectSerializerHelper.jpaVersionClass != null && f.getAnnotation(OObjectSerializerHelper.jpaVersionClass) != null) { // RECORD ID fieldVersions.put(currentClass, f); vFound = true; } if (vFound) { // CHECK FOR TYPE if (fieldType.isPrimitive()) OLogManager.instance().warn(OObjectSerializerHelper.class, "Field '%s' cannot be a literal to manage the Version", f.toString()); else if (fieldType != String.class && fieldType != Object.class && !Number.class.isAssignableFrom(fieldType)) OLogManager.instance().warn(OObjectSerializerHelper.class, "Field '%s' cannot be managed as type: %s", f.toString(), fieldType); } // JPA 1+ AVAILABLE? if (OObjectSerializerHelper.jpaEmbeddedClass != null && f.getAnnotation(OObjectSerializerHelper.jpaEmbeddedClass) != null) { List<String> classEmbeddedFields = embeddedFields.get(currentClass); if (classEmbeddedFields == null) classEmbeddedFields = new ArrayList<String>(); classEmbeddedFields.add(fieldName); embeddedFields.put(currentClass, classEmbeddedFields); } } registerCallbacks(currentClass); } String iClassName = currentClass.getSimpleName(); currentClass = currentClass.getSuperclass(); if (currentClass == null || currentClass.equals(ODocument.class)) // POJO EXTENDS ODOCUMENT: SPECIAL CASE: AVOID TO CONSIDER // ODOCUMENT FIELDS currentClass = Object.class; if (ODatabaseRecordThreadLocal.INSTANCE.get() != null && !ODatabaseRecordThreadLocal.INSTANCE.get().isClosed() && !currentClass.equals(Object.class)) { OClass oSuperClass; OClass currentOClass = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().getClass(iClassName); if (!ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().existsClass(currentClass.getSimpleName())) { oSuperClass = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema() .createClass(currentClass.getSimpleName()); reloadSchema = true; } else { oSuperClass = 
ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().getClass(currentClass.getSimpleName()); reloadSchema = true; } if (currentOClass.getSuperClass() == null || !currentOClass.getSuperClass().equals(oSuperClass)) { currentOClass.setSuperClass(oSuperClass); reloadSchema = true; } } } if (ODatabaseRecordThreadLocal.INSTANCE.get() != null && !ODatabaseRecordThreadLocal.INSTANCE.get().isClosed() && reloadSchema) { ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().save(); ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().reload(); } } protected static boolean checkCascadeDelete(OneToOne oneToOne) { return oneToOne.orphanRemoval() || checkCascadeAnnotationAttribute(oneToOne.cascade()); } protected static boolean checkCascadeDelete(OneToMany oneToMany) { return oneToMany.orphanRemoval() || checkCascadeAnnotationAttribute(oneToMany.cascade()); } protected static boolean checkCascadeDelete(ManyToMany manyToMany) { return checkCascadeAnnotationAttribute(manyToMany.cascade()); } protected static boolean checkCascadeAnnotationAttribute(CascadeType[] cascadeList) { if (cascadeList == null || cascadeList.length <= 0) return false; for (CascadeType type : cascadeList) { if (type.equals(CascadeType.ALL) || type.equals(CascadeType.REMOVE)) return true; } return false; } protected static void addCascadeDeleteField(Class<?> currentClass, final String fieldName) { List<String> classCascadeDeleteFields = cascadeDeleteFields.get(currentClass); if (classCascadeDeleteFields == null) classCascadeDeleteFields = new ArrayList<String>(); classCascadeDeleteFields.add(fieldName); cascadeDeleteFields.put(currentClass, classCascadeDeleteFields); } public static boolean isSerializedType(final Field iField) { if (!classes.contains(iField.getDeclaringClass())) registerCallbacks(iField.getDeclaringClass()); Map<Field, Class<?>> serializerFields = serializedFields.get(iField.getDeclaringClass()); return serializerFields != null && 
serializerFields.get(iField) != null;
  }

  /**
   * Returns the custom serializer target type registered for the given field, or
   * {@code null} when the field has no registered serialized type.
   * NOTE(review): unlike the other accessors in this class (isIdField, getIdField,
   * getVersionField, ...), which call registerClass, this calls registerCallbacks —
   * registerCallbacks only scans for callback-annotated methods; confirm that
   * serializedFields is populated elsewhere for not-yet-registered classes.
   */
  public static Class<?> getSerializedType(final Field iField) {
    if (!classes.contains(iField.getDeclaringClass()))
      registerCallbacks(iField.getDeclaringClass());
    return serializedFields.get(iField.getDeclaringClass()) != null ? serializedFields.get(iField.getDeclaringClass()).get(iField)
        : null;
  }

  /**
   * Tells whether the given class is handled by one of the registered serializer
   * contexts: either a context keyed by an assignable class, or the global
   * (null-keyed) context when it declares the class as bound.
   */
  public static boolean isToSerialize(final Class<?> type) {
    for (Class<?> classContext : OObjectSerializerHelper.serializerContexts.keySet()) {
      if (classContext != null && classContext.isAssignableFrom(type)) {
        return true;
      }
    }
    return OObjectSerializerHelper.serializerContexts.get(null) != null
        && OObjectSerializerHelper.serializerContexts.get(null).isClassBinded(type);
  }

  /**
   * Serializes a field value through the first serializer context whose key class
   * is assignable from {@code type}; falls back to the global (null-keyed) context,
   * and finally returns the value unchanged when no context applies.
   */
  public static Object serializeFieldValue(final Class<?> type, final Object iFieldValue) {
    for (Class<?> classContext : OObjectSerializerHelper.serializerContexts.keySet()) {
      if (classContext != null && classContext.isAssignableFrom(type)) {
        return OObjectSerializerHelper.serializerContexts.get(classContext).serializeFieldValue(type, iFieldValue);
      }
    }

    if (OObjectSerializerHelper.serializerContexts.get(null) != null)
      return OObjectSerializerHelper.serializerContexts.get(null).serializeFieldValue(type, iFieldValue);

    return iFieldValue;
  }

  /**
   * Inverse of {@link #serializeFieldValue(Class, Object)}: resolves the applicable
   * serializer context (specific class key first, then the global null key) and
   * unserializes the value; returns it unchanged when no context applies.
   */
  public static Object deserializeFieldValue(final Class<?> type, final Object iFieldValue) {
    for (Class<?> classContext : OObjectSerializerHelper.serializerContexts.keySet()) {
      if (classContext != null && classContext.isAssignableFrom(type)) {
        return OObjectSerializerHelper.serializerContexts.get(classContext).unserializeFieldValue(type, iFieldValue);
      }
    }

    if (OObjectSerializerHelper.serializerContexts.get(null) != null)
      return OObjectSerializerHelper.serializerContexts.get(null).unserializeFieldValue(type, iFieldValue);

    return iFieldValue;
  }

  /**
   * Converts a POJO field value to its document (stream) representation.
   * Nulls pass through; proxied objects are unwrapped to their bound ODocument.
   * (Continues on the following chunk for arrays, collections, maps, enums and links.)
   */
  public static Object typeToStream(Object iFieldValue, OType iType, final ODatabaseObject db, final ODocument iRecord) {
    if (iFieldValue == null)
      return null;

    if (iFieldValue instanceof Proxy)
      return
getDocument((Proxy) iFieldValue); if (!OType.isSimpleType(iFieldValue) || iFieldValue.getClass().isArray()) { Class<?> fieldClass = iFieldValue.getClass(); if (fieldClass.isArray()) { if (iType.equals(OType.BINARY)) return iFieldValue; // ARRAY final int arrayLength = Array.getLength(iFieldValue); final List<Object> arrayList = new ArrayList<Object>(); for (int i = 0; i < arrayLength; i++) arrayList.add(Array.get(iFieldValue, i)); iFieldValue = multiValueToStream(arrayList, iType, db, iRecord); } else if (Collection.class.isAssignableFrom(fieldClass)) { // COLLECTION (LIST OR SET) iFieldValue = multiValueToStream(iFieldValue, iType, db, iRecord); } else if (Map.class.isAssignableFrom(fieldClass)) { // MAP iFieldValue = multiValueToStream(iFieldValue, iType, db, iRecord); } else if (fieldClass.isEnum()) { // ENUM iFieldValue = ((Enum<?>) iFieldValue).name(); } else { // LINK OR EMBEDDED fieldClass = db.getEntityManager().getEntityClass(fieldClass.getSimpleName()); if (fieldClass != null) { // RECOGNIZED TYPE, SERIALIZE IT iFieldValue = getDocument((Proxy) serializeObject(iFieldValue, db)); } else { final Object result = serializeFieldValue(null, iFieldValue); if (iFieldValue == result && !ORecordAbstract.class.isAssignableFrom(result.getClass())) throw new OSerializationException("Linked type [" + iFieldValue.getClass() + ":" + iFieldValue + "] cannot be serialized because is not part of registered entities. 
To fix this error register this class"); iFieldValue = result; } } } return iFieldValue; } public static boolean hasBoundedDocumentField(final Class<?> iClass) { if (!classes.contains(iClass)) { registerClass(iClass); } boolean hasBoundedField = false; for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class && !currentClass.equals(ODocument.class) && !hasBoundedField;) { hasBoundedField = boundDocumentFields.get(currentClass) != null; currentClass = currentClass.getSuperclass(); } return hasBoundedField; } public static Field getBoundedDocumentField(final Class<?> iClass) { if (!classes.contains(iClass)) { registerClass(iClass); } for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class && !currentClass.equals(ODocument.class);) { Field f = boundDocumentFields.get(currentClass); if (f != null) return f; currentClass = currentClass.getSuperclass(); } return null; } public static boolean isIdField(final Class<?> iClass, String iFieldName) { if (!classes.contains(iClass)) { registerClass(iClass); } boolean isIdField = false; for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class && !currentClass.equals(ODocument.class) && !isIdField;) { Field f = fieldIds.get(currentClass); isIdField = f != null && f.getName().equals(iFieldName); currentClass = currentClass.getSuperclass(); } return isIdField; } public static boolean isIdField(Field iField) { if (!classes.contains(iField.getDeclaringClass())) { registerClass(iField.getDeclaringClass()); } return fieldIds.containsValue(iField); } public static Field getIdField(final Class<?> iClass) { if (!classes.contains(iClass)) { registerClass(iClass); } Field idField = null; for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class && !currentClass.equals(ODocument.class) && idField == null;) { idField = fieldIds.get(currentClass); currentClass = currentClass.getSuperclass(); } return idField; } 
public static void setIdField(final Class<?> iClass, Object iObject, ORID iValue) throws IllegalArgumentException, IllegalAccessException { if (!classes.contains(iClass)) { registerClass(iClass); } Field f = null; for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class && !currentClass.equals(ODocument.class);) { f = fieldIds.get(currentClass); if (f != null) break; currentClass = currentClass.getSuperclass(); } if (f != null) { if (f.getType().equals(String.class)) setFieldValue(f, iObject, iValue.toString()); else if (f.getType().equals(Long.class)) setFieldValue(f, iObject, iValue.getClusterPosition().longValue()); else if (f.getType().equals(Object.class)) setFieldValue(f, iObject, iValue); } } public static boolean isVersionField(final Class<?> iClass, String iFieldName) { if (!classes.contains(iClass)) { registerClass(iClass); } boolean isVersionField = false; for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class && !currentClass.equals(ODocument.class) && !isVersionField;) { Field f = fieldVersions.get(currentClass); isVersionField = f != null && f.getName().equals(iFieldName); currentClass = currentClass.getSuperclass(); } return isVersionField; } public static Field getVersionField(final Class<?> iClass) { if (!classes.contains(iClass)) { registerClass(iClass); } Field versionField = null; for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class && !currentClass.equals(ODocument.class) && versionField == null;) { versionField = fieldVersions.get(currentClass); currentClass = currentClass.getSuperclass(); } return versionField; } public static void setVersionField(final Class<?> iClass, Object iObject, int iValue) throws IllegalArgumentException, IllegalAccessException { if (!classes.contains(iClass)) { registerClass(iClass); } Field f = null; for (Class<?> currentClass = iClass; currentClass != null && currentClass != Object.class && 
!currentClass.equals(ODocument.class);) {
      f = fieldVersions.get(currentClass);
      if (f != null)
        break;
      currentClass = currentClass.getSuperclass();
    }
    if (f != null) {
      // WRITE THE VERSION BACK USING THE DECLARED FIELD TYPE (String, Long or Object)
      if (f.getType().equals(String.class))
        setFieldValue(f, iObject, String.valueOf(iValue));
      else if (f.getType().equals(Long.class))
        setFieldValue(f, iObject, Long.valueOf(iValue));
      else if (f.getType().equals(Object.class))
        setFieldValue(f, iObject, iValue);
    }
  }

  /**
   * Reads the value of the given field from the instance via reflection, forcing
   * accessibility on private/protected fields first.
   */
  public static Object getFieldValue(Field iField, Object iInstance) throws IllegalArgumentException, IllegalAccessException {
    if (!iField.isAccessible()) {
      iField.setAccessible(true);
    }
    return iField.get(iInstance);
  }

  /**
   * Writes the given value into the field of the instance via reflection, forcing
   * accessibility on private/protected fields first.
   */
  public static void setFieldValue(Field iField, Object iInstance, Object iValue) throws IllegalArgumentException,
      IllegalAccessException {
    if (!iField.isAccessible()) {
      iField.setAccessible(true);
    }
    iField.set(iInstance, iValue);
  }

  // CONVENIENCE WRAPPERS DISPATCHING TO THE GENERIC CALLBACK INVOKER, ONE PER
  // LIFECYCLE ANNOTATION
  public static void invokeBeforeSerializationCallbacks(Class<?> iClass, Object iInstance, ODocument iDocument) {
    invokeCallback(iClass, iInstance, iDocument, OBeforeSerialization.class);
  }

  public static void invokeAfterSerializationCallbacks(Class<?> iClass, Object iInstance, ODocument iDocument) {
    invokeCallback(iClass, iInstance, iDocument, OAfterSerialization.class);
  }

  public static void invokeAfterDeserializationCallbacks(Class<?> iClass, Object iInstance, ODocument iDocument) {
    invokeCallback(iClass, iInstance, iDocument, OAfterDeserialization.class);
  }

  public static void invokeBeforeDeserializationCallbacks(Class<?> iClass, Object iInstance, ODocument iDocument) {
    invokeCallback(iClass, iInstance, iDocument, OBeforeDeserialization.class);
  }

  /**
   * Maps a POJO field to its OrientDB {@link OType}: multi-values (arrays,
   * collections, maps) are classified as EMBEDDED* or LINK* depending on their
   * generic element type. Returns {@code null} when the field cannot be found.
   * (Continues on the following chunk.)
   */
  public static OType getTypeByClass(final Class<?> iClass, final String fieldName) {
    Field f = getField(fieldName, iClass);
    if (f == null)
      return null;
    if (f.getType().isArray() || Collection.class.isAssignableFrom(f.getType()) || Map.class.isAssignableFrom(f.getType())) {
      Class<?> genericMultiValueType = OReflectionHelper.getGenericMultivalueType(f);
      if
(f.getType().isArray()) if (genericMultiValueType.isPrimitive() && Byte.class.isAssignableFrom(genericMultiValueType)) { return OType.BINARY; } else { return OType.getTypeByClass(f.getType()); } else if (Collection.class.isAssignableFrom(f.getType())) { if (genericMultiValueType.isEnum() || isSerializedType(f) || OObjectEntitySerializer.isEmbeddedField(iClass, fieldName) || OReflectionHelper.isJavaType(genericMultiValueType)) return Set.class.isAssignableFrom(f.getType()) ? OType.EMBEDDEDSET : OType.EMBEDDEDLIST; else return Set.class.isAssignableFrom(f.getType()) ? OType.LINKSET : OType.LINKLIST; } else { if (genericMultiValueType.isEnum() || isSerializedType(f) || OObjectEntitySerializer.isEmbeddedField(iClass, fieldName) || OReflectionHelper.isJavaType(genericMultiValueType)) return OType.EMBEDDEDMAP; else return OType.LINKMAP; } } else if (OObjectEntitySerializer.isEmbeddedField(iClass, fieldName)) { return OType.EMBEDDED; } else if (Date.class.isAssignableFrom(f.getType())) { return OType.DATETIME; } else { return OType.getTypeByClass(f.getType()); } } public static Field getField(String fieldName, Class<?> iClass) { for (Field f : iClass.getDeclaredFields()) { if (f.getName().equals(fieldName)) return f; } if (iClass.getSuperclass().equals(Object.class)) return null; return getField(fieldName, iClass.getSuperclass()); } /** * Serialize the user POJO to a ORecordDocument instance. 
* * @param iPojo * User pojo to serialize * @throws IllegalAccessException * @throws IllegalArgumentException */ @SuppressWarnings("unchecked") protected static <T> T toStream(final T iPojo, final Proxy iProxiedPojo, ODatabaseObject db) throws IllegalArgumentException, IllegalAccessException { final ODocument iRecord = getDocument(iProxiedPojo); final long timer = Orient.instance().getProfiler().startChrono(); final Integer identityRecord = System.identityHashCode(iPojo); if (OObjectSerializationThreadLocal.INSTANCE.get().containsKey(identityRecord)) return (T) OObjectSerializationThreadLocal.INSTANCE.get().get(identityRecord); OObjectSerializationThreadLocal.INSTANCE.get().put(identityRecord, iProxiedPojo); OProperty schemaProperty; final Class<?> pojoClass = iPojo.getClass(); final OClass schemaClass = iRecord.getSchemaClass(); // CHECK FOR ID BINDING final Field idField = getIdField(pojoClass); if (idField != null) { Object id = getFieldValue(idField, iPojo); if (id != null) { // FOUND if (id instanceof ORecordId) { iRecord.setIdentity((ORecordId) id); } else if (id instanceof Number) { // TREATS AS CLUSTER POSITION ((ORecordId) iRecord.getIdentity()).clusterId = schemaClass.getDefaultClusterId(); ((ORecordId) iRecord.getIdentity()).clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(((Number) id).longValue()); } else if (id instanceof String) ((ORecordId) iRecord.getIdentity()).fromString((String) id); else if (id.getClass().equals(Object.class)) iRecord.setIdentity((ORecordId) id); else OLogManager.instance().warn(OObjectSerializerHelper.class, "@Id field has been declared as %s while the supported are: ORID, Number, String, Object", id.getClass()); } if (iRecord.getIdentity().isValid() && iRecord.getIdentity().isPersistent()) iRecord.reload(); } // CHECK FOR VERSION BINDING final Field vField = getVersionField(pojoClass); boolean versionConfigured = false; if (vField != null) { versionConfigured = true; Object ver = getFieldValue(vField, iPojo); if 
(ver != null) { // FOUND if (ver instanceof Number) { // TREATS AS CLUSTER POSITION // TODO add support of extended version to object database iRecord.setVersion(((Number) ver).intValue()); } else if (ver instanceof String) iRecord.setVersion(Integer.parseInt((String) ver)); else if (ver.getClass().equals(Object.class)) iRecord.setVersion((Integer) ver); else OLogManager.instance().warn(OObjectSerializerHelper.class, "@Version field has been declared as %s while the supported are: Number, String, Object", ver.getClass()); } } if (db.isMVCC() && !versionConfigured && db.getTransaction() instanceof OTransactionOptimistic) throw new OTransactionException( "Cannot involve an object of class '" + pojoClass + "' in an Optimistic Transaction commit because it does not define @Version or @OVersion and therefore cannot handle MVCC"); String fieldName; Object fieldValue; // CALL BEFORE MARSHALLING invokeCallback(pojoClass, iPojo, iRecord, OBeforeSerialization.class); Class<?> currentClass = pojoClass; while (!currentClass.equals(Object.class) && classes.contains(pojoClass)) { for (Field p : currentClass.getDeclaredFields()) { if (Modifier.isStatic(p.getModifiers()) || Modifier.isNative(p.getModifiers()) || Modifier.isTransient(p.getModifiers()) || p.getType().isAnonymousClass()) continue; fieldName = p.getName(); List<String> classTransientFields = transientFields.get(pojoClass); if ((idField != null && fieldName.equals(idField.getName()) || (vField != null && fieldName.equals(vField.getName())) || (classTransientFields != null && classTransientFields .contains(fieldName)))) continue; fieldValue = getFieldValue(p, iPojo); if (fieldValue != null && fieldValue.getClass().isAnonymousClass()) continue; if (isSerializedType(p)) fieldValue = serializeFieldValue(p.getType(), fieldValue); schemaProperty = schemaClass != null ? schemaClass.getProperty(fieldName) : null; OType fieldType = schemaProperty != null ? 
schemaProperty.getType() : getTypeByClass(currentClass, fieldName); if (fieldValue != null) { if (isEmbeddedObject(p)) { // AUTO CREATE SCHEMA CLASS if (iRecord.getSchemaClass() == null) { db.getMetadata().getSchema().createClass(iPojo.getClass()); iRecord.setClassNameIfExists(iPojo.getClass().getSimpleName()); } } } fieldValue = typeToStream(fieldValue, fieldType, db, iRecord); iRecord.field(fieldName, fieldValue, fieldType); } currentClass = currentClass.getSuperclass(); if (currentClass == null || currentClass.equals(ODocument.class)) // POJO EXTENDS ODOCUMENT: SPECIAL CASE: AVOID TO CONSIDER // ODOCUMENT FIELDS currentClass = Object.class; } // CALL AFTER MARSHALLING invokeCallback(pojoClass, iPojo, iRecord, OAfterSerialization.class); OObjectSerializationThreadLocal.INSTANCE.get().remove(identityRecord); Orient.instance().getProfiler().stopChrono("Object.toStream", "Serialize a POJO", timer); return (T) iProxiedPojo; } protected static void invokeCallback(final Object iPojo, final ODocument iDocument, final Class<?> iAnnotation) { invokeCallback(iPojo.getClass(), iPojo, iDocument, iAnnotation); } protected static void invokeCallback(final Class<?> iClass, final Object iPojo, final ODocument iDocument, final Class<?> iAnnotation) { final List<Method> methods = getCallbackMethods(iAnnotation, iClass); if (methods != null && !methods.isEmpty()) for (Method m : methods) { try { if (m.getParameterTypes().length > 0) m.invoke(iPojo, iDocument); else m.invoke(iPojo); } catch (Exception e) { throw new OConfigurationException("Error on executing user callback '" + m.getName() + "' annotated with '" + iAnnotation.getSimpleName() + "'", e); } } } protected static List<Method> getCallbackMethods(final Class<?> iAnnotation, final Class<?> iClass) { if (!classes.contains(iClass)) { registerClass(iClass); } List<Method> result = new ArrayList<Method>(); Class<?> currentClass = iClass; while (classes.contains(currentClass)) { List<Method> callbackMethods = 
callbacks.get(currentClass.getSimpleName() + "." + iAnnotation.getSimpleName()); if (callbackMethods != null && !callbackMethods.isEmpty()) result.addAll(callbackMethods); if (currentClass != Object.class) currentClass = currentClass.getSuperclass(); } return result; } @SuppressWarnings({ "unchecked", "rawtypes" }) private static void registerCallbacks(final Class<?> iRootClass) { // FIND KEY METHODS for (Method m : iRootClass.getDeclaredMethods()) { // SEARCH FOR CALLBACK ANNOTATIONS for (Class annotationClass : OObjectSerializerHelper.callbackAnnotationClasses) { final String key = iRootClass.getSimpleName() + "." + annotationClass.getSimpleName(); if (m.getAnnotation(annotationClass) != null) { if (!callbacks.containsKey(key)) { callbacks.put(key, new ArrayList<Method>(Arrays.asList(m))); } else { callbacks.get(key).add(m); } } } } } @SuppressWarnings("unchecked") private static Object multiValueToStream(final Object iMultiValue, OType iType, final ODatabaseObject db, final ODocument iRecord) { if (iMultiValue == null) return null; final Collection<Object> sourceValues; if (iMultiValue instanceof Collection<?>) { sourceValues = (Collection<Object>) iMultiValue; } else { sourceValues = (Collection<Object>) ((Map<?, ?>) iMultiValue).values(); } if (sourceValues.size() == 0) return iMultiValue; // TRY TO UNDERSTAND THE COLLECTION TYPE BY ITS CONTENT final Object firstValue = sourceValues.iterator().next(); if (firstValue == null) return iMultiValue; if (iType == null) { // DETERMINE THE RIGHT TYPE BASED ON SOURCE MULTI VALUE OBJECT if (OType.isSimpleType(firstValue)) { if (iMultiValue instanceof List) iType = OType.EMBEDDEDLIST; else if (iMultiValue instanceof Set) iType = OType.EMBEDDEDSET; else iType = OType.EMBEDDEDMAP; } else { if (iMultiValue instanceof List) iType = OType.LINKLIST; else if (iMultiValue instanceof Set) iType = OType.LINKSET; else iType = OType.LINKMAP; } } Object result = iMultiValue; final OType linkedType; // CREATE THE RETURN MULTI VALUE 
OBJECT BASED ON DISCOVERED TYPE if (iType.equals(OType.EMBEDDEDSET) || iType.equals(OType.LINKSET)) { if (isToSerialize(firstValue.getClass())) result = new HashSet<Object>(); else if ((iRecord != null && iType.equals(OType.EMBEDDEDSET)) || OType.isSimpleType(firstValue)) result = new OTrackedSet<Object>(iRecord); else result = new ORecordLazySet(iRecord); } else if (iType.equals(OType.EMBEDDEDLIST) || iType.equals(OType.LINKLIST)) { if (isToSerialize(firstValue.getClass())) result = new ArrayList<Object>(); else if ((iRecord != null && iType.equals(OType.EMBEDDEDLIST)) || OType.isSimpleType(firstValue)) result = new OTrackedList<Object>(iRecord); else result = new ORecordLazyList(iRecord); } if (iType.equals(OType.LINKLIST) || iType.equals(OType.LINKSET) || iType.equals(OType.LINKMAP)) linkedType = OType.LINK; else if (iType.equals(OType.EMBEDDEDLIST) || iType.equals(OType.EMBEDDEDSET) || iType.equals(OType.EMBEDDEDMAP)) if (firstValue instanceof List) linkedType = OType.EMBEDDEDLIST; else if (firstValue instanceof Set) linkedType = OType.EMBEDDEDSET; else if (firstValue instanceof Map) linkedType = OType.EMBEDDEDMAP; else linkedType = OType.EMBEDDED; else throw new IllegalArgumentException("Type " + iType + " must be a multi value type (collection or map)"); if (iMultiValue instanceof Set<?>) { for (Object o : sourceValues) { ((Set<Object>) result).add(typeToStream(o, linkedType, db, null)); } } else if (iMultiValue instanceof List<?>) { for (int i = 0; i < sourceValues.size(); i++) { ((List<Object>) result).add(typeToStream(((List<?>) sourceValues).get(i), linkedType, db, null)); } } else { if (iMultiValue instanceof OObjectLazyMap<?>) { result = ((OObjectLazyMap<?>) iMultiValue).getUnderlying(); } else { if (isToSerialize(firstValue.getClass())) result = new HashMap<Object, Object>(); else if (iRecord != null && iType.equals(OType.EMBEDDEDMAP)) result = new OTrackedMap<Object>(iRecord); else result = new ORecordLazyMap(iRecord); for (Entry<Object, Object> entry 
: ((Map<Object, Object>) iMultiValue).entrySet()) {
          // CONVERT EACH MAP VALUE; KEYS ARE COPIED AS-IS
          ((Map<Object, Object>) result).put(entry.getKey(), typeToStream(entry.getValue(), linkedType, db, null));
        }
      }
    }

    return result;
  }

  /**
   * Creates a fresh, non-proxied instance of the proxied object's class by
   * instantiating its superclass (the proxy subclass wraps the real POJO class)
   * through the no-arg constructor. Logs and returns {@code null} when the class
   * cannot be instantiated or its constructor is not accessible.
   * NOTE(review): requires an accessible no-arg constructor on the POJO class —
   * confirm this precondition holds for all registered entities.
   */
  @SuppressWarnings("unchecked")
  public static <T> T getNonProxiedInstance(T iObject) {
    try {
      return (T) iObject.getClass().getSuperclass().newInstance();
    } catch (InstantiationException ie) {
      OLogManager.instance().error(iObject, "Error creating instance for class " + iObject.getClass().getSuperclass(), ie);
    } catch (IllegalAccessException ie) {
      OLogManager.instance().error(iObject, "Error creating instance for class " + iObject.getClass().getSuperclass(), ie);
    }
    return null;
  }

  /**
   * Tells whether the given field was registered as embedded for its declaring
   * class, registering the class on first use.
   */
  private static boolean isEmbeddedObject(Field f) {
    if (!classes.contains(f.getDeclaringClass()))
      registerClass(f.getDeclaringClass());
    return isEmbeddedField(f.getDeclaringClass(), f.getName());
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
 * or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */

package org.apache.commons.math3.optim.nonlinear.vector.jacobian;

import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
import java.awt.geom.Point2D;
import org.apache.commons.math3.optim.PointVectorValuePair;
import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.nonlinear.vector.Target;
import org.apache.commons.math3.optim.nonlinear.vector.Weight;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.commons.math3.stat.descriptive.StatisticalSummary;
import org.apache.commons.math3.util.FastMath;
import org.junit.Test;
import org.junit.Assert;

/**
 * This class demonstrates the main functionality of the
 * {@link AbstractLeastSquaresOptimizer}, common to the
 * optimizer implementations in package
 * {@link org.apache.commons.math3.optim.nonlinear.vector.jacobian}.
 * <br/>
 * Not enabled by default, as the class name does not end with "Test".
* <br/> * Invoke by running * <pre><code> * mvn test -Dtest=AbstractLeastSquaresOptimizerTestValidation * </code></pre> * or by running * <pre><code> * mvn test -Dtest=AbstractLeastSquaresOptimizerTestValidation -DargLine="-DmcRuns=1234 -server" * </code></pre> */ public class AbstractLeastSquaresOptimizerTestValidation { private static final int MONTE_CARLO_RUNS = Integer.parseInt(System.getProperty("mcRuns", "100")); /** * Using a Monte-Carlo procedure, this test checks the error estimations * as provided by the square-root of the diagonal elements of the * covariance matrix. * <br/> * The test generates sets of observations, each sampled from * a Gaussian distribution. * <br/> * The optimization problem solved is defined in class * {@link StraightLineProblem}. * <br/> * The output (on stdout) will be a table summarizing the distribution * of parameters generated by the Monte-Carlo process and by the direct * estimation provided by the diagonal elements of the covariance matrix. */ @Test public void testParametersErrorMonteCarloObservations() { // Error on the observations. final double yError = 15; // True values of the parameters. final double slope = 123.456; final double offset = -98.765; // Samples generator. final RandomStraightLinePointGenerator lineGenerator = new RandomStraightLinePointGenerator(slope, offset, yError, -1e3, 1e4, 138577L); // Number of observations. final int numObs = 100; // XXX Should be a command-line option. // number of parameters. final int numParams = 2; // Parameters found for each of Monte-Carlo run. final SummaryStatistics[] paramsFoundByDirectSolution = new SummaryStatistics[numParams]; // Sigma estimations (square-root of the diagonal elements of the // covariance matrix), for each Monte-Carlo run. final SummaryStatistics[] sigmaEstimate = new SummaryStatistics[numParams]; // Initialize statistics accumulators. 
for (int i = 0; i < numParams; i++) { paramsFoundByDirectSolution[i] = new SummaryStatistics(); sigmaEstimate[i] = new SummaryStatistics(); } // Dummy optimizer (to compute the covariance matrix). final AbstractLeastSquaresOptimizer optim = new DummyOptimizer(); final double[] init = { slope, offset }; // Monte-Carlo (generates many sets of observations). final int mcRepeat = MONTE_CARLO_RUNS; int mcCount = 0; while (mcCount < mcRepeat) { // Observations. final Point2D.Double[] obs = lineGenerator.generate(numObs); final StraightLineProblem problem = new StraightLineProblem(yError); for (int i = 0; i < numObs; i++) { final Point2D.Double p = obs[i]; problem.addPoint(p.x, p.y); } // Direct solution (using simple regression). final double[] regress = problem.solve(); // Estimation of the standard deviation (diagonal elements of the // covariance matrix). final PointVectorValuePair optimum = optim.optimize(new MaxEval(Integer.MAX_VALUE), problem.getModelFunction(), problem.getModelFunctionJacobian(), new Target(problem.target()), new Weight(problem.weight()), new InitialGuess(init)); final double[] sigma = optim.computeSigma(optimum.getPoint(), 1e-14); // Accumulate statistics. for (int i = 0; i < numParams; i++) { paramsFoundByDirectSolution[i].addValue(regress[i]); sigmaEstimate[i].addValue(sigma[i]); } // Next Monte-Carlo. ++mcCount; } // Print statistics. final String line = "--------------------------------------------------------------"; System.out.println(" True value Mean Std deviation"); for (int i = 0; i < numParams; i++) { System.out.println(line); System.out.println("Parameter #" + i); StatisticalSummary s = paramsFoundByDirectSolution[i].getSummary(); System.out.printf(" %+.6e %+.6e %+.6e\n", init[i], s.getMean(), s.getStandardDeviation()); s = sigmaEstimate[i].getSummary(); System.out.printf("sigma: %+.6e (%+.6e)\n", s.getMean(), s.getStandardDeviation()); } System.out.println(line); // Check the error estimation. 
for (int i = 0; i < numParams; i++) { Assert.assertEquals(paramsFoundByDirectSolution[i].getSummary().getStandardDeviation(), sigmaEstimate[i].getSummary().getMean(), 8e-2); } } /** * In this test, the set of observations is fixed. * Using a Monte-Carlo procedure, it generates sets of parameters, * and determine the parameter change that will result in the * normalized chi-square becoming larger by one than the value from * the best fit solution. * <br/> * The optimization problem solved is defined in class * {@link StraightLineProblem}. * <br/> * The output (on stdout) will be a list of lines containing: * <ul> * <li>slope of the straight line,</li> * <li>intercept of the straight line,</li> * <li>chi-square of the solution defined by the above two values.</li> * </ul> * The output is separated into two blocks (with a blank line between * them); the first block will contain all parameter sets for which * {@code chi2 < chi2_b + 1} * and the second block, all sets for which * {@code chi2 >= chi2_b + 1} * where {@code chi2_b} is the lowest chi-square (corresponding to the * best solution). */ @Test public void testParametersErrorMonteCarloParameters() { // Error on the observations. final double yError = 15; // True values of the parameters. final double slope = 123.456; final double offset = -98.765; // Samples generator. final RandomStraightLinePointGenerator lineGenerator = new RandomStraightLinePointGenerator(slope, offset, yError, -1e3, 1e4, 13839013L); // Number of observations. final int numObs = 10; // number of parameters. final int numParams = 2; // Create a single set of observations. final Point2D.Double[] obs = lineGenerator.generate(numObs); final StraightLineProblem problem = new StraightLineProblem(yError); for (int i = 0; i < numObs; i++) { final Point2D.Double p = obs[i]; problem.addPoint(p.x, p.y); } // Direct solution (using simple regression). final double[] regress = problem.solve(); // Dummy optimizer (to compute the chi-square). 
final AbstractLeastSquaresOptimizer optim = new DummyOptimizer(); final double[] init = { slope, offset }; // Get chi-square of the best parameters set for the given set of // observations. final double bestChi2N = getChi2N(optim, problem, regress); final double[] sigma = optim.computeSigma(regress, 1e-14); // Monte-Carlo (generates a grid of parameters). final int mcRepeat = MONTE_CARLO_RUNS; final int gridSize = (int) FastMath.sqrt(mcRepeat); // Parameters found for each of Monte-Carlo run. // Index 0 = slope // Index 1 = offset // Index 2 = normalized chi2 final List<double[]> paramsAndChi2 = new ArrayList<double[]>(gridSize * gridSize); final double slopeRange = 10 * sigma[0]; final double offsetRange = 10 * sigma[1]; final double minSlope = slope - 0.5 * slopeRange; final double minOffset = offset - 0.5 * offsetRange; final double deltaSlope = slopeRange/ gridSize; final double deltaOffset = offsetRange / gridSize; for (int i = 0; i < gridSize; i++) { final double s = minSlope + i * deltaSlope; for (int j = 0; j < gridSize; j++) { final double o = minOffset + j * deltaOffset; final double chi2N = getChi2N(optim, problem, new double[] {s, o}); paramsAndChi2.add(new double[] {s, o, chi2N}); } } // Output (for use with "gnuplot"). // Some info. // For plotting separately sets of parameters that have a large chi2. final double chi2NPlusOne = bestChi2N + 1; int numLarger = 0; final String lineFmt = "%+.10e %+.10e %.8e\n"; // Point with smallest chi-square. System.out.printf(lineFmt, regress[0], regress[1], bestChi2N); System.out.println(); // Empty line. // Points within the confidence interval. for (double[] d : paramsAndChi2) { if (d[2] <= chi2NPlusOne) { System.out.printf(lineFmt, d[0], d[1], d[2]); } } System.out.println(); // Empty line. // Points outside the confidence interval. for (double[] d : paramsAndChi2) { if (d[2] > chi2NPlusOne) { ++numLarger; System.out.printf(lineFmt, d[0], d[1], d[2]); } } System.out.println(); // Empty line. 
System.out.println("# sigma=" + Arrays.toString(sigma));
        System.out.println("# " + numLarger + " sets filtered out");
    }

    /**
     * Runs the optimizer on the problem from the given start parameters and
     * returns the chi-square normalized by the number of degrees of freedom
     * (number of observations minus number of fitted parameters).
     *
     * @param optim optimizer used to evaluate the chi-square.
     * @param problem least-squares problem providing target and weights.
     * @param params parameter values at which the chi-square is evaluated.
     * @return the normalized chi-square.
     */
    private double getChi2N(AbstractLeastSquaresOptimizer optim,
                            StraightLineProblem problem,
                            double[] params) {
        final double[] t = problem.target();
        final double[] w = problem.weight();

        optim.optimize(new MaxEval(Integer.MAX_VALUE),
                       problem.getModelFunction(),
                       problem.getModelFunctionJacobian(),
                       new Target(t),
                       new Weight(w),
                       new InitialGuess(params));

        // Normalize by the degrees of freedom: observations minus parameters.
        return optim.getChiSquare() / (t.length - params.length);
    }
}

/**
 * A dummy optimizer.
 * Used for computing the covariance matrix.
 */
class DummyOptimizer extends AbstractLeastSquaresOptimizer {
    // No convergence checker is needed: doOptimize performs a single evaluation.
    public DummyOptimizer() {
        super(null);
    }

    /**
     * This method does nothing and returns a dummy value.
     * It only evaluates the residuals at the start point and stores the cost,
     * so that the covariance/sigma computations have the state they need.
     */
    @Override
    public PointVectorValuePair doOptimize() {
        final double[] params = getStartPoint();
        final double[] res = computeResiduals(computeObjectiveValue(params));
        setCost(computeCost(res));
        // The "value" part is unused by the callers in this file, hence null.
        return new PointVectorValuePair(params, null);
    }
}
/* * Copyright 2013 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.jenkins.plugins.util; import static com.google.common.base.Preconditions.checkNotNull; import com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import java.io.IOException; import java.util.LinkedList; /** * This is an implementation of {@link Executor} that can be injected to inject a set of canned * responses to requests including: * * <ul> * <li>A pre-determined object * <li>Throwing an {@link IOException} or {@link ExecutorException} * <li>Passing through a part of the request as the response * </ul> */ public class MockExecutor extends Executor { public MockExecutor() { requestTypes = new LinkedList<>(); responses = new LinkedList<>(); exceptions = new LinkedList<>(); predicates = new LinkedList<>(); sawUnexpected = false; } /** {@inheritDoc} */ @Override public void sleep() { // Never sleep, this is a test library, we want fast tests. } /** {@inheritDoc} */ @Override public <T> T execute(RequestCallable<T> request) throws IOException, ExecutorException { // TODO(nghia): Think about implementing this. 
throw new UnsupportedOperationException(); } /** {@inheritDoc} */ @Override public <T> T execute(AbstractGoogleJsonClientRequest<T> request) throws IOException, ExecutorException { Class<?> requestClass = request.getClass(); if (requestTypes.isEmpty()) { sawUnexpected = true; throw new IllegalStateException("Unexpected request: " + requestClass); } // Remove all three states to keep the lists in sync Class<?> clazz = requestTypes.removeFirst(); Object response = responses.removeFirst(); Exception exception = exceptions.removeFirst(); Predicate<AbstractGoogleJsonClientRequest<T>> predicate = (Predicate<AbstractGoogleJsonClientRequest<T>>) predicates.removeFirst(); if (requestClass != clazz) { sawUnexpected = true; throw new IllegalStateException( "Unexpected (or out of order) request: " + requestClass + " expected: " + clazz); } if (!predicate.apply(request)) { sawUnexpected = true; throw new IllegalStateException( "User predicate: " + predicate + " failed for request: " + requestClass); } if (response == null) { if (exception != null) { if (exception instanceof IOException) { throw (IOException) exception; // throwWhen(IOException) } else { throw (ExecutorException) exception; // throwWhen(ExecutorException) } } return (T) request.getJsonContent(); // passThruWhen } return (T) response; // when } /** * When the next request matches the given {@code requestType} and the provided user {@link * Predicate} return {@code response} as the response. */ public <T, S extends AbstractGoogleJsonClientRequest<T>, C extends S> void when( Class<C> requestType, T response, Predicate<S> predicate) { requestTypes.add(checkNotNull(requestType)); responses.add(response); // must allow null for delete's Void return type exceptions.add(null); predicates.add(checkNotNull(predicate)); } /** * When the next request matches the given {@code requestType} return {@code response} as the * response. 
*/ public <T, C extends AbstractGoogleJsonClientRequest<T>> void when( Class<C> requestType, T response) { when(requestType, response, Predicates.alwaysTrue()); } /** * When the next request matches the given {@code requestType} and the provided user {@link * Predicate} throw {@code exception} instead of responding. */ public <T, S extends AbstractGoogleJsonClientRequest<T>, C extends S> void throwWhen( Class<C> requestType, IOException exception, Predicate<S> predicate) { throwWhenInternal(requestType, exception, predicate); } /** * When the next request matches the given {@code requestType} throw {@code exception} instead of * responding. */ public <T, C extends AbstractGoogleJsonClientRequest<T>> void throwWhen( Class<C> requestType, IOException exception) { throwWhen(requestType, exception, Predicates.alwaysTrue()); } /** * When the next request matches the given {@code requestType} and the provided user {@link * Predicate} throw {@code exception} instead of responding. */ public <T, S extends AbstractGoogleJsonClientRequest<T>, C extends S> void throwWhen( Class<C> requestType, ExecutorException exception, Predicate<S> predicate) { throwWhenInternal(requestType, exception, predicate); } /** * When the next request matches the given {@code requestType} throw {@code exception} instead of * responding. */ public <T, C extends AbstractGoogleJsonClientRequest<T>> void throwWhen( Class<C> requestType, ExecutorException exception) { throwWhen(requestType, exception, Predicates.alwaysTrue()); } /** * When the next request matches the given {@code requestType} and the provided user {@link * Predicate} throw {@code exception} instead of responding. 
*/ private <T, S extends AbstractGoogleJsonClientRequest<T>, C extends S> void throwWhenInternal( Class<C> requestType, Exception exception, Predicate<S> predicate) { requestTypes.add(checkNotNull(requestType)); responses.add(null); exceptions.add(exception); predicates.add(checkNotNull(predicate)); } /** * When the next request matches the given {@code requestType} and the provided user {@link * Predicate} pass through the request's {@code getJsonContent()} cast to the expected response * type. */ public <T, S extends AbstractGoogleJsonClientRequest<T>, C extends S> void passThruWhen( Class<C> requestType, Predicate<S> predicate) { requestTypes.add(checkNotNull(requestType)); responses.add(null); exceptions.add(null); predicates.add(checkNotNull(predicate)); } /** * When the next request matches the given {@code requestType} pass through the request's {@code * getJsonContent()} cast to the expected response type. */ public <T, C extends AbstractGoogleJsonClientRequest<T>> void passThruWhen(Class<C> requestType) { passThruWhen(requestType, Predicates.alwaysTrue()); } /** Did we see all of the expected requests? */ public boolean sawAll() { return requestTypes.isEmpty(); } /** Did we see any unexpected requests? */ public boolean sawUnexpected() { return sawUnexpected; } private final LinkedList<Class<?>> requestTypes; private final LinkedList<Object> responses; private final LinkedList<Exception> exceptions; private final LinkedList<Predicate<?>> predicates; private boolean sawUnexpected; }
package uk.ac.ebi.pride.archive.repo.assay;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import uk.ac.ebi.pride.archive.dataprovider.param.CvParamProvider;
import uk.ac.ebi.pride.archive.dataprovider.param.ParamProvider;
import uk.ac.ebi.pride.archive.dataprovider.utils.TitleConstants;
import uk.ac.ebi.pride.archive.repo.config.ArchiveOracleConfig;
import uk.ac.ebi.pride.archive.repo.repos.assay.*;
import uk.ac.ebi.pride.archive.repo.repos.assay.instrument.*;
import uk.ac.ebi.pride.archive.repo.repos.assay.software.Software;
import uk.ac.ebi.pride.archive.repo.repos.assay.software.SoftwareCvParam;
import uk.ac.ebi.pride.archive.repo.repos.assay.software.SoftwareUserParam;
import uk.ac.ebi.pride.archive.repo.repos.param.CvParam;
import uk.ac.ebi.pride.archive.repo.repos.param.CvParamRepository;

import java.util.*;

import static junit.framework.Assert.assertNotNull;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;

/**
 * Spring-wired persistence test for {@code Assay} entities and their associated
 * PTMs, params, contacts, quantification methods, softwares and instruments.
 * The expected values below mirror a pre-loaded database fixture; this test
 * requires that fixture to be present (NOTE(review): fixture source not visible
 * in this file — confirm against the test DB setup).
 *
 * @author Jose A. Dianes
 * @version $Id$
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {ArchiveOracleConfig.class})
public class AssayPersistenceTest {

  // --- Expected fixture values for assay 1 and assay 2 ---
  private static final long ASSAY_1_ID = 44444;
  private static final long ASSAY_2_ID = 444440;
  private static final String ASSAY_1_ACCESSION = "000001";
  private static final String ASSAY_2_ACCESSION = "000002";
  private static final long PROJECT_1_ID = 11111;
  private static final int NUM_ASSAY_PROJECT_1 = 2;
  private static final String ASSAY_1_TITLE = "Experiment title";
  private static final String ASSAY_2_TITLE = "Experiment 2 title";
  private static final String ASSAY_1_SHORT_LABEL = "Short label";
  private static final String ASSAY_2_SHORT_LABEL = "Short label 2";
  private static final int ASSAY_1_PROTEIN_COUNT = 1;
  private static final int ASSAY_2_PROTEIN_COUNT = 1;
  private static final int ASSAY_1_PEPTIDE_COUNT = 1;
  private static final int ASSAY_2_PEPTIDE_COUNT = 1;
  private static final int ASSAY_1_UNIQUE_PEPTIDE_COUNT = 1;
  private static final int ASSAY_2_UNIQUE_PEPTIDE_COUNT = 1;
  private static final int ASSAY_1_IDENTIFIED_SPECTRUM_COUNT = 1;
  private static final int ASSAY_2_IDENTIFIED_SPECTRUM_COUNT = 1;
  private static final int ASSAY_1_TOTAL_SPECTRUM_COUNT = 1;
  private static final int ASSAY_2_TOTAL_SPECTRUM_COUNT = 1;
  private static final boolean ASSAY_1_HAS_MS2_ANNOTATION = true;
  private static final boolean ASSAY_2_HAS_MS2_ANNOTATION = false;
  private static final boolean ASSAY_1_HAS_CHROMATOGRAM = true;
  private static final boolean ASSAY_2_HAS_CHROMATOGRAM = false;
  private static final String ASSAY_1_EXPERIMENT_FACTOR = "Experimental factor";
  private static final String ASSAY_2_EXPERIMENT_FACTOR = "Experimental 2 factor";
  private static final int NUM_PTM_ASSAY_1 = 1;
  private static final long PTM_1_ID = 1010101010;

  // --- Expected CV param fixture values ---
  private static final long CV_PARAM_1_ID = 50005;
  private static final String CV_PARAM_2_LABEL = "Project Sample Param Label";
  private static final String CV_PARAM_2_ACCESSION = "Project Sample";
  private static final String CV_PARAM_2_NAME = "Project Sample Name";
  private static final long CV_PARAM_2_ID = 66666;
  private static final String CV_PARAM_1_LABEL = "MOD";
  private static final String CV_PARAM_1_ACCESSION = "MOD:00091";
  private static final String CV_PARAM_1_NAME = "L-arginine amide";
  private static final long CV_PARAM_3_ID = 1212121212;
  private static final String CV_PARAM_3_LABEL = "Exp Type CV Param Label";
  private static final String CV_PARAM_3_ACCESSION = "Exp Type CV Param Accession";
  private static final String CV_PARAM_3_NAME = "Exp Type CV Param Name";
  private static final long CV_PARAM_4_ID = 1313131313;
  private static final String CV_PARAM_4_LABEL = "Group Project CV Param Label";
  private static final String CV_PARAM_4_ACCESSION = "Group Project CV Param Accession";
  private static final String CV_PARAM_4_NAME = "Group Project CV Param Name";

  // --- Expected associated-entity fixture values for assay 1 ---
  private static final int NUM_QUANTIFICATION_METHODS_ASSAY_1 = 1;
  private static final long QUANTIFICATION_METHOD_1_ID = 1515151515;
  private static final int NUM_ASSAY_SAMPLE_PARAM_PROJECT_1 = 1;
  private static final long ASSAY_SAMPLE_PARAM_1_ID = 1717171717;
  private static final long SOFTWARE_1_ID = 1111;
  private static final int NUM_PARAMS_SOFTWARE_1 = 2;
  private static final String SOFTWARE_1_CUSTOMIZATION = "customizations";
  private static final String SOFTWARE_1_NAME = "Mascot";
  private static final String SOFTWARE_1_VERSION = "1.2.3";
  private static final int NUM_SOFTWARES_ASSAY_1 = 1;
  private static final int NUM_CONTACTS_ASSAY_1 = 1;
  private static final String CONTACT_1_AFFILIATION = "EBI";
  private static final String CONTACT_1_EMAIL = "john.smith@dummy.ebi.com";
  private static final String CONTACT_1_FIRST_NAME = "John";
  private static final String CONTACT_1_LAST_NAME = "Smith";
  private static final long CONTACT_1_ID = 33333;
  private static final int NUM_GROUP_PARAMS_ASSAY_1 = 1;
  private static final long GROUP_PARAM_1_ID = 1818181818;
  private static final int NUM_INSTRUMENTS_ASSAY_1 = 1;
  private static final long INSTRUMENT_1_ID = 1111;
  private static final String MODEL_NAME = "FT_ICR";
  private static final String MODEL_VALUE = "icr";
  private static final long ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_ID = 1111;
  private static final int ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_ORDER = 1;
  private static final long ASSAY_1_ANALYZER_INSTRUMENT_COMPONENT_ID = 1112;
  private static final int ASSAY_1_ANALYZER_INSTRUMENT_COMPONENT_ORDER = 2;
  private static final long ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_ID = 1113;
  private static final int ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_ORDER = 3;
  private static final String ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_NAME = "ESI";
  private static final String ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_ACCESSION = "source";
  private static final String ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_LABEL = "MS";
  private static final String ASSAY_1_ANALYZER1_INSTRUMENT_COMPONENT_NAME = "TOF";
  private static final String ASSAY_1_ANALYZER1_INSTRUMENT_COMPONENT_ACCESSION = "analyzer1";
  private static final String ASSAY_1_ANALYZER1_INSTRUMENT_COMPONENT_LABEL = "MS";
  private static final String ASSAY_1_ANALYZER2_INSTRUMENT_COMPONENT_NAME = "LTQ";
  private static final String ASSAY_1_ANALYZER2_INSTRUMENT_COMPONENT_ACCESSION = "analyzer2";
  private static final String ASSAY_1_ANALYZER2_INSTRUMENT_COMPONENT_LABEL = "MS";
  private static final String ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_NAME = "plate";
  private static final String ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_ACCESSION = "detector";
  private static final String ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_LABEL = "MS";

  // --- Values used to build and verify the transient "another assay" in testSaveAndGet ---
  private static final String ANOTHER_ASSAY_ACCESSION = "Another Assay Accession";
  private static final String ANOTHER_ASSAY_TITLE = "Another Assay Title";
  private static final long ANOTHER_ASSAY_PROJECT_ID = 11111;
  private static final String ANOTHER_ASSAY_SHORT_LABEL = "Another Assay Short label";
  private static final int ANOTHER_ASSAY_PROTEIN_COUNT = 1;
  private static final int ANOTHER_ASSAY_PEPTIDE_COUNT = 1;
  private static final int ANOTHER_ASSAY_UNIQUE_PEPTIDE_COUNT = 1;
  private static final int ANOTHER_ASSAY_IDENTIFIED_SPECTRUM_COUNT = 1;
  private static final int ANOTHER_ASSAY_TOTAL_SPECTRUM_COUNT = 1;
  private static final boolean ANOTHER_ASSAY_HAS_MS2_ANNOTATION = true;
  private static final boolean ANOTHER_ASSAY_HAS_CHROMATOGRAM = true;
  private static final String ANOTHER_ASSAY_EXPERIMENTAL_FACTOR = "Another Assay Experimental factor";
  private static final int NUM_PTM_OTHER_ASSAY = 1;
  private static final int NUM_GROUP_PARAMS_OTHER_ASSAY = 1;
  private static final int NUM_CONTACTS_OTHER_ASSAY = 1;
  private static final int NUM_QUANTIFICATION_METHODS_OTHER_ASSAY = 1;
  private static final int NUM_OTHER_ASSAY_SAMPLE_PARAM = 1;
  private static final int NUM_PARAMS_ANOTHER_SOFTWARE = 2;
  private static final int ANOTHER_SOFTWARE_ORDER = 0;
  private static final String ANOTHER_SOFTWARE_CUSTOMIZATION = "another customizations";
  private static final String ANOTHER_SOFTWARE_NAME = "Sequest";
  private static final String ANOTHER_SOFTWARE_VERSION = "1.0";
  private static final String ANOTHER_SOFTWARE_CV_PARAM_VALUE = "another software value";
  private static final String ANOTHER_CV_PARAM_LABEL = "Another Software CV Param Label";
  private static final String ANOTHER_CV_PARAM_ACCESSION = "Another Software CV Param Accession";
  private static final String ANOTHER_CV_PARAM_NAME = "Another Software CV Param Name";
  private static final String ANOTHER_SOFTWARE_USER_PARAM_NAME = "another software param name";
  private static final String ANOTHER_SOFTWARE_USER_PARAM_VALUE = "another software value";
  private static final int NUM_SOFTWARES_OTHER_ASSAY = 1;
  private static final int NUM_INSTRUMENTS_ANOTHER_ASSAY = 1;

  // Captures the generated id of the instrument persisted in testSaveAndGet.
  private static long newInstrumentId;

  @Autowired private AssayRepository assayRepository;
  @Autowired private CvParamRepository cvParamRepository;

  /**
   * Asserts that {@code instrument} matches the expected "instrument 1" fixture:
   * model name/value plus one source, two analyzer and one detector component CV params.
   */
  public static void checkIsInstrument1InDb(Instrument instrument) {
    // check model
    CvParamProvider model = instrument.getModel();
assertNotNull(model);
    assertThat(model.getName(), is(MODEL_NAME));
    assertThat(model.getValue(), is(MODEL_VALUE));
    // check components
    // Source component: expects exactly one, with one CV param.
    SourceInstrumentComponent sourceInstrumentComponent = instrument.getSources().iterator().next();
    assertNotNull(sourceInstrumentComponent);
    assertThat(sourceInstrumentComponent.getOrder(), is(ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_ORDER));
    InstrumentComponentCvParam instrumentSourceComponentCvParam =
        (InstrumentComponentCvParam) sourceInstrumentComponent.getParams().iterator().next();
    assertNotNull(instrumentSourceComponentCvParam);
    CvParam sourceCvParam = instrumentSourceComponentCvParam.getCvParam();
    assertNotNull(sourceCvParam);
    assertThat(sourceCvParam.getName(), is(ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_NAME));
    assertThat(sourceCvParam.getAccession(), is(ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_ACCESSION));
    assertThat(sourceCvParam.getCvLabel(), is(ASSAY_1_SOURCE_INSTRUMENT_COMPONENT_LABEL));
    // Analyzer component: one component carrying two CV params (analyzer1, analyzer2).
    // NOTE(review): assumes the params iterator preserves insertion order — confirm the
    // underlying collection type in AnalyzerInstrumentComponent.
    AnalyzerInstrumentComponent analyzerInstrumentComponent =
        instrument.getAnalyzers().iterator().next();
    assertNotNull(analyzerInstrumentComponent);
    assertThat(
        analyzerInstrumentComponent.getOrder(), is(ASSAY_1_ANALYZER_INSTRUMENT_COMPONENT_ORDER));
    Iterator<ParamProvider> params = analyzerInstrumentComponent.getParams().iterator();
    InstrumentComponentCvParam instrumentAnalyzer1ComponentCvParam =
        (InstrumentComponentCvParam) params.next();
    assertNotNull(instrumentAnalyzer1ComponentCvParam);
    CvParam analyzer1CvParam = instrumentAnalyzer1ComponentCvParam.getCvParam();
    assertNotNull(analyzer1CvParam);
    assertThat(analyzer1CvParam.getName(), is(ASSAY_1_ANALYZER1_INSTRUMENT_COMPONENT_NAME));
    assertThat(
        analyzer1CvParam.getAccession(), is(ASSAY_1_ANALYZER1_INSTRUMENT_COMPONENT_ACCESSION));
    assertThat(analyzer1CvParam.getCvLabel(), is(ASSAY_1_ANALYZER1_INSTRUMENT_COMPONENT_LABEL));
    InstrumentComponentCvParam instrumentAnalyzer2ComponentCvParam =
        (InstrumentComponentCvParam) params.next();
    assertNotNull(instrumentAnalyzer2ComponentCvParam);
    CvParam analyzer2CvParam = instrumentAnalyzer2ComponentCvParam.getCvParam();
    assertNotNull(analyzer2CvParam);
    assertThat(analyzer2CvParam.getName(), is(ASSAY_1_ANALYZER2_INSTRUMENT_COMPONENT_NAME));
    assertThat(
        analyzer2CvParam.getAccession(), is(ASSAY_1_ANALYZER2_INSTRUMENT_COMPONENT_ACCESSION));
    assertThat(analyzer2CvParam.getCvLabel(), is(ASSAY_1_ANALYZER2_INSTRUMENT_COMPONENT_LABEL));
    // Detector component: expects exactly one, with one CV param.
    DetectorInstrumentComponent detectorInstrumentComponent =
        instrument.getDetectors().iterator().next();
    assertNotNull(detectorInstrumentComponent);
    assertThat(
        detectorInstrumentComponent.getOrder(), is(ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_ORDER));
    InstrumentComponentCvParam instrumentDetectorComponentCvParam =
        (InstrumentComponentCvParam) detectorInstrumentComponent.getParams().iterator().next();
    assertNotNull(instrumentDetectorComponentCvParam);
    CvParam detectorCvParam = instrumentDetectorComponentCvParam.getCvParam();
    assertNotNull(detectorCvParam);
    assertThat(detectorCvParam.getName(), is(ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_NAME));
    assertThat(detectorCvParam.getAccession(), is(ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_ACCESSION));
    assertThat(detectorCvParam.getCvLabel(), is(ASSAY_1_DETECTOR_INSTRUMENT_COMPONENT_LABEL));
  }

  /** Looks up assay 1 by primary key and verifies the fixture values. */
  @Test
  @Transactional
  public void testGetById() throws Exception {
    Optional<Assay> assay = assayRepository.findById(ASSAY_1_ID);
    // NOTE(review): if the assay is absent, ifPresent silently skips every assertion and the
    // test passes vacuously; consider assay.orElseThrow(...) instead.
    assay.ifPresent(this::checkIsAssay1InDb);
  }

  /** Looks up assay 1 by its accession string and verifies the fixture values. */
  @Test
  @Transactional
  public void testGetByAccession() throws Exception {
    Assay assay = assayRepository.findByAccession(ASSAY_1_ACCESSION);
    checkIsAssay1InDb(assay);
  }

  /** Fetches both assays of project 1 as a list and verifies each. */
  @Test
  @Transactional
  public void testGetByProjectId() throws Exception {
    List<Assay> assays = assayRepository.findAllByProjectId(PROJECT_1_ID);
    assertNotNull(assays);
    assertThat(assays.size(), is(NUM_ASSAY_PROJECT_1));
    for (Assay assay : assays) {
      if (assay.getId() == ASSAY_1_ID) checkIsAssay1InDb(assay);
      else checkIsAssay2InDb(assay);
    }
  }

  /** Same as testGetByProjectId but via the paged repository method (page size 1). */
  @Test
  @Transactional
  public void testGetByProjectIdPage() throws Exception {
    Page<Assay> assays =
assayRepository.findAllByProjectId(PROJECT_1_ID, new PageRequest(0, 1));
    // NOTE(review): new PageRequest(...) is deprecated in newer Spring Data; PageRequest.of(0, 1)
    // is the replacement — cannot change here without verifying the Spring Data version in use.
    assertNotNull(assays);
    // Total element count spans all pages even though the page size is 1.
    assertThat((int) assays.getTotalElements(), is(NUM_ASSAY_PROJECT_1));
    for (Assay assay : assays) {
      if (assay.getId() == ASSAY_1_ID) checkIsAssay1InDb(assay);
      else checkIsAssay2InDb(assay);
    }
  }

  /**
   * Round-trip test: builds a fully populated transient assay (PTM, group params, contact,
   * quantification method, sample param, software, instrument), saves it, re-reads it by the
   * generated id, verifies every association, then deletes it again.
   */
  @Test
  @Transactional
  public void testSaveAndGet() throws Exception {
    Assay assay = new Assay();
    assay.setAccession(ANOTHER_ASSAY_ACCESSION);
    assay.setTitle(ANOTHER_ASSAY_TITLE);
    assay.setProjectId(ANOTHER_ASSAY_PROJECT_ID);
    assay.setShortLabel(ANOTHER_ASSAY_SHORT_LABEL);
    assay.setProteinCount(ANOTHER_ASSAY_PROTEIN_COUNT);
    assay.setPeptideCount(ANOTHER_ASSAY_PEPTIDE_COUNT);
    assay.setUniquePeptideCount(ANOTHER_ASSAY_UNIQUE_PEPTIDE_COUNT);
    assay.setIdentifiedSpectrumCount(ANOTHER_ASSAY_IDENTIFIED_SPECTRUM_COUNT);
    assay.setTotalSpectrumCount(ANOTHER_ASSAY_TOTAL_SPECTRUM_COUNT);
    assay.setMs2Annotation(ANOTHER_ASSAY_HAS_MS2_ANNOTATION);
    assay.setChromatogram(ANOTHER_ASSAY_HAS_CHROMATOGRAM);
    assay.setExperimentalFactor(ANOTHER_ASSAY_EXPERIMENTAL_FACTOR);
    // add PTM
    AssayPTM assayPtm = new AssayPTM();
    assayPtm.setAssay(assay);
    // NOTE(review): guarded lookup here, but checkOtherPTMs later asserts this CV param's
    // accession — if CV_PARAM_1_ID is absent the failure surfaces only at verification time.
    if (cvParamRepository.findById(CV_PARAM_1_ID).isPresent())
      assayPtm.setCvParam(cvParamRepository.findById(CV_PARAM_1_ID).get());
    LinkedList<AssayPTM> assayPtms = new LinkedList<>();
    assayPtms.add(assayPtm);
    assay.setPtms(assayPtms);
    // add group params
    AssayGroupCvParam assayGroupCvParam = new AssayGroupCvParam();
    assayGroupCvParam.setAssay(assay);
    assayGroupCvParam.setCvParam(cvParamRepository.findById(CV_PARAM_3_ID).get());
    LinkedList<AssayGroupCvParam> assayGroupCvParams = new LinkedList<>();
    assayGroupCvParams.add(assayGroupCvParam);
    assay.setAssayGroupCvParams(assayGroupCvParams);
    // Add contacts
    Contact contact = new Contact();
    contact.setAffiliation(CONTACT_1_AFFILIATION);
    contact.setAssay(assay);
    contact.setEmail(CONTACT_1_EMAIL);
    contact.setFirstName(CONTACT_1_FIRST_NAME);
    contact.setLastName(CONTACT_1_LAST_NAME);
    contact.setTitle(TitleConstants.Mr);
    LinkedList<Contact> contacts = new LinkedList<>();
    contacts.add(contact);
    assay.setContacts(contacts);
    // add quantification methods
    AssayQuantificationMethodCvParam assayQuantificationMethod =
        new AssayQuantificationMethodCvParam();
    assayQuantificationMethod.setAssay(assay);
    assayQuantificationMethod.setCvParam(cvParamRepository.findById(CV_PARAM_3_ID).get());
    LinkedList<AssayQuantificationMethodCvParam> assayQuantificationMethods = new LinkedList<>();
    assayQuantificationMethods.add(assayQuantificationMethod);
    assay.setQuantificationMethods(assayQuantificationMethods);
    // add quantification methods
    // NOTE(review): comment above is a copy-paste leftover — this section adds a sample param.
    AssaySampleCvParam assaySample = new AssaySampleCvParam();
    assaySample.setAssay(assay);
    assaySample.setCvParam(cvParamRepository.findById(CV_PARAM_3_ID).get());
    LinkedList<AssaySampleCvParam> assaySamples = new LinkedList<>();
    assaySamples.add(assaySample);
    assay.setSamples(assaySamples);
    // set softwares
    setAnotherSoftwaresToAssay(assay);
    // set instruments
    Instrument newInstrument = new Instrument();
    newInstrument.setAssay(assay);
    // NOTE(review): ids 13L/14L/15l/151l/16l below are hard-coded fixture CV param ids —
    // confirm they exist in the test DB.
    CvParam instrParam = cvParamRepository.findById(13L).get();
    newInstrument.setCvParam(instrParam);
    newInstrument.setValue("icr");
    // source
    SourceInstrumentComponent source = new SourceInstrumentComponent();
    source.setInstrument(newInstrument);
    source.setOrder(1);
    Collection<InstrumentComponentCvParam> sourceParams = new ArrayList<>();
    InstrumentComponentCvParam cv1 = new InstrumentComponentCvParam();
    cv1.setCvParam(cvParamRepository.findById(14L).get());
    cv1.setInstrumentComponent(source);
    sourceParams.add(cv1);
    source.setInstrumentComponentCvParams(sourceParams);
    newInstrument.setSources(Collections.singleton(source));
    // analyser
    AnalyzerInstrumentComponent analyzer1 = new AnalyzerInstrumentComponent();
    analyzer1.setInstrument(newInstrument);
    analyzer1.setOrder(2);
    Collection<InstrumentComponentCvParam> analyzerParams = new ArrayList<>();
    InstrumentComponentCvParam cv2 = new InstrumentComponentCvParam();
cv2.setCvParam(cvParamRepository.findById(15l).get());
    cv2.setInstrumentComponent(analyzer1);
    InstrumentComponentCvParam cv3 = new InstrumentComponentCvParam();
    cv3.setCvParam(cvParamRepository.findById(151l).get());
    cv3.setInstrumentComponent(analyzer1);
    analyzerParams.add(cv2);
    analyzerParams.add(cv3);
    analyzer1.setInstrumentComponentCvParams(analyzerParams);
    newInstrument.setAnalyzers(Collections.singleton(analyzer1));
    // detector
    DetectorInstrumentComponent detector = new DetectorInstrumentComponent();
    detector.setInstrument(newInstrument);
    detector.setOrder(3);
    Collection<InstrumentComponentCvParam> detectorParams = new ArrayList<>();
    InstrumentComponentCvParam cv4 = new InstrumentComponentCvParam();
    cv4.setCvParam(cvParamRepository.findById(16l).get());
    cv4.setInstrumentComponent(detector);
    detectorParams.add(cv4);
    detector.setInstrumentComponentCvParams(detectorParams);
    newInstrument.setDetectors(Collections.singleton(detector));
    LinkedList<Instrument> instruments = new LinkedList<>();
    instruments.add(newInstrument);
    assay.setInstruments(instruments);
    assayRepository.save(assay);
    // id set after save
    long newId = assay.getId();
    // update instrumentID
    newInstrumentId = newInstrument.getId();
    // Re-read the persisted assay and verify every association round-tripped.
    Assay other = assayRepository.findById(newId).get();
    checkIsAnotherAssayInDb(other);
    // delete the assay
    assayRepository.delete(other);
  }

  /**
   * Attaches a single software ("Sequest") with one CV param and one user param to
   * {@code assay}. Also persists a brand-new CV param as a side effect.
   */
  private void setAnotherSoftwaresToAssay(Assay assay) {
    Software software = new Software();
    software.setName(ANOTHER_SOFTWARE_NAME);
    software.setCustomization(ANOTHER_SOFTWARE_CUSTOMIZATION);
    software.setVersion(ANOTHER_SOFTWARE_VERSION);
    software.setOrder(ANOTHER_SOFTWARE_ORDER);
    software.setAssay(assay);
    LinkedList<SoftwareCvParam> softwareCvParams = new LinkedList<>();
    // NOTE(review): this CV param is saved here but never cleaned up by the test — confirm
    // @Transactional rollback covers it.
    CvParam cvParam = new CvParam();
    cvParam.setAccession(ANOTHER_CV_PARAM_ACCESSION);
    cvParam.setCvLabel(ANOTHER_CV_PARAM_LABEL);
    cvParam.setName(ANOTHER_CV_PARAM_NAME);
    cvParamRepository.save(cvParam);
    SoftwareCvParam softwareCvParam = new SoftwareCvParam();
    softwareCvParam.setCvParam(cvParam);
    softwareCvParam.setSoftware(software);
    softwareCvParam.setValue(ANOTHER_SOFTWARE_CV_PARAM_VALUE);
    softwareCvParams.add(softwareCvParam);
    software.setSoftwareCvParams(softwareCvParams);
    LinkedList<SoftwareUserParam> softwareUserParams = new LinkedList<>();
    SoftwareUserParam softwareUserParam = new SoftwareUserParam();
    softwareUserParam.setName(ANOTHER_SOFTWARE_USER_PARAM_NAME);
    softwareUserParam.setSoftware(software);
    softwareUserParam.setValue(ANOTHER_SOFTWARE_USER_PARAM_VALUE);
    softwareUserParams.add(softwareUserParam);
    software.setSoftwareUserParams(softwareUserParams);
    LinkedList<Software> softwares = new LinkedList<>();
    softwares.add(software);
    assay.setSoftwares(softwares);
  }

  /** Verifies the scalar fields and all associations of the assay built in testSaveAndGet. */
  private void checkIsAnotherAssayInDb(Assay assay) {
    assertNotNull(assay);
    assertThat(assay.getAccession(), is(ANOTHER_ASSAY_ACCESSION));
    assertThat(assay.getTitle(), is(ANOTHER_ASSAY_TITLE));
    assertThat(assay.getShortLabel(), is(ANOTHER_ASSAY_SHORT_LABEL));
    assertThat(assay.getProteinCount(), is(ANOTHER_ASSAY_PROTEIN_COUNT));
    assertThat(assay.getPeptideCount(), is(ANOTHER_ASSAY_PEPTIDE_COUNT));
    assertThat(assay.getUniquePeptideCount(), is(ANOTHER_ASSAY_UNIQUE_PEPTIDE_COUNT));
    assertThat(assay.getIdentifiedSpectrumCount(), is(ANOTHER_ASSAY_IDENTIFIED_SPECTRUM_COUNT));
    assertThat(assay.getTotalSpectrumCount(), is(ANOTHER_ASSAY_TOTAL_SPECTRUM_COUNT));
    assertThat(assay.hasMs2Annotation(), is(ANOTHER_ASSAY_HAS_MS2_ANNOTATION));
    assertThat(assay.hasChromatogram(), is(ANOTHER_ASSAY_HAS_CHROMATOGRAM));
    assertThat(assay.getExperimentalFactor(), is(ANOTHER_ASSAY_EXPERIMENTAL_FACTOR));
    checkOtherPTMs(assay);
    checkOtherParams(assay);
    checkOtherContacts(assay);
    checkOtherQuantificationMethods(assay);
    checkOtherSamples(assay);
    checkOtherSoftwares(assay);
    checkOtherInstruments(assay);
  }

  /** The saved assay must carry exactly one instrument matching the instrument-1 shape. */
  private void checkOtherInstruments(Assay assay) {
    Collection<Instrument> instruments = assay.getInstruments();
    assertNotNull(instruments);
    assertThat(instruments.size(),
is(NUM_INSTRUMENTS_ANOTHER_ASSAY));
    Instrument instrument = instruments.iterator().next();
    checkIsInstrument1InDb(instrument);
  }

  /** The saved assay must carry exactly one software ("Sequest") with its params. */
  private void checkOtherSoftwares(Assay assay) {
    Collection<Software> softwares = assay.getSoftwares();
    assertNotNull(softwares);
    assertThat(softwares.size(), is(NUM_SOFTWARES_OTHER_ASSAY));
    checkIsAnotherSoftwareInDb(softwares.iterator().next());
  }

  /** Verifies the single sample CV param attached in testSaveAndGet (CV param 3). */
  private void checkOtherSamples(Assay assay) {
    Collection<AssaySampleCvParam> assaySamples = assay.getSamples();
    Assert.assertNotNull(assaySamples);
    // NOTE(review): JUnit assertEquals expects (expected, actual); arguments here are
    // reversed throughout — harmless for pass/fail, confusing in failure messages.
    assertEquals(assaySamples.size(), NUM_OTHER_ASSAY_SAMPLE_PARAM);
    AssaySampleCvParam assaySample = assaySamples.iterator().next();
    assertThat(assaySample.getAssay(), is(assay));
    CvParam cvParam = assaySample.getCvParam();
    Assert.assertNotNull(cvParam);
    Assert.assertThat(cvParam.getId(), is(CV_PARAM_3_ID));
    assertEquals(CV_PARAM_3_LABEL, cvParam.getCvLabel());
    assertEquals(CV_PARAM_3_ACCESSION, cvParam.getAccession());
    assertEquals(CV_PARAM_3_NAME, cvParam.getName());
  }

  /** Verifies the single quantification method attached in testSaveAndGet (CV param 3). */
  private void checkOtherQuantificationMethods(Assay assay) {
    Collection<AssayQuantificationMethodCvParam> quantificationMethods =
        assay.getQuantificationMethods();
    Assert.assertNotNull(quantificationMethods);
    assertEquals(quantificationMethods.size(), NUM_QUANTIFICATION_METHODS_OTHER_ASSAY);
    AssayQuantificationMethodCvParam quantificationMethod =
        quantificationMethods.iterator().next();
    assertThat(quantificationMethod.getAssay(), is(assay));
    CvParam cvParam = quantificationMethod.getCvParam();
    Assert.assertNotNull(cvParam);
    Assert.assertThat(cvParam.getId(), is(CV_PARAM_3_ID));
    assertEquals(CV_PARAM_3_LABEL, cvParam.getCvLabel());
    assertEquals(CV_PARAM_3_ACCESSION, cvParam.getAccession());
    assertEquals(CV_PARAM_3_NAME, cvParam.getName());
  }

  /** Verifies the single contact attached in testSaveAndGet (same values as contact 1). */
  private void checkOtherContacts(Assay assay) {
    Collection<Contact> contacts = assay.getContacts();
    assertNotNull(contacts);
    assertEquals(contacts.size(), NUM_CONTACTS_OTHER_ASSAY);
    Contact contact = contacts.iterator().next();
    assertThat(contact.getAssay(), is(assay));
    assertThat(contact.getAffiliation(), is(CONTACT_1_AFFILIATION));
    assertThat(contact.getEmail(), is(CONTACT_1_EMAIL));
    assertThat(contact.getFirstName(), is(CONTACT_1_FIRST_NAME));
    assertThat(contact.getLastName(), is(CONTACT_1_LAST_NAME));
    assertThat(contact.getTitle(), is(TitleConstants.Mr));
  }

  /** Verifies the single group-level CV param attached in testSaveAndGet (CV param 3). */
  private void checkOtherParams(Assay assay) {
    Collection<ParamProvider> groupParams = assay.getParams();
    Assert.assertNotNull(groupParams);
    assertEquals(groupParams.size(), NUM_GROUP_PARAMS_OTHER_ASSAY);
    AssayCvParam assayCvParam = (AssayCvParam) groupParams.iterator().next();
    assertThat(assayCvParam.getAssay(), is(assay));
    CvParam cvParam = assayCvParam.getCvParam();
    Assert.assertNotNull(cvParam);
    Assert.assertThat(cvParam.getId(), is(CV_PARAM_3_ID));
    assertEquals(CV_PARAM_3_LABEL, cvParam.getCvLabel());
    assertEquals(CV_PARAM_3_ACCESSION, cvParam.getAccession());
    assertEquals(CV_PARAM_3_NAME, cvParam.getName());
  }

  /** Verifies the single PTM attached in testSaveAndGet (CV param 1, MOD:00091). */
  private void checkOtherPTMs(Assay assay) {
    Collection<AssayPTM> ptms = assay.getPtms();
    Assert.assertNotNull(ptms);
    assertEquals(ptms.size(), NUM_PTM_OTHER_ASSAY);
    AssayPTM ptm = ptms.iterator().next();
    assertThat(ptm.getAssay(), is(assay));
    assertThat(ptm.getAccession(), is(CV_PARAM_1_ACCESSION));
    CvParam cvParam = ptm.getCvParam();
    Assert.assertNotNull(cvParam);
    Assert.assertThat(cvParam.getId(), is(CV_PARAM_1_ID));
    assertEquals(CV_PARAM_1_LABEL, cvParam.getCvLabel());
    assertEquals(CV_PARAM_1_ACCESSION, cvParam.getAccession());
    assertEquals(CV_PARAM_1_NAME, cvParam.getName());
  }

  /** Verifies every scalar field and association of fixture assay 1. */
  private void checkIsAssay1InDb(Assay assay) {
    assertNotNull(assay);
    assertThat(assay.getId(), is(ASSAY_1_ID));
    assertThat(assay.getAccession(), is(ASSAY_1_ACCESSION));
    assertThat(assay.getTitle(), is(ASSAY_1_TITLE));
    assertThat(assay.getShortLabel(), is(ASSAY_1_SHORT_LABEL));
    assertThat(assay.getProteinCount(), is(ASSAY_1_PROTEIN_COUNT));
    assertThat(assay.getPeptideCount(), is(ASSAY_1_PEPTIDE_COUNT));
    assertThat(assay.getUniquePeptideCount(),
is(ASSAY_1_UNIQUE_PEPTIDE_COUNT)); assertThat(assay.getIdentifiedSpectrumCount(), is(ASSAY_1_IDENTIFIED_SPECTRUM_COUNT)); assertThat(assay.getTotalSpectrumCount(), is(ASSAY_1_TOTAL_SPECTRUM_COUNT)); assertThat(assay.hasMs2Annotation(), is(ASSAY_1_HAS_MS2_ANNOTATION)); assertThat(assay.hasChromatogram(), is(ASSAY_1_HAS_CHROMATOGRAM)); assertThat(assay.getExperimentalFactor(), is(ASSAY_1_EXPERIMENT_FACTOR)); checkPTMs(assay); checkParams(assay); checkContacts(assay); checkQuantificationMethods(assay); checkSoftwares(assay); checkInstruments(assay); checkSamples(assay); } private void checkIsAssay2InDb(Assay assay) { assertNotNull(assay); assertThat(assay.getId(), is(ASSAY_2_ID)); assertThat(assay.getAccession(), is(ASSAY_2_ACCESSION)); assertThat(assay.getTitle(), is(ASSAY_2_TITLE)); assertThat(assay.getShortLabel(), is(ASSAY_2_SHORT_LABEL)); assertThat(assay.getProteinCount(), is(ASSAY_2_PROTEIN_COUNT)); assertThat(assay.getPeptideCount(), is(ASSAY_2_PEPTIDE_COUNT)); assertThat(assay.getUniquePeptideCount(), is(ASSAY_2_UNIQUE_PEPTIDE_COUNT)); assertThat(assay.getIdentifiedSpectrumCount(), is(ASSAY_2_IDENTIFIED_SPECTRUM_COUNT)); assertThat(assay.getTotalSpectrumCount(), is(ASSAY_2_TOTAL_SPECTRUM_COUNT)); assertThat(assay.hasMs2Annotation(), is(ASSAY_2_HAS_MS2_ANNOTATION)); assertThat(assay.hasChromatogram(), is(ASSAY_2_HAS_CHROMATOGRAM)); assertThat(assay.getExperimentalFactor(), is(ASSAY_2_EXPERIMENT_FACTOR)); } private void checkInstruments(Assay assay) { Collection<Instrument> instruments = assay.getInstruments(); assertNotNull(instruments); assertThat(instruments.size(), is(NUM_INSTRUMENTS_ASSAY_1)); Instrument instrument = instruments.iterator().next(); checkIsInstrument1InDb(instrument); } private void checkSoftwares(Assay assay) { Collection<Software> softwares = assay.getSoftwares(); assertNotNull(softwares); assertThat(softwares.size(), is(NUM_SOFTWARES_ASSAY_1)); checkIsSoftware1InDb(softwares.iterator().next()); } private void 
checkQuantificationMethods(Assay assay) { Collection<AssayQuantificationMethodCvParam> quantificationMethods = assay.getQuantificationMethods(); Assert.assertNotNull(quantificationMethods); assertEquals(quantificationMethods.size(), NUM_QUANTIFICATION_METHODS_ASSAY_1); AssayQuantificationMethodCvParam quantificationMethod = quantificationMethods.iterator().next(); assertThat(quantificationMethod.getId(), is(QUANTIFICATION_METHOD_1_ID)); CvParam cvParam = quantificationMethod.getCvParam(); Assert.assertNotNull(cvParam); Assert.assertThat(cvParam.getId(), is(CV_PARAM_3_ID)); assertEquals(CV_PARAM_3_LABEL, cvParam.getCvLabel()); assertEquals(CV_PARAM_3_ACCESSION, cvParam.getAccession()); assertEquals(CV_PARAM_3_NAME, cvParam.getName()); } private void checkContacts(Assay assay) { Collection<Contact> contacts = assay.getContacts(); assertNotNull(contacts); assertEquals(contacts.size(), NUM_CONTACTS_ASSAY_1); Contact contact = contacts.iterator().next(); assertThat(contact.getId(), is(CONTACT_1_ID)); assertThat(contact.getAffiliation(), is(CONTACT_1_AFFILIATION)); assertThat(contact.getEmail(), is(CONTACT_1_EMAIL)); assertThat(contact.getFirstName(), is(CONTACT_1_FIRST_NAME)); assertThat(contact.getLastName(), is(CONTACT_1_LAST_NAME)); assertThat(contact.getTitle(), is(TitleConstants.Mr)); } private void checkParams(Assay assay) { Collection<ParamProvider> groupParams = assay.getParams(); Assert.assertNotNull(groupParams); assertEquals(groupParams.size(), NUM_GROUP_PARAMS_ASSAY_1); AssayCvParam assayCvParam = (AssayCvParam) groupParams.iterator().next(); assertThat(assayCvParam.getId(), is(GROUP_PARAM_1_ID)); CvParam cvParam = assayCvParam.getCvParam(); Assert.assertNotNull(cvParam); Assert.assertThat(cvParam.getId(), is(CV_PARAM_3_ID)); assertEquals(CV_PARAM_3_LABEL, cvParam.getCvLabel()); assertEquals(CV_PARAM_3_ACCESSION, cvParam.getAccession()); assertEquals(CV_PARAM_3_NAME, cvParam.getName()); } private void checkSamples(Assay assay) { 
Collection<AssaySampleCvParam> assaySamples = assay.getSamples(); Assert.assertNotNull(assaySamples); assertEquals(assaySamples.size(), NUM_ASSAY_SAMPLE_PARAM_PROJECT_1); AssaySampleCvParam assaySample = assaySamples.iterator().next(); assertThat(assaySample.getId(), is(ASSAY_SAMPLE_PARAM_1_ID)); CvParam cvParam = assaySample.getCvParam(); Assert.assertNotNull(cvParam); Assert.assertThat(cvParam.getId(), is(CV_PARAM_3_ID)); assertEquals(CV_PARAM_3_LABEL, cvParam.getCvLabel()); assertEquals(CV_PARAM_3_ACCESSION, cvParam.getAccession()); assertEquals(CV_PARAM_3_NAME, cvParam.getName()); } private void checkPTMs(Assay assay) { Collection<AssayPTM> ptms = assay.getPtms(); Assert.assertNotNull(ptms); assertEquals(ptms.size(), NUM_PTM_ASSAY_1); AssayPTM ptm = ptms.iterator().next(); assertThat(ptm.getId(), is(PTM_1_ID)); CvParam cvParam = ptm.getCvParam(); Assert.assertNotNull(cvParam); Assert.assertThat(cvParam.getId(), is(CV_PARAM_1_ID)); assertEquals(CV_PARAM_1_LABEL, cvParam.getCvLabel()); assertEquals(CV_PARAM_1_ACCESSION, cvParam.getAccession()); assertEquals(CV_PARAM_1_NAME, cvParam.getName()); } private void checkIsSoftware1InDb(Software software) { assertThat(software.getId(), is(SOFTWARE_1_ID)); assertThat(software.getCustomization(), is(Collections.singletonList((SOFTWARE_1_CUSTOMIZATION)))); assertThat(software.getName(), is(SOFTWARE_1_NAME)); assertThat(software.getVersion(), is(SOFTWARE_1_VERSION)); checkParams(software); } private void checkParams(Software software) { Collection<ParamProvider> params = software.getParams(); assertNotNull(params); assertThat(params.size(), is(NUM_PARAMS_SOFTWARE_1)); } private void checkIsAnotherSoftwareInDb(Software software) { assertThat(software.getCustomization(), is(Collections.singletonList(ANOTHER_SOFTWARE_CUSTOMIZATION))); assertThat(software.getName(), is(ANOTHER_SOFTWARE_NAME)); assertThat(software.getVersion(), is(ANOTHER_SOFTWARE_VERSION)); checkAnotherSoftwareParams(software); } private void 
checkAnotherSoftwareParams(Software software) { Collection<ParamProvider> params = software.getParams(); assertNotNull(params); assertThat(params.size(), is(NUM_PARAMS_ANOTHER_SOFTWARE)); Iterator<ParamProvider> paramsIt = params.iterator(); // check cv param SoftwareCvParam softwareCvParam = (SoftwareCvParam) paramsIt.next(); assertNotNull(softwareCvParam); assertThat(softwareCvParam.getValue(), is(ANOTHER_SOFTWARE_CV_PARAM_VALUE)); CvParam cvParam = softwareCvParam.getCvParam(); assertNotNull(cvParam); assertThat(cvParam.getCvLabel(), is(ANOTHER_CV_PARAM_LABEL)); assertThat(cvParam.getAccession(), is(ANOTHER_CV_PARAM_ACCESSION)); assertThat(cvParam.getName(), is(ANOTHER_CV_PARAM_NAME)); // check user param SoftwareUserParam softwareUserParam = (SoftwareUserParam) paramsIt.next(); assertNotNull(softwareUserParam); assertThat(softwareUserParam.getName(), is(ANOTHER_SOFTWARE_USER_PARAM_NAME)); assertThat(softwareUserParam.getValue(), is(ANOTHER_SOFTWARE_USER_PARAM_VALUE)); } }
/*
 * Copyright (C) 2012 Zach Melamed
 *
 * Latest version available online at https://github.com/zach-m/jonix
 * Contact me at zach@tectonica.co.il
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.tectonica.jonix.onix2;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import com.tectonica.jonix.JPU;
import com.tectonica.jonix.OnixComposite.OnixSuperComposite;
import com.tectonica.jonix.codelist.BookFormDetails;
import com.tectonica.jonix.codelist.LanguageCodes;
import com.tectonica.jonix.codelist.ProductContentTypes;
import com.tectonica.jonix.codelist.ProductFormDetails;
import com.tectonica.jonix.codelist.ProductFormFeatureTypes;
import com.tectonica.jonix.codelist.ProductForms;
import com.tectonica.jonix.codelist.ProductIdentifierTypes;
import com.tectonica.jonix.codelist.ProductPackagingTypes;
import com.tectonica.jonix.codelist.RecordSourceTypes;
import com.tectonica.jonix.codelist.TextCaseFlags;
import com.tectonica.jonix.codelist.TextFormats;
import com.tectonica.jonix.codelist.TradeCategorys;
import com.tectonica.jonix.codelist.TransliterationSchemes;
import com.tectonica.jonix.struct.JonixProductFormFeature;
import com.tectonica.jonix.struct.JonixProductIdentifier;

/*
 * NOTE: THIS IS AN AUTO-GENERATED FILE, DON'T EDIT MANUALLY
 */

/**
 * ONIX 2 &lt;ContainedItem&gt; composite.
 * <p>
 * Populated from a DOM {@code Element} by the parsing constructor below; each child element
 * encountered is mapped onto the matching field (single-valued fields are overwritten, list-valued
 * fields are appended via {@link JPU#addToList}).
 * <p>
 * NOTE(review): per the generator's convention, all "getXValue(s)" accessors return {@code null}
 * when the corresponding element was absent — callers must null-check.
 */
@SuppressWarnings("serial")
public class ContainedItem implements OnixSuperComposite, Serializable
{
	// ONIX reference and short tag names recognized for this composite
	public static final String refname = "ContainedItem";
	public static final String shortname = "containeditem";

	// Attributes common to ONIX elements, read off the composite's own tag
	public TextFormats textformat;
	public TextCaseFlags textcase;
	public LanguageCodes language;
	public TransliterationSchemes transliteration;

	/**
	 * (type: DateOrDateTime)
	 */
	public String datestamp;

	public RecordSourceTypes sourcetype;
	public String sourcename;

	/**
	 * (this field is required)
	 */
	public ISBN isbn;

	/**
	 * (this field is optional)
	 */
	public EAN13 ean13;

	/**
	 * (this list may be empty)
	 */
	public List<ProductIdentifier> productIdentifiers;

	/**
	 * (this field is optional)
	 */
	public ProductForm productForm;

	/**
	 * (this list may be empty)
	 */
	public List<ProductFormDetail> productFormDetails;

	/**
	 * (this list may be empty)
	 */
	public List<ProductFormFeature> productFormFeatures;

	/**
	 * (this list may be empty)
	 */
	public List<BookFormDetail> bookFormDetails;

	/**
	 * (this field is optional)
	 */
	public ProductPackaging productPackaging;

	/**
	 * (this field is optional)
	 */
	public ProductFormDescription productFormDescription;

	/**
	 * (this field is optional)
	 */
	public NumberOfPieces numberOfPieces;

	/**
	 * (this field is optional)
	 */
	public TradeCategory tradeCategory;

	/**
	 * (this list may be empty)
	 */
	public List<ProductContentType> productContentTypes;

	/**
	 * (this field is optional)
	 */
	public ItemQuantity itemQuantity;

	/** No-arg constructor: leaves every field null for manual population. */
	public ContainedItem()
	{}

	/**
	 * Parsing constructor: reads the composite's attributes, then dispatches each child element
	 * by tag name (reference or short form) into the matching field.
	 */
	public ContainedItem(org.w3c.dom.Element element)
	{
		// common ONIX attributes; byValue(..) returns null for an absent/unknown attribute
		textformat = TextFormats.byValue(JPU.getAttribute(element, "textformat"));
		textcase = TextCaseFlags.byValue(JPU.getAttribute(element, "textcase"));
		language = LanguageCodes.byValue(JPU.getAttribute(element, "language"));
		transliteration = TransliterationSchemes.byValue(JPU.getAttribute(element, "transliteration"));
		datestamp = JPU.getAttribute(element, "datestamp");
		sourcetype = RecordSourceTypes.byValue(JPU.getAttribute(element, "sourcetype"));
		sourcename = JPU.getAttribute(element, "sourcename");

		JPU.forElementsOf(element, new JPU.ElementListener()
		{
			@Override
			public void onElement(org.w3c.dom.Element element)
			{
				final String name = element.getNodeName();
				// each branch matches either the reference tag or the short tag
				if (name.equals(ISBN.refname) || name.equals(ISBN.shortname))
					isbn = new ISBN(element);
				else if (name.equals(EAN13.refname) || name.equals(EAN13.shortname))
					ean13 = new EAN13(element);
				else if (name.equals(ProductIdentifier.refname) || name.equals(ProductIdentifier.shortname))
					productIdentifiers = JPU.addToList(productIdentifiers, new ProductIdentifier(element));
				else if (name.equals(ProductForm.refname) || name.equals(ProductForm.shortname))
					productForm = new ProductForm(element);
				else if (name.equals(ProductFormDetail.refname) || name.equals(ProductFormDetail.shortname))
					productFormDetails = JPU.addToList(productFormDetails, new ProductFormDetail(element));
				else if (name.equals(ProductFormFeature.refname) || name.equals(ProductFormFeature.shortname))
					productFormFeatures = JPU.addToList(productFormFeatures, new ProductFormFeature(element));
				else if (name.equals(BookFormDetail.refname) || name.equals(BookFormDetail.shortname))
					bookFormDetails = JPU.addToList(bookFormDetails, new BookFormDetail(element));
				else if (name.equals(ProductPackaging.refname) || name.equals(ProductPackaging.shortname))
					productPackaging = new ProductPackaging(element);
				else if (name.equals(ProductFormDescription.refname) || name.equals(ProductFormDescription.shortname))
					productFormDescription = new ProductFormDescription(element);
				else if (name.equals(NumberOfPieces.refname) || name.equals(NumberOfPieces.shortname))
					numberOfPieces = new NumberOfPieces(element);
				else if (name.equals(TradeCategory.refname) || name.equals(TradeCategory.shortname))
					tradeCategory = new TradeCategory(element);
				else if (name.equals(ProductContentType.refname) || name.equals(ProductContentType.shortname))
					productContentTypes = JPU.addToList(productContentTypes, new ProductContentType(element));
				else if (name.equals(ItemQuantity.refname) || name.equals(ItemQuantity.shortname))
					itemQuantity = new ItemQuantity(element);
			}
		});
	}

	/** @return the ISBN text, or null if the element was absent */
	public String getISBNValue()
	{
		return (isbn == null) ? null : isbn.value;
	}

	/** @return the EAN-13 text, or null if the element was absent */
	public String getEAN13Value()
	{
		return (ean13 == null) ? null : ean13.value;
	}

	/** @return the product-form code, or null if the element was absent */
	public ProductForms getProductFormValue()
	{
		return (productForm == null) ? null : productForm.value;
	}

	/** @return the list of product-form-detail codes, or null if none were present */
	public List<ProductFormDetails> getProductFormDetailValues()
	{
		if (productFormDetails != null)
		{
			List<ProductFormDetails> list = new ArrayList<>();
			for (ProductFormDetail i : productFormDetails)
				list.add(i.value);
			return list;
		}
		return null;
	}

	/** @return the list of book-form-detail codes, or null if none were present */
	public List<BookFormDetails> getBookFormDetailValues()
	{
		if (bookFormDetails != null)
		{
			List<BookFormDetails> list = new ArrayList<>();
			for (BookFormDetail i : bookFormDetails)
				list.add(i.value);
			return list;
		}
		return null;
	}

	/** @return the product-packaging code, or null if the element was absent */
	public ProductPackagingTypes getProductPackagingValue()
	{
		return (productPackaging == null) ? null : productPackaging.value;
	}

	/** @return the free-text product-form description, or null if the element was absent */
	public String getProductFormDescriptionValue()
	{
		return (productFormDescription == null) ? null : productFormDescription.value;
	}

	/** @return the number-of-pieces text, or null if the element was absent */
	public String getNumberOfPiecesValue()
	{
		return (numberOfPieces == null) ? null : numberOfPieces.value;
	}

	/** @return the trade-category code, or null if the element was absent */
	public TradeCategorys getTradeCategoryValue()
	{
		return (tradeCategory == null) ? null : tradeCategory.value;
	}

	/** @return the list of product-content-type codes, or null if none were present */
	public List<ProductContentTypes> getProductContentTypeValues()
	{
		if (productContentTypes != null)
		{
			List<ProductContentTypes> list = new ArrayList<>();
			for (ProductContentType i : productContentTypes)
				list.add(i.value);
			return list;
		}
		return null;
	}

	/** @return the item-quantity text, or null if the element was absent */
	public String getItemQuantityValue()
	{
		return (itemQuantity == null) ? null : itemQuantity.value;
	}

	/**
	 * Finds the first product identifier of the given type.
	 *
	 * @return the matching identifier as a Jonix struct, or null if none matches
	 */
	public JonixProductIdentifier findProductIdentifier(ProductIdentifierTypes productIDType)
	{
		if (productIdentifiers != null)
		{
			for (ProductIdentifier x : productIdentifiers)
			{
				if (x.getProductIDTypeValue() == productIDType)
					return x.asJonixProductIdentifier();
			}
		}
		return null;
	}

	/**
	 * Finds all product identifiers whose type is in the given set.
	 * A null set matches everything; returns null if no identifiers exist at all.
	 */
	public List<JonixProductIdentifier> findProductIdentifiers(java.util.Set<ProductIdentifierTypes> productIDTypes)
	{
		if (productIdentifiers != null)
		{
			List<JonixProductIdentifier> matches = new ArrayList<>();
			for (ProductIdentifier x : productIdentifiers)
			{
				if (productIDTypes == null || productIDTypes.contains(x.getProductIDTypeValue()))
					matches.add(x.asJonixProductIdentifier());
			}
			return matches;
		}
		return null;
	}

	/**
	 * Finds the first product-form feature of the given type.
	 *
	 * @return the matching feature as a Jonix struct, or null if none matches
	 */
	public JonixProductFormFeature findProductFormFeature(ProductFormFeatureTypes productFormFeatureType)
	{
		if (productFormFeatures != null)
		{
			for (ProductFormFeature x : productFormFeatures)
			{
				if (x.getProductFormFeatureTypeValue() == productFormFeatureType)
					return x.asJonixProductFormFeature();
			}
		}
		return null;
	}

	/**
	 * Finds all product-form features whose type is in the given set.
	 * A null set matches everything; returns null if no features exist at all.
	 */
	public List<JonixProductFormFeature> findProductFormFeatures(
			java.util.Set<ProductFormFeatureTypes> productFormFeatureTypes)
	{
		if (productFormFeatures != null)
		{
			List<JonixProductFormFeature> matches = new ArrayList<>();
			for (ProductFormFeature x : productFormFeatures)
			{
				if (productFormFeatureTypes == null
						|| productFormFeatureTypes.contains(x.getProductFormFeatureTypeValue()))
					matches.add(x.asJonixProductFormFeature());
			}
			return matches;
		}
		return null;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.server;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.DefaultGenericQueryMetricsFactory;
import org.apache.druid.query.MapQueryToolChestWarehouse;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QuerySegmentWalker;
import org.apache.druid.query.QueryToolChestWarehouse;
import org.apache.druid.query.Result;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.query.timeboundary.TimeBoundaryResultValue;
import org.apache.druid.server.log.TestRequestLogger;
import org.apache.druid.server.metrics.NoopServiceEmitter;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.Authorizer;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.server.security.Resource;
import org.easymock.EasyMock;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Unit tests for {@link QueryResource}: happy-path and malformed queries, plus
 * authorization and (authorized / denied) query-cancellation flows.
 * <p>
 * Review notes on fixes applied here:
 * <ul>
 *   <li>{@code Throwables.propagate(e)} (deprecated in Guava) replaced with
 *       {@code throw new RuntimeException(e)}, its documented equivalent.</li>
 *   <li>{@code getBytes("UTF-8")} replaced with {@code getBytes(StandardCharsets.UTF_8)},
 *       which cannot throw {@code UnsupportedEncodingException}.</li>
 *   <li>Single-thread cancellation executors are now shut down instead of leaked.</li>
 * </ul>
 */
public class QueryResourceTest
{
  private static final QueryToolChestWarehouse warehouse = new MapQueryToolChestWarehouse(ImmutableMap.of());
  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
  private static final AuthenticationResult authenticationResult = new AuthenticationResult("druid", "druid", null, null);

  private final HttpServletRequest testServletRequest = EasyMock.createMock(HttpServletRequest.class);

  /**
   * Walker whose runners always produce an empty sequence — the tests only exercise
   * the HTTP/auth plumbing of QueryResource, not actual query execution.
   */
  public static final QuerySegmentWalker testSegmentWalker = new QuerySegmentWalker()
  {
    @Override
    public <T> QueryRunner<T> getQueryRunnerForIntervals(
        Query<T> query,
        Iterable<Interval> intervals
    )
    {
      return new QueryRunner<T>()
      {
        @Override
        public Sequence<T> run(QueryPlus<T> query, Map<String, Object> responseContext)
        {
          return Sequences.empty();
        }
      };
    }

    @Override
    public <T> QueryRunner<T> getQueryRunnerForSegments(
        Query<T> query,
        Iterable<SegmentDescriptor> specs
    )
    {
      return getQueryRunnerForIntervals(null, null);
    }
  };

  private static final ServiceEmitter noopServiceEmitter = new NoopServiceEmitter();

  private QueryResource queryResource;
  private QueryManager queryManager;
  private TestRequestLogger testRequestLogger;

  @BeforeClass
  public static void staticSetup()
  {
    // EmittingLogger requires a registered emitter before any logging happens
    EmittingLogger.registerEmitter(noopServiceEmitter);
  }

  @Before
  public void setup()
  {
    // Common request-mock expectations shared by every test; each test adds its
    // own auth-attribute expectations before calling EasyMock.replay.
    EasyMock.expect(testServletRequest.getContentType()).andReturn(MediaType.APPLICATION_JSON).anyTimes();
    EasyMock.expect(testServletRequest.getHeader(QueryResource.HEADER_IF_NONE_MATCH)).andReturn(null).anyTimes();
    EasyMock.expect(testServletRequest.getRemoteAddr()).andReturn("localhost").anyTimes();
    queryManager = new QueryManager();
    testRequestLogger = new TestRequestLogger();
    queryResource = new QueryResource(
        new QueryLifecycleFactory(
            warehouse,
            testSegmentWalker,
            new DefaultGenericQueryMetricsFactory(jsonMapper),
            new NoopServiceEmitter(),
            testRequestLogger,
            new AuthConfig(),
            AuthTestUtils.TEST_AUTHORIZER_MAPPER
        ),
        jsonMapper,
        jsonMapper,
        queryManager,
        new AuthConfig(),
        null,
        new DefaultGenericQueryMetricsFactory(jsonMapper)
    );
  }

  private static final String simpleTimeSeriesQuery =
      "{\n"
      + "    \"queryType\": \"timeseries\",\n"
      + "    \"dataSource\": \"mmx_metrics\",\n"
      + "    \"granularity\": \"hour\",\n"
      + "    \"intervals\": [\n"
      + "      \"2014-12-17/2015-12-30\"\n"
      + "    ],\n"
      + "    \"aggregations\": [\n"
      + "      {\n"
      + "        \"type\": \"count\",\n"
      + "        \"name\": \"rows\"\n"
      + "      }\n"
      + "    ]\n"
      + "}";

  /** A well-formed query through the default (permit-all) authorizer returns a response. */
  @Test
  public void testGoodQuery() throws IOException
  {
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
            .andReturn(null)
            .anyTimes();
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
            .andReturn(authenticationResult)
            .anyTimes();
    testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(testServletRequest);
    Response response = queryResource.doPost(
        new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes(StandardCharsets.UTF_8)),
        null /*pretty*/,
        testServletRequest
    );
    Assert.assertNotNull(response);
  }

  /** Unparseable input must yield a 500 response, not an exception. */
  @Test
  public void testBadQuery() throws IOException
  {
    EasyMock.replay(testServletRequest);
    Response response = queryResource.doPost(
        new ByteArrayInputStream("Meka Leka Hi Meka Hiney Ho".getBytes(StandardCharsets.UTF_8)),
        null /*pretty*/,
        testServletRequest
    );
    Assert.assertNotNull(response);
    Assert.assertEquals(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus());
  }

  /**
   * With an authorizer that only permits the "allow" datasource: an unauthorized query
   * throws ForbiddenException, an authorized one succeeds and is request-logged.
   */
  @Test
  public void testSecuredQuery() throws Exception
  {
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
            .andReturn(null)
            .anyTimes();
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
            .andReturn(authenticationResult)
            .anyTimes();

    // first request is denied (checked=false), second is allowed (checked=true)
    testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false);
    EasyMock.expectLastCall().times(1);

    testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
    EasyMock.expectLastCall().times(1);

    EasyMock.replay(testServletRequest);

    AuthorizerMapper authMapper = new AuthorizerMapper(null)
    {
      @Override
      public Authorizer getAuthorizer(String name)
      {
        return new Authorizer()
        {
          @Override
          public Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action)
          {
            // only the "allow" datasource is accessible
            if (resource.getName().equals("allow")) {
              return new Access(true);
            } else {
              return new Access(false);
            }
          }
        };
      }
    };

    queryResource = new QueryResource(
        new QueryLifecycleFactory(
            warehouse,
            testSegmentWalker,
            new DefaultGenericQueryMetricsFactory(jsonMapper),
            new NoopServiceEmitter(),
            testRequestLogger,
            new AuthConfig(),
            authMapper
        ),
        jsonMapper,
        jsonMapper,
        queryManager,
        new AuthConfig(),
        authMapper,
        new DefaultGenericQueryMetricsFactory(jsonMapper)
    );

    try {
      queryResource.doPost(
          new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes(StandardCharsets.UTF_8)),
          null /*pretty*/,
          testServletRequest
      );
      Assert.fail("doPost did not throw ForbiddenException for an unauthorized query");
    }
    catch (ForbiddenException e) {
      // expected: the timeseries query targets a datasource other than "allow"
    }

    Response response = queryResource.doPost(
        new ByteArrayInputStream("{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\"}".getBytes(StandardCharsets.UTF_8)),
        null /*pretty*/,
        testServletRequest
    );

    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ((StreamingOutput) response.getEntity()).write(baos);
    final List<Result<TimeBoundaryResultValue>> responses = jsonMapper.readValue(
        baos.toByteArray(),
        new TypeReference<List<Result<TimeBoundaryResultValue>>>() {}
    );

    Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    Assert.assertEquals(0, responses.size());
    Assert.assertEquals(1, testRequestLogger.getLogs().size());
    Assert.assertEquals(true, testRequestLogger.getLogs().get(0).getQueryStats().getStats().get("success"));
    Assert.assertEquals("druid", testRequestLogger.getLogs().get(0).getQueryStats().getStats().get("identity"));
  }

  /**
   * A query blocked inside authorization is cancelled from another thread; cancellation
   * interrupts the query thread and the query returns an error response.
   */
  @Test(timeout = 60_000L)
  public void testSecuredCancelQuery() throws Exception
  {
    final CountDownLatch waitForCancellationLatch = new CountDownLatch(1);
    final CountDownLatch waitFinishLatch = new CountDownLatch(2);
    final CountDownLatch startAwaitLatch = new CountDownLatch(1);
    final CountDownLatch cancelledCountDownLatch = new CountDownLatch(1);

    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
            .andReturn(null)
            .anyTimes();
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
            .andReturn(authenticationResult)
            .anyTimes();
    testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
    EasyMock.expectLastCall().times(1);

    EasyMock.replay(testServletRequest);

    AuthorizerMapper authMapper = new AuthorizerMapper(null)
    {
      @Override
      public Authorizer getAuthorizer(String name)
      {
        return new Authorizer()
        {
          @Override
          public Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action)
          {
            // READ action corresponds to the query
            // WRITE corresponds to cancellation of query
            if (action.equals(Action.READ)) {
              try {
                // Countdown startAwaitLatch as we want query cancellation to happen
                // after we enter isAuthorized method so that we can handle the
                // InterruptedException here because of query cancellation
                startAwaitLatch.countDown();
                waitForCancellationLatch.await();
              }
              catch (InterruptedException e) {
                // When the query is cancelled the control will reach here,
                // countdown the latch and rethrow the exception so that error response is returned for the query
                cancelledCountDownLatch.countDown();
                throw new RuntimeException(e);
              }
              return new Access(true);
            } else {
              return new Access(true);
            }
          }
        };
      }
    };

    queryResource = new QueryResource(
        new QueryLifecycleFactory(
            warehouse,
            testSegmentWalker,
            new DefaultGenericQueryMetricsFactory(jsonMapper),
            new NoopServiceEmitter(),
            testRequestLogger,
            new AuthConfig(),
            authMapper
        ),
        jsonMapper,
        jsonMapper,
        queryManager,
        new AuthConfig(),
        authMapper,
        new DefaultGenericQueryMetricsFactory(jsonMapper)
    );

    final String queryString = "{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\","
                               + "\"context\":{\"queryId\":\"id_1\"}}";
    ObjectMapper mapper = new DefaultObjectMapper();
    Query query = mapper.readValue(queryString, Query.class);

    ListenableFuture future = MoreExecutors.listeningDecorator(
        Execs.singleThreaded("test_query_resource_%s")
    ).submit(
        new Runnable()
        {
          @Override
          public void run()
          {
            try {
              // the cancelled query must come back as an error response
              Response response = queryResource.doPost(
                  new ByteArrayInputStream(queryString.getBytes(StandardCharsets.UTF_8)),
                  null,
                  testServletRequest
              );

              Assert.assertEquals(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus());
            }
            catch (IOException e) {
              throw new RuntimeException(e);
            }
            waitFinishLatch.countDown();
          }
        }
    );

    queryManager.registerQuery(query, future);
    startAwaitLatch.await();

    final ExecutorService cancelExecutor = Executors.newSingleThreadExecutor();
    cancelExecutor.submit(
        new Runnable()
        {
          @Override
          public void run()
          {
            Response response = queryResource.cancelQuery("id_1", testServletRequest);
            Assert.assertEquals(Response.Status.ACCEPTED.getStatusCode(), response.getStatus());
            waitForCancellationLatch.countDown();
            waitFinishLatch.countDown();
          }
        }
    );

    waitFinishLatch.await();
    cancelledCountDownLatch.await();
    // don't leak the cancellation thread
    cancelExecutor.shutdownNow();
  }

  /**
   * Cancellation is denied by the authorizer: cancelQuery throws ForbiddenException and the
   * original (allowed) query completes normally once the cancellation latch is released.
   */
  @Test(timeout = 60_000L)
  public void testDenySecuredCancelQuery() throws Exception
  {
    final CountDownLatch waitForCancellationLatch = new CountDownLatch(1);
    final CountDownLatch waitFinishLatch = new CountDownLatch(2);
    final CountDownLatch startAwaitLatch = new CountDownLatch(1);

    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
            .andReturn(null)
            .anyTimes();
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
    EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
            .andReturn(authenticationResult)
            .anyTimes();

    // query itself is authorized (checked=true); cancellation attempt is denied (checked=false)
    testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
    EasyMock.expectLastCall().times(1);

    testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false);
    EasyMock.expectLastCall().times(1);

    EasyMock.replay(testServletRequest);

    AuthorizerMapper authMapper = new AuthorizerMapper(null)
    {
      @Override
      public Authorizer getAuthorizer(String name)
      {
        return new Authorizer()
        {
          @Override
          public Access authorize(AuthenticationResult authenticationResult, Resource resource, Action action)
          {
            // READ action corresponds to the query
            // WRITE corresponds to cancellation of query
            if (action.equals(Action.READ)) {
              try {
                waitForCancellationLatch.await();
              }
              catch (InterruptedException e) {
                throw new RuntimeException(e);
              }
              return new Access(true);
            } else {
              // Deny access to cancel the query
              return new Access(false);
            }
          }
        };
      }
    };

    queryResource = new QueryResource(
        new QueryLifecycleFactory(
            warehouse,
            testSegmentWalker,
            new DefaultGenericQueryMetricsFactory(jsonMapper),
            new NoopServiceEmitter(),
            testRequestLogger,
            new AuthConfig(),
            authMapper
        ),
        jsonMapper,
        jsonMapper,
        queryManager,
        new AuthConfig(),
        authMapper,
        new DefaultGenericQueryMetricsFactory(jsonMapper)
    );

    final String queryString = "{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\","
                               + "\"context\":{\"queryId\":\"id_1\"}}";
    ObjectMapper mapper = new DefaultObjectMapper();
    Query query = mapper.readValue(queryString, Query.class);

    ListenableFuture future = MoreExecutors.listeningDecorator(
        Execs.singleThreaded("test_query_resource_%s")
    ).submit(
        new Runnable()
        {
          @Override
          public void run()
          {
            try {
              startAwaitLatch.countDown();
              Response response = queryResource.doPost(
                  new ByteArrayInputStream(queryString.getBytes(StandardCharsets.UTF_8)),
                  null,
                  testServletRequest
              );
              Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
            }
            catch (IOException e) {
              throw new RuntimeException(e);
            }
            waitFinishLatch.countDown();
          }
        }
    );

    queryManager.registerQuery(query, future);
    startAwaitLatch.await();

    final ExecutorService cancelExecutor = Executors.newSingleThreadExecutor();
    cancelExecutor.submit(
        new Runnable()
        {
          @Override
          public void run()
          {
            try {
              queryResource.cancelQuery("id_1", testServletRequest);
            }
            catch (ForbiddenException e) {
              // expected: the authorizer denies WRITE (cancellation); release the query
              waitForCancellationLatch.countDown();
              waitFinishLatch.countDown();
            }
          }
        }
    );

    waitFinishLatch.await();
    // don't leak the cancellation thread
    cancelExecutor.shutdownNow();
  }

  @After
  public void tearDown()
  {
    EasyMock.verify(testServletRequest);
  }
}
package shijimi.json;

import java.io.File;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.net.InetAddress;
import java.net.URI;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;

import javax.swing.text.Document;

import shijimi.base.ExceptionShell;
import shijimi.base.ReflectionShell;
import shijimi.base.UtilShell;
import shijimi.type.TypeShell;

/**
 * Converts an arbitrary Java object graph ({@code source}) into a JSON-like
 * structure of {@code Map}/{@code List}/scalar values, optionally merging into
 * an existing {@code json} structure in place (existing map/list entries are
 * reused and stale entries removed).
 *
 * <p>Conversion is driven by a chain of {@link FromRule}s (see
 * {@link #getDefaultRule()}); bean properties are filtered/converted by a chain
 * of {@link PropertyFromRule}s. When a value's runtime class differs from its
 * statically declared type, the output is tagged with a {@code "#class"} entry
 * (non-map values are wrapped as {@code {"#class":..., "#value":...}}) so a
 * reverse mapper can reconstruct the concrete type.
 *
 * <p>A rule may return the {@link #SKIP} sentinel to omit a value entirely.
 */
public class JsonFromValueMapper {
    protected Object source;                 // root value to convert
    protected Object json;                   // optional pre-existing JSON structure, updated in place
    protected FromRule rule;                 // value-conversion rule chain
    protected PropertyFromRule propertyRule; // bean-property rule chain

    /** Sentinel returned by rules to mean "omit this value from the output". Compared via equals() in {@link #isSkip(Object)}. */
    public static Object SKIP = new Object();

    /** Creates a mapper with the default rule chains; source/json must be set separately. */
    public JsonFromValueMapper() {
        rule = getDefaultRule();
        propertyRule = getDefaultPropertyRule();
    }

    /**
     * Creates a mapper for the given source value.
     *
     * @param source the object graph to convert
     * @param json   an existing JSON structure to merge into, or null to build a fresh one
     */
    public JsonFromValueMapper(Object source, Object json) {
        this();
        this.source = source;
        this.json = json;
    }

    public FromRule getRule() {
        return rule;
    }

    public void setRule(FromRule rule) {
        this.rule = rule;
    }

    /**
     * Builds the default rule chain. Order matters: null handling first, then
     * special-cased JDK types, scalars, collections, maps; JDK classes that fell
     * through all of the above are excluded (emitted as SKIP) before the generic
     * bean rule runs last.
     */
    public FromRule getDefaultRule() {
        return new FromRuleList().add(
                new NullFromRule(getJDKExcludeRule()),
                getDefaultSpecialRule(),
                new ValueFromRule(),
                new ListFromRule(),
                new MapFromRule(),
                getJDKExcludeRule(),
                new ObjectFromRule());
    }

    /**
     * Rules for JDK types that serialize to a single string (or a small map),
     * each paired (in the trailing comment) with the constructor/factory a
     * reverse mapper would use to rebuild the value.
     */
    public FromRule getDefaultSpecialRule() {
        try {
            return new FromRuleList().add(
                    new MethodFromRule(MethodInvoker.getGetter(File.class, "getPath")), //new File(String)
                    new MethodFromRule(MethodInvoker.getGetter(URL.class, "toExternalForm")), //new URL(String)
                    new MethodFromRule(MethodInvoker.getGetter(URI.class, "toString"), URI.class), //new URI(String)
                    new MethodFromRule(MethodInvoker.getGetter(Class.class, "getName")), //Class.forName(String)
                    new MethodFromRule(MethodInvoker.getGetter(StringBuilder.class, "toString"), StringBuilder.class), //new StringBuilder(String)
                    new MethodFromRule(MethodInvoker.getGetter(StringBuffer.class, "toString"), StringBuffer.class), //new StringBuffer(String)
                    new MethodFromRule(MethodInvoker.getGetter(Charset.class, "name")), //Charset.forName(String)
                    new MethodFromRule(MethodInvoker.getGetter(InetAddress.class, "getHostName")), //InetAddress.getByName(String)
                    new MethodFromRule(MethodInvoker.getStaticFactory(JsonFromValueMapper.class, "convertDateToJson", Date.class)),
                    new MethodFromRule(MethodInvoker.getStaticFactory(JsonFromValueMapper.class, "convertCalendarToJson", Calendar.class)),
                    new MethodFromRule(MethodInvoker.getStaticFactory(JsonFromValueMapper.class, "convertEnumToJson", Enum.class)),
                    new DocumentFromRule(),
                    new ArrayFromRule());
        } catch (Exception ex) {
            // throwWrapped rethrows; the return is only here to satisfy the compiler
            ExceptionShell.v().throwWrapped(ex);
            return null;
        }
    }

    /** Excludes (SKIPs) values whose class lives in a JDK/standard package and was not special-cased above. */
    public FromRule getJDKExcludeRule() {
        return new FromRuleList().add(
                new ExcludeTypeFromRule("java."),
                new ExcludeTypeFromRule("javax."),
                new ExcludeTypeFromRule("org.ietf.jgss."),
                new ExcludeTypeFromRule("org.omg."),
                new ExcludeTypeFromRule("org.w3c.dom."),
                new ExcludeTypeFromRule("org.xml.sax."));
    }

    public PropertyFromRule getPropertyRule() {
        return propertyRule;
    }

    public void setPropertyRule(PropertyFromRule propertyRule) {
        this.propertyRule = propertyRule;
    }

    /** Default property chain: honor @JsonExclude first, otherwise invoke the getter. */
    public PropertyFromRule getDefaultPropertyRule() {
        return new PrpertyFromRuleList().add( // NOTE(review): class name "Prperty..." is a typo, kept for compatibility
                new JsonExcludeAnnotationPropertyFromRule(),
                new InvokePropertyFromRule());
    }

    /** Converts the stored source against the stored json; a SKIPped root becomes null. */
    public Object mapFromValue() {
        Object r = mapFromValue(source, json, Object.class);
        if (isSkip(r)) {
            r = null;
        }
        return r;
    }

    /**
     * Core conversion step. If the value's runtime class is not what the static
     * type implies, conversion is redone against the runtime class and the
     * result is tagged with "#class" (wrapping non-map results in a
     * {"#class", "#value"} map) so the concrete type survives round-tripping.
     *
     * @param source     the value to convert (may be null)
     * @param json       existing JSON value to merge into, or null
     * @param staticType the declared type at this position, or null to force dynamic handling
     * @return the JSON representation, or SKIP if a rule excluded the value
     */
    @SuppressWarnings("unchecked")
    public Object mapFromValue(Object source, Object json, Type staticType) {
        FromRule matchedRule = rule.match(source, staticType, this);
        if (staticType == null || !matchedRule.isEqualToStaticType(source, staticType, this)) {
            Class<?> sourceCls = source == null ? Object.class : source.getClass();
            // re-match against the runtime class; sender is null here — rules must tolerate that
            FromRule dynamicRule = rule.match(source, sourceCls, null);
            json = dynamicRule.mapFromValue(source, json, staticType, this);
            if (json != null && !isSkip(json) && !json.getClass().equals(sourceCls)) {
                Map<String,Object> map;
                //if json is a map, add #class entry, otherwise wrap map with #class and #value.
                if (json instanceof Map<?,?>) {
                    map = (Map<String,Object>) json;
                } else {
                    map = new HashMap<String, Object>();
                    map.put("#value", json);
                }
                map.put("#class", sourceCls.getName());
                json = map;
            }
        } else {
            json = matchedRule.mapFromValue(source, json, staticType, this);
        }
        return json;
    }

    /**
     * True when source's runtime class equals the given type — or, when the type
     * is an interface/abstract class, equals the supplied concrete default
     * (e.g. ArrayList for List). Used by rules to decide if "#class" tagging is needed.
     */
    public boolean isEqualToTypeOrInterfaceDefaultClass(Object source, Type type, Class<?> defaultCls) {
        Class<?> cls = TypeShell.v().asClass(type);
        if (cls.isInterface() || Modifier.isAbstract(cls.getModifiers())) {
            return source.getClass().equals(defaultCls);
        } else {
            return source.getClass().equals(cls);
        }
    }

    /** Returns the index-th type argument of type viewed as interfCls, or null if not parameterized. */
    public Type getParameterType(Type type, Class<?> interfCls, int index) {
        return getParameterTypeImpl(type, interfCls, index);
    }

    /** Returns all type arguments of type viewed as interfCls, or null if not parameterized. */
    public Type[] getParameterTypes(Type type, Class<?> interfCls) {
        return getParameterTypesImpl(type, interfCls);
    }

    public static Type getParameterTypeImpl(Type type, Class<?> interfCls, int index) {
        ParameterizedType t = TypeShell.v().asParameterizedType(type, interfCls);
        if (t != null) {
            return t.getActualTypeArguments()[index];
        } else {
            return null;
        }
    }

    public static Type[] getParameterTypesImpl(Type type, Class<?> interfCls) {
        ParameterizedType t = TypeShell.v().asParameterizedType(type, interfCls);
        if (t != null) {
            return t.getActualTypeArguments();
        } else {
            return null;
        }
    }

    /** Error hook: default is to silently ignore property-conversion failures. Override to log/rethrow. */
    public void handleError(Exception ex) {
    }

    /** True if json is the SKIP sentinel (meaning: omit from output). */
    public boolean isSkip(Object json) {
        return json != null && json.equals(SKIP);
    }

    public Object getSkip() {
        return SKIP;
    }

    /** Strategy for converting one kind of value to JSON. */
    public interface FromRule {
        /** Returns the rule to use for (source, type), or null if this rule does not apply. */
        public FromRule match(Object source, Type type, JsonFromValueMapper sender);
        /** True if source's runtime class is fully implied by the static type (no "#class" tag needed). */
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender);
        /** Converts source to JSON, merging into json if possible. May return sender.getSkip(). */
        public Object mapFromValue(Object source, Object json, Type staticType, JsonFromValueMapper sender);
    }

    /** Ordered chain of rules: match() returns the first member that matches. */
    public static class FromRuleList implements FromRule {
        protected List<FromRule> rules = new ArrayList<FromRule>();

        public FromRuleList add(FromRule... rs) {
            for (FromRule r : rs) {
                rules.add(r);
            }
            return this;
        }

        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            for (FromRule r : rules) {
                if (r == null) {
                    continue;
                }
                FromRule mr = r.match(source, type, sender);
                if (mr != null) {
                    return mr;
                }
            }
            return null;
        }

        // The list itself never converts; only its members do.
        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return false;
        }

        @Override
        public Object mapFromValue(Object source, Object json, Type staticType, JsonFromValueMapper sender) {
            return null;
        }
    }

    /** Handles null sources; an optional inner rule (e.g. JDK exclusion) may veto/override via its own match. */
    public static class NullFromRule implements FromRule {
        protected FromRule optionRule;

        public NullFromRule() {
        }

        public NullFromRule(FromRule optionRule) {
            this.optionRule = optionRule;
        }

        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            if (source == null) {
                FromRule rule = null;
                if (optionRule != null) {
                    rule = optionRule.match(source, type, sender);
                }
                if (rule == null) {
                    rule = this;
                }
                return rule;
            } else {
                return null;
            }
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return true;
        }

        /** null maps to JSON null. */
        @Override
        public Object mapFromValue(Object source, Object json, Type staticType, JsonFromValueMapper sender) {
            return null;
        }
    }

    /** Scalars (as decided by ReflectionShell.isValueType) pass through unchanged. */
    public static class ValueFromRule implements FromRule {
        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            return ReflectionShell.v().isValueType(TypeShell.v().asClass(type)) ? this : null;
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            Class<?> cls = TypeShell.v().asClass(type);
            if (cls.equals(Object.class) || cls.isPrimitive()) {
                return true;
            } else {
                return source.getClass().equals(cls);
            }
        }

        @Override
        public Object mapFromValue(Object source, Object json, Type type, JsonFromValueMapper sender) {
            return source;
        }
    }

    /** Collections become JSON lists; an existing json list is updated in place, reusing entries positionally. */
    public static class ListFromRule implements FromRule {
        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            return Collection.class.isAssignableFrom(TypeShell.v().asClass(type)) ? this : null;
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return sender.isEqualToTypeOrInterfaceDefaultClass(source, type, ArrayList.class);
        }

        @SuppressWarnings("unchecked")
        @Override
        public Object mapFromValue(Object source, Object json, Type type, JsonFromValueMapper sender) {
            List<Object> jsonList;
            Type eType = sender.getParameterType(type, Collection.class, 0);
            if (eType == null) {
                eType = Object.class;
            }
            if (json != null && json instanceof List<?>) {
                jsonList = (List<Object>) json;
            } else {
                jsonList = new ArrayList<Object>();
            }
            // Walk source and existing json list in lockstep: overwrite matching
            // positions, drop SKIPped entries, append extras.
            ListIterator<Object> listIter = jsonList.listIterator();
            for (Object e : (Iterable<Object>) source) {
                if (listIter.hasNext()) {
                    Object je = listIter.next();
                    Object jsonEntry = sender.mapFromValue(e, je, eType);
                    if (sender.isSkip(jsonEntry)) {
                        listIter.remove();
                    } else {
                        listIter.set(jsonEntry);
                    }
                } else {
                    Object jsonEntry = sender.mapFromValue(e, null, eType);
                    if (!sender.isSkip(jsonEntry)) {
                        listIter.add(jsonEntry);
                    }
                }
            }
            // Trim stale tail entries left over from a longer previous json list.
            while (listIter.hasNext()) {
                listIter.next();
                listIter.remove();
            }
            return jsonList;
        }
    }

    /**
     * Maps become JSON objects when keys are strings (keys beginning with "#"
     * are escaped by doubling the "#"); otherwise the alternate form
     * {"#value":[[key,value],...]} is emitted.
     */
    public static class MapFromRule implements FromRule {
        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            return Map.class.isAssignableFrom(TypeShell.v().asClass(type)) ? this : null;
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return sender.isEqualToTypeOrInterfaceDefaultClass(source, type, HashMap.class);
        }

        @SuppressWarnings("unchecked")
        @Override
        public Object mapFromValue(Object source, Object json, Type type, JsonFromValueMapper sender) {
            Map<String,Object> jsonMap;
            if (json != null && json instanceof Map<?,?>) {
                jsonMap = (Map<String,Object>) json;
            } else {
                jsonMap = new HashMap<String, Object>();
            }
            // keys tracks pre-existing json entries; survivors are purged at the end.
            Set<String> keys = new HashSet<String>(jsonMap.keySet());
            Type[] pTypes = sender.getParameterTypes(type, Map.class);
            Type keyType = Object.class;
            Type valType = Object.class;
            Class<?> keyCls = Object.class;
            if (pTypes != null) {
                keyType = pTypes[0];
                valType = pTypes[1];
                keyCls = TypeShell.v().asClass(keyType);
            }
            if (keyCls.equals(String.class) ||
                    (keyCls.isAssignableFrom(String.class) && allKeysAreString(source))) {
                for (Map.Entry<?,?> e : ((Map<?,?>) source).entrySet()) {
                    String key = e.getKey().toString();
                    Object val = e.getValue();
                    Object jsonEntry = sender.mapFromValue(val, jsonMap.get(key), valType);
                    if (!sender.isSkip(jsonEntry)) {
                        String actualKey = key;
                        if (actualKey.startsWith("#")) { //escape #key
                            actualKey = "#" + actualKey;
                        }
                        // NOTE(review): for escaped keys the stale-key sweep uses the
                        // unescaped name — a pre-existing "##key" entry in jsonMap would
                        // be removed by the cleanup loop below. Verify intended behavior.
                        jsonMap.put(actualKey, jsonEntry);
                        keys.remove(key);
                    }
                }
            } else {
                //alternate form: {"#value":[[key,value],...]}
                List<Object> es = new ArrayList<Object>();
                for (Map.Entry<?,?> e : ((Map<?,?>) source).entrySet()) {
                    Object key = e.getKey();
                    Object val = e.getValue();
                    Object jk = sender.mapFromValue(key, null, keyType);
                    Object jv = sender.mapFromValue(val, null, valType);
                    if (!sender.isSkip(jk) && !sender.isSkip(jv)) {
                        List<Object> entry = new ArrayList<Object>();
                        entry.add(jk);
                        entry.add(jv);
                        es.add(entry);
                    }
                }
                jsonMap.put("#value", es);
                keys.remove("#value");
            }
            // Remove entries that existed in json but have no counterpart in source.
            for (String key : keys) {
                jsonMap.remove(key);
            }
            return jsonMap;
        }

        /** True when every key of the (raw) source map is a String. */
        public boolean allKeysAreString(Object source) {
            for (Object k : ((Map<?,?>) source).keySet()) {
                if (!(k instanceof String)) {
                    return false;
                }
            }
            return true;
        }
    }

    /** Fallback bean rule: converts readable properties (get*/is* getters) into a JSON object. */
    public static class ObjectFromRule implements FromRule {
        protected ReflectionShell refSh = ReflectionShell.v();

        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            return source != null ? this : null;
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return source.getClass().equals(TypeShell.v().asClass(type));
        }

        @SuppressWarnings("unchecked")
        @Override
        public Object mapFromValue(Object source, Object json, Type staticType, JsonFromValueMapper sender) {
            Map<String,Object> jsonMap;
            if (json != null && json instanceof Map<?,?>) {
                jsonMap = (Map<String,Object>) json;
            } else {
                jsonMap = new HashMap<String, Object>();
            }
            Set<String> keys = new HashSet<String>(jsonMap.keySet());
            Class<?> cls = source.getClass();
            for (Method method : cls.getMethods()) {
                // isMethod(method, false, null): presumably filters to public no-arg
                // non-static candidates — TODO confirm against ReflectionShell.
                if (refSh.isMethod(method, false, null)) {
                    String propName = getPropertyName(method);
                    if (propName != null) {
                        Object jsonProp = jsonMap.get(propName);
                        try {
                            PropertyFromRule prule = sender.getPropertyRule().match(method, source, sender);
                            jsonProp = prule.mapFromValue(method, source, jsonProp, sender);
                            if (!sender.isSkip(jsonProp)) {
                                jsonMap.put(propName, jsonProp);
                                keys.remove(propName);
                            }
                        } catch (Exception ex) {
                            // failing properties are reported to the hook and omitted
                            sender.handleError(ex);
                        }
                    }
                }
            }
            // Purge json entries with no matching property on the source object.
            for (String key : keys) {
                jsonMap.remove(key);
            }
            return jsonMap;
        }

        /** Derives the property name from a getter: boolean "isX"/"getX" or plain "getX"; null if neither. */
        public String getPropertyName(Method method) {
            String propertyName;
            String mName = method.getName();
            if (method.getReturnType().equals(boolean.class)) {
                propertyName = refSh.getNameSuffix("is", mName);
                if (propertyName == null) {
                    propertyName = refSh.getNameSuffix("get", mName);
                }
            } else {
                propertyName = refSh.getNameSuffix("get", mName);
            }
            return propertyName;
        }
    }

    /** Strategy for converting one bean property (identified by its getter) to JSON. */
    public interface PropertyFromRule {
        /** Returns the rule to use for the given getter on source, or null if not applicable. */
        public PropertyFromRule match(Method method, Object source, JsonFromValueMapper sender);
        /** Converts the property value; may return sender.getSkip() to omit it. */
        public Object mapFromValue(Method method, Object source, Object jsonProp, JsonFromValueMapper sender) throws Exception;
    }

    /** Ordered chain of property rules; first match wins. (Name keeps the original "Prperty" typo for compatibility.) */
    public static class PrpertyFromRuleList implements PropertyFromRule {
        protected List<PropertyFromRule> rules = new ArrayList<PropertyFromRule>();

        public PrpertyFromRuleList add(PropertyFromRule... rs) {
            for (PropertyFromRule r : rs) {
                rules.add(r);
            }
            return this;
        }

        @Override
        public PropertyFromRule match(Method method, Object source, JsonFromValueMapper sender) {
            for (PropertyFromRule r : rules) {
                PropertyFromRule mr = r.match(method, source, sender);
                if (mr != null) {
                    return mr;
                }
            }
            return null;
        }

        @Override
        public Object mapFromValue(Method method, Object source, Object jsonProp, JsonFromValueMapper sender) throws Exception {
            return null;
        }
    }

    /** Default property rule: invoke the getter and convert the result against its generic return type. */
    public static class InvokePropertyFromRule implements PropertyFromRule {
        @Override
        public PropertyFromRule match(Method method, Object source, JsonFromValueMapper sender) {
            return this;
        }

        @Override
        public Object mapFromValue(Method method, Object source, Object jsonProp, JsonFromValueMapper sender) throws Exception {
            Object v = method.invoke(source);
            return sender.mapFromValue(v, jsonProp, method.getGenericReturnType());
        }
    }

    //// additional rules

    /**
     * Arrays become JSON lists (primitive arrays element-by-element, byte[] as a
     * Base64 string, object arrays via ListFromRule on an Arrays.asList view).
     */
    public static class ArrayFromRule implements FromRule {
        protected ListFromRule listRule = new ListFromRule();

        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            return TypeShell.v().isArray(type) ? this : null;
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return source.getClass().equals(TypeShell.v().asClass(type));
        }

        @SuppressWarnings("unchecked")
        @Override
        public Object mapFromValue(Object source, Object json, Type staticType, JsonFromValueMapper sender) {
            List<Object> jsonList = new ArrayList<Object>();
            if (source instanceof int[]) {
                for (int i : (int[]) source) {
                    jsonList.add(i);
                }
            } else if (source instanceof short[]) {
                for (short s : (short[]) source) {
                    jsonList.add(s);
                }
            } else if (source instanceof long[]) {
                for (long l : (long[]) source) {
                    jsonList.add(l);
                }
            } else if (source instanceof char[]) {
                for (char c : (char[]) source) {
                    jsonList.add(c);
                }
            } else if (source instanceof float[]) {
                for (float f : (float[]) source) {
                    jsonList.add(f);
                }
            } else if (source instanceof double[]) {
                for (double d : (double[]) source) {
                    jsonList.add(d);
                }
            } else if (source instanceof boolean[]) {
                for (boolean b : (boolean[]) source) {
                    jsonList.add(b);
                }
            } else if (source instanceof byte[]) {
                // byte arrays are emitted as a Base64 string, not a list
                return Base64Shell.v().encode((byte[]) source);
            } else {
                // object array: reuse existing json list if present, delegate to ListFromRule
                if (json != null && json instanceof List<?>) {
                    jsonList = (List<Object>) json;
                }
                Type compType = TypeShell.v().getArrayComponentType(staticType);
                Object[] os = (Object[]) source;
                List<Object> list = Arrays.asList(os);
                jsonList = (List<Object>) listRule.mapFromValue(list, jsonList,
                        TypeShell.v().make(List.class, compType), sender);
            }
            return jsonList;
        }
    }

    /** SKIPs any value whose class name starts with the given package prefix (Object.class itself is exempt). */
    public static class ExcludeTypeFromRule implements FromRule {
        protected String namePrefix;

        public ExcludeTypeFromRule(String namePrefix) {
            this.namePrefix = namePrefix;
        }

        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            Class<?> cls = TypeShell.v().asClass(type);
            if (cls.equals(Object.class)) {
                return null;
            } else {
                return cls.getName().startsWith(namePrefix) ? this : null;
            }
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return true;
        }

        @Override
        public Object mapFromValue(Object source, Object json, Type staticType, JsonFromValueMapper sender) {
            return sender.getSkip();
        }
    }

    /** SKIPs properties whose getter carries the given annotation. */
    public static class ExcludeAnnotationPropertyFromRule implements PropertyFromRule {
        protected Class<? extends Annotation> annotationType;

        public ExcludeAnnotationPropertyFromRule(Class<? extends Annotation> annotationType) {
            this.annotationType = annotationType;
        }

        public Class<? extends Annotation> getAnnotationType() {
            return annotationType;
        }

        @Override
        public PropertyFromRule match(Method method, Object source, JsonFromValueMapper sender) {
            return method.isAnnotationPresent(annotationType) ? this : null;
        }

        @Override
        public Object mapFromValue(Method method, Object source, Object jsonProp, JsonFromValueMapper sender) throws Exception {
            return sender.getSkip();
        }
    }

    /** SKIPs properties annotated @JsonExclude with excludeJson() == true. */
    public static class JsonExcludeAnnotationPropertyFromRule implements PropertyFromRule {
        public JsonExcludeAnnotationPropertyFromRule() {
        }

        @Override
        public PropertyFromRule match(Method method, Object source, JsonFromValueMapper sender) {
            JsonExclude ex = method.getAnnotation(JsonExclude.class);
            return ex != null && ex.excludeJson() ? this : null;
        }

        @Override
        public Object mapFromValue(Method method, Object source, Object jsonProp, JsonFromValueMapper sender) throws Exception {
            return sender.getSkip();
        }
    }

    /** Converts a value by invoking a single getter/factory on it (e.g. File.getPath). */
    public static class MethodFromRule implements FromRule {
        protected MethodInvoker getter;
        protected Class<?> cls; // the type this rule applies to

        public MethodFromRule(MethodInvoker getter) {
            this.getter = getter;
            // presumably parameter 0 is the receiver/declaring type — TODO confirm against MethodInvoker
            this.cls = getter.getParameterType(0);
        }

        public MethodFromRule(MethodInvoker getter, Class<?> cls) {
            this.getter = getter;
            this.cls = cls;
        }

        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            return cls.isAssignableFrom(TypeShell.v().asClass(type)) ? this : null;
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return source.getClass().equals(TypeShell.v().asClass(type));
        }

        @Override
        public Object mapFromValue(Object source, Object json, Type staticType, JsonFromValueMapper sender) {
            return getter.invoke(source);
        }
    }

    /** Formats a Date using the shared UtilShell date format. */
    public static String convertDateToJson(Date date) {
        return UtilShell.v().getDateFormat().format(date);
    }

    /** Calendar maps to {"time": formattedDate, "timeZone": zoneId}. */
    public static Map<String,Object> convertCalendarToJson(Calendar cal) {
        Map<String,Object> o = new HashMap<String, Object>();
        o.put("time", convertDateToJson(cal.getTime()));
        o.put("timeZone", cal.getTimeZone().getID());
        return o;
    }

    /** Enum maps to {"name": constantName, "declaringClass": className}. */
    public static Map<String,Object> convertEnumToJson(Enum<?> e) {
        Map<String,Object> o = new HashMap<String, Object>();
        o.put("name", e.name());
        o.put("declaringClass", e.getDeclaringClass().getName());
        return o;
    }

    // Loaded reflectively so javax.swing-free environments don't hard-depend on the mapper class.
    public static String DOC_MAPPER_CLASS = "shijimi.json.JsonDocumentMapper";

    /** Converts javax.swing.text.Document values via the (reflectively loaded) JsonDocumentMapper. */
    public static class DocumentFromRule implements FromRule {
        @Override
        public FromRule match(Object source, Type type, JsonFromValueMapper sender) {
            return Document.class.isAssignableFrom(TypeShell.v().asClass(type)) ? this : null;
        }

        @Override
        public boolean isEqualToStaticType(Object source, Type type, JsonFromValueMapper sender) {
            return false;
        }

        @Override
        public Object mapFromValue(Object source, Object json, Type staticType, JsonFromValueMapper sender) {
            try {
                Class<?> cls = Class.forName(DOC_MAPPER_CLASS);
                return cls.getMethod("mapFrom", Document.class).invoke(cls.newInstance(), source);
            } catch (Exception ex) {
                sender.handleError(ex);
                return null;
            }
        }
    }
}
/* * Copyright 2017 Long Term Software LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ltsllc.miranda.cluster.states; import com.google.gson.reflect.TypeToken; import com.ltsllc.miranda.Message; import com.ltsllc.miranda.Panic; import com.ltsllc.miranda.State; import com.ltsllc.miranda.clientinterface.MirandaException; import com.ltsllc.miranda.clientinterface.basicclasses.NodeElement; import com.ltsllc.miranda.clientinterface.basicclasses.Version; import com.ltsllc.miranda.clientinterface.requests.Files; import com.ltsllc.miranda.cluster.ClusterFile; import com.ltsllc.miranda.cluster.messages.*; import com.ltsllc.miranda.file.SingleFile; import com.ltsllc.miranda.file.messages.GetFileResponseMessage; import com.ltsllc.miranda.file.states.SingleFileReadyState; import com.ltsllc.miranda.manager.StandardManager; import com.ltsllc.miranda.miranda.Miranda; import com.ltsllc.miranda.node.Node; import com.ltsllc.miranda.node.messages.GetClusterFileMessage; import com.ltsllc.miranda.node.messages.GetFileMessage; import com.ltsllc.miranda.node.messages.GetVersionMessage; import com.ltsllc.miranda.operations.syncfiles.messages.GetVersionResponseMessage; import com.ltsllc.miranda.property.MirandaProperties; import com.ltsllc.miranda.topics.TopicManager; import com.ltsllc.miranda.writer.WriteMessage; import org.apache.log4j.Logger; import org.junit.Test; import java.lang.reflect.Type; import java.security.GeneralSecurityException; import java.util.ArrayList; import 
java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by Clark on 2/6/2017. */ public class ClusterFileReadyState extends SingleFileReadyState { private static Logger logger = Logger.getLogger(ClusterFileReadyState.class); public ClusterFileReadyState(ClusterFile clusterFile) throws MirandaException { super(clusterFile); } public ClusterFile getClusterFile() { return (ClusterFile) getContainer(); } public static void setLogger(Logger logger) { ClusterFileReadyState.logger = logger; } @Override public State processMessage(Message message) throws MirandaException { State nextState = this; switch (message.getSubject()) { case GetClusterFile: { GetClusterFileMessage getClusterFileMessage = (GetClusterFileMessage) message; nextState = processGetClusterFileMessage(getClusterFileMessage); break; } case NodesUpdated: { NodesUpdatedMessage nodesUpdatedMessage = (NodesUpdatedMessage) message; nextState = processNodesUpdatedMessage(nodesUpdatedMessage); break; } case HealthCheckUpdate: { HealthCheckUpdateMessage healthCheckUpdateMessage = (HealthCheckUpdateMessage) message; nextState = processHealthCheckUpdateMessage(healthCheckUpdateMessage); break; } case GetVersions: { GetVersionMessage getVersionMessage = (GetVersionMessage) message; nextState = processGetVersionMessage (getVersionMessage); break; } default: { nextState = super.processMessage(message); break; } } return nextState; } private State processGetClusterFileMessage(GetClusterFileMessage getClusterFileMessage) throws MirandaException { List<NodeElement> newList = new ArrayList<NodeElement>(getClusterFile().getData()); GetFileResponseMessage getFileResponseMessage = new GetFileResponseMessage(getClusterFile().getQueue(), getClusterFile(), getClusterFile().asJson()); return this; } public State processGetVersionMessage (GetVersionMessage getVersionMessag) { try { Miranda miranda = Miranda.getInstance(); Node node = miranda.getCluster().getOurNode(); Map<Files, Version> fileToVersion = new 
HashMap<>(); fileToVersion.put(Files.Topic, miranda.getTopicManager().getVersion()); fileToVersion.put(Files.Cluster, miranda.getCluster().getVersion()); fileToVersion.put(Files.Subscription, miranda.getSubscriptionManager().getVersion()); fileToVersion.put(Files.User, miranda.getUserManager().getVersion()); fileToVersion.put(Files.Cluster, miranda.getCluster().getVersion()); fileToVersion.put(Files.User, miranda.getUserManager().getVersion()); fileToVersion.put(Files.DeliveriesList, miranda.getDeliveryManager().getVersion()); fileToVersion.put(Files.EventList, miranda.getEventManager().getVersion()); GetVersionResponseMessage getVersionResponseMessage = new GetVersionResponseMessage(getContainer().getQueue(), this, fileToVersion, node); send(getContainer().getQueue(), getVersionResponseMessage); return getContainer().getCurrentState(); } catch (GeneralSecurityException e) { Panic panic = new Panic("Exception", e); Miranda.getInstance().panic(panic); return null; } } /** * This message means that we should update all the matching nodes time * of last connection, and possibly drop the nodes that don't match. A * node that has not connected in an amount of time (in milliseconds) * specified by {@link MirandaProperties#PROPERTY_CLUSTER_TIMEOUT} * should be dropped. 
* * @param healthCheckUpdateMessage * @return */ private State processHealthCheckUpdateMessage(HealthCheckUpdateMessage healthCheckUpdateMessage) { // // update the time of last connect for nodes in the message // boolean nodesUpdated = false; for (NodeElement nodeElement : healthCheckUpdateMessage.getUpdates()) { NodeElement match = getClusterFile().matchingNode(nodeElement); if (null != match) { match.setLastConnected(System.currentTimeMillis()); nodesUpdated = true; } } // // check to see if we should drop any nodes // boolean nodesDropped = false; long timeout = Miranda.properties.getLongProperty(MirandaProperties.PROPERTY_CLUSTER_TIMEOUT, MirandaProperties.DEFAULT_CLUSTER_TIMEOUT); long now = System.currentTimeMillis(); List<NodeElement> drops = new ArrayList<NodeElement>(); for (NodeElement nodeElement : getClusterFile().getData()) { long timeSinceLastConnect = now - nodeElement.getLastConnected(); if (timeSinceLastConnect >= timeout) { drops.add(nodeElement); nodesDropped = true; } } // // drop nodes // if (nodesDropped) { logger.info("dropping nodes that have timed out: " + drops); getClusterFile().getData().removeAll(drops); getClusterFile().updateVersion(); getClusterFile().write(); for (NodeElement droppedNode : drops) { DropNodeMessage message = new DropNodeMessage(getClusterFile().getQueue(), this, droppedNode); send(getClusterFile().getCluster(), message); nodesDropped = true; } } // // if we changed anything, update the version and write out the file // if (nodesUpdated || nodesDropped) { getClusterFile().updateVersion(); getClusterFile().write(); } return this; } public Type getListType() { return new TypeToken<List<NodeElement>>() { }.getType(); } public void write() { WriteMessage writeMessage = new WriteMessage(getClusterFile().getFilename(), getClusterFile().getBytes(), getClusterFile().getQueue(), this); send(getClusterFile().getWriterQueue(), writeMessage); } public boolean contains(Object o) { NodeElement nodeElement = (NodeElement) o; return 
getClusterFile().contains(nodeElement); } public void add(Object o) { NodeElement nodeElement = (NodeElement) o; getClusterFile().getData().add(nodeElement); } @Override public SingleFile getFile() { return getClusterFile(); } public String getName() { return "clusters"; } @Override public String toString() { return "ReadyState"; } public State start() { State nextState = super.start(); MirandaProperties properties = Miranda.properties; long healthCheckPeriod = properties.getLongProperty(MirandaProperties.PROPERTY_CLUSTER_HEALTH_CHECK_PERIOD, MirandaProperties.DEFAULT_CLUSTER_HEALTH_CHECK_PERIOD); HealthCheckMessage healthCheckMessage = new HealthCheckMessage(getClusterFile().getCluster(), this); Miranda.timer.sendSchedulePeriodic(0, healthCheckPeriod, getClusterFile().getCluster(), healthCheckMessage); return nextState; } private State processNodesUpdatedMessage(NodesUpdatedMessage nodesUpdatedMessage) { List<NodeElement> copy = new ArrayList<NodeElement>(nodesUpdatedMessage.getNodeList()); getClusterFile().setData(copy); getClusterFile().write(); return this; } }
/* * Copyright 2015-2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.inventory.impl.tinkerpop; import static com.tinkerpop.gremlin.java.GremlinFluentUtility.optimizePipelineForQuery; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.hawkular.inventory.api.FilterFragment; import org.hawkular.inventory.api.Query; import org.hawkular.inventory.api.QueryFragment; import org.hawkular.inventory.api.filters.Contained; import org.hawkular.inventory.api.filters.Defined; import org.hawkular.inventory.api.filters.Filter; import org.hawkular.inventory.api.filters.Incorporated; import org.hawkular.inventory.api.filters.Marker; import org.hawkular.inventory.api.filters.RecurseFilter; import org.hawkular.inventory.api.filters.Related; import org.hawkular.inventory.api.filters.RelationWith; import org.hawkular.inventory.api.filters.SwitchElementType; import org.hawkular.inventory.api.filters.With; import org.hawkular.inventory.base.spi.NoopFilter; import com.tinkerpop.pipes.Pipe; import com.tinkerpop.pipes.filter.IntervalFilterPipe; import com.tinkerpop.pipes.filter.PropertyFilterPipe; import com.tinkerpop.pipes.filter.RangeFilterPipe; import com.tinkerpop.pipes.transform.VertexQueryPipe; import 
com.tinkerpop.pipes.transform.VerticesEdgesPipe; /** * A filter applicator applies a filter to a Gremlin query. * * @author Lukas Krejci * @author Jirka Kremser * @see FilterVisitor * @since 0.0.1 */ abstract class FilterApplicator<T extends Filter> { private static Map<Class<? extends Filter>, Class<? extends FilterApplicator<?>>> applicators; static { applicators = new HashMap<>(); applicators.put(Related.class, RelatedApplicator.class); applicators.put(Contained.class, RelatedApplicator.class); applicators.put(Defined.class, RelatedApplicator.class); applicators.put(Incorporated.class, RelatedApplicator.class); applicators.put(With.Ids.class, WithIdsApplicator.class); applicators.put(With.Types.class, WithTypesApplicator.class); applicators.put(With.PropertyValues.class, WithPropertyValuesApplicator.class); applicators.put(RelationWith.Ids.class, RelationWithIdsApplicator.class); applicators.put(RelationWith.PropertyValues.class, RelationWithPropertiesApplicator.class); applicators.put(RelationWith.SourceOfType.class, RelationWithSourcesOfTypesApplicator.class); applicators.put(RelationWith.TargetOfType.class, RelationWithTargetsOfTypesApplicator.class); applicators.put(RelationWith.SourceOrTargetOfType.class, RelationWithSourcesOrTargetsOfTypesApplicator.class); applicators.put(SwitchElementType.class, SwitchElementTypeApplicator.class); applicators.put(NoopFilter.class, NoopApplicator.class); applicators.put(With.CanonicalPaths.class, CanonicalPathApplicator.class); applicators.put(With.RelativePaths.class, RelativePathApplicator.class); applicators.put(Marker.class, MarkerApplicator.class); applicators.put(With.DataAt.class, DataAtApplicator.class); applicators.put(With.DataValued.class, DataValuedApplicator.class); applicators.put(With.DataOfTypes.class, DataOfTypesApplicator.class); applicators.put(RecurseFilter.class, RecurseApplicator.class); applicators.put(With.SameIdentityHash.class, SameIdentityHashApplicator.class); applicators.put(With.Names.class, 
NamesApplicator.class); } protected final T filter; protected final FilterVisitor visitor = new FilterVisitor(); private FilterApplicator(T f) { this.filter = f; } public static FilterApplicator of(Filter filter) { if (filter == null) { throw new IllegalArgumentException("filter == null"); } Class<? extends Filter> filterClazz = filter.getClass(); Class<? extends FilterApplicator<?>> applicatorClazz = applicators.get(filterClazz); if (applicatorClazz == null) { throw new IllegalArgumentException("Unsupported filter type " + filterClazz); } Constructor<? extends FilterApplicator<?>> constructor = null; try { constructor = applicatorClazz.getDeclaredConstructor(filterClazz); } catch (NoSuchMethodException e) { try { // Contained, Defined, Owned constructor = applicatorClazz.getDeclaredConstructor(filterClazz.getSuperclass()); } catch (NoSuchMethodException e1) { throw new IllegalArgumentException("Unable to create an instance of " + applicatorClazz); } } try { constructor.setAccessible(true); return constructor.newInstance(filter); } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new IllegalArgumentException("Unable to create an instance of " + applicatorClazz); } } /** * Applies all the filters from the applicator tree to the provided Gremlin query. * * @param filterTree the tree of filters to apply to the query * @param q the query to update with filters from the tree * @param <S> type of the source of the query * @param <E> type of the output of the query */ public static <S, E> void applyAll(Query filterTree, HawkularPipeline<S, E> q) { if (filterTree == null) { return; } QueryTranslationState state = new QueryTranslationState(); if (applyAll(filterTree, q, false, state)) { q.recall(); } } /** * A private impl of the {@code applyAll()} method that tracks the current type of the filter being applied. 
* The type of the filter is either a path ({@code isFilter == false}) which potentially progresses the query to * next positions in the inventory traversal or a filter ({@code isFilter == true}) which merely trims down the * number of the elements at the current "tail" of the traversal by applying filters to them. * * @param query the query * @param pipeline the Gremlin pipeline that the query gets translated to * @param isFilter whether we are currently processing filters as filters or path elements * @param <S> the start element type of the pipeline * @param <E> the end element type of the pipeline * @return true if after applying the filters, we're the filtering state or false if we are in path-progression * state. */ @SuppressWarnings("unchecked") private static <S, E> boolean applyAll(Query query, HawkularPipeline<S, E> pipeline, boolean isFilter, QueryTranslationState state) { QueryTranslationState origState = state.clone(); HawkularPipeline<S, E> workingPipeline = new HawkularPipeline<>(); for (QueryFragment qf : query.getFragments()) { boolean thisIsFilter = qf instanceof FilterFragment; if (thisIsFilter != isFilter) { isFilter = thisIsFilter; if (thisIsFilter) { //add the path progressions we had finishPipeline(workingPipeline, state, origState); workingPipeline.getPipes().forEach((p) -> addOptimized(pipeline, p)); } else { if (needsRememberingPosition(workingPipeline)) { finishPipeline(workingPipeline, state, origState); pipeline.remember(); //add the path progressions we had workingPipeline.getPipes().forEach((p) -> addOptimized(pipeline, p)); pipeline.recall(); } else { //add the path progressions we had //finishPipeline(workingPipeline, state, origState); workingPipeline.getPipes().forEach((p) -> addOptimized(pipeline, p)); } } workingPipeline = new HawkularPipeline<>(); } FilterApplicator.of(qf.getFilter()).applyTo(workingPipeline, state); } boolean remember = isFilter && needsRememberingPosition(workingPipeline); if (remember) { pipeline.remember(); 
} //empty the working pipeline into the true pipeline workingPipeline.getPipes().forEach((p) -> addOptimized(pipeline, p)); finishPipeline(pipeline, state, origState); if (query.getSubTrees().isEmpty()) { return remember; } if (query.getSubTrees().size() == 1) { return applyAll(query.getSubTrees().get(0), pipeline, isFilter, state); } else { List<HawkularPipeline<E, ?>> branches = new ArrayList<>(); Iterator<Query> it = query.getSubTrees().iterator(); // apply the first branch - in here, we know there are at least 2 actually HawkularPipeline<E, ?> branch = new HawkularPipeline<>(); // the branch is a brand new pipeline, so it doesn't make sense for it to inherit // our current filter state. boolean newIsFilter = applyAll(it.next(), branch, false, state.clone()); // close the filter in the branch, if needed if (newIsFilter) { branch.recall(); } branches.add(branch); while (it.hasNext()) { branch = new HawkularPipeline<>(); boolean nextIsFilter = applyAll(it.next(), branch, false, state.clone()); // close the filter in the branch, if needed if (nextIsFilter) { branch.recall(); } if (nextIsFilter != newIsFilter) { // this shouldn't normally be the case because the base impl extends the query tree // symmetrically, but here we can't be sure of that. 
throw new IllegalArgumentException("The branches of the query [" + query + "] don't change" + " the path/filter state consistently."); } branches.add(branch); } pipeline.copySplit(branches.toArray(new HawkularPipeline[branches.size()])).exhaustMerge(); finishPipeline(pipeline, state, origState); return isFilter; } } static <S, E> void finishPipeline(HawkularPipeline<S, E> pipeline, QueryTranslationState state, QueryTranslationState originalState) { if (state.isExplicitChange()) { return; } if (originalState.isInEdges() != state.isInEdges()) { if (originalState.isInEdges()) { switch (originalState.getComingFrom()) { case IN: pipeline.outE(); break; case OUT: pipeline.inE(); break; case BOTH: pipeline.bothE(); } } else { switch (state.getComingFrom()) { case IN: pipeline.outV(); break; case OUT: pipeline.inV(); break; case BOTH: pipeline.bothV(); } } } //we've moved back to the state as it was originally. reflect that. state.setInEdges(originalState.isInEdges()); state.setComingFrom(originalState.getComingFrom()); } private static boolean needsRememberingPosition(HawkularPipeline<?, ?> pipeline) { for (Pipe<?, ?> p : pipeline.getPipes()) { if (p instanceof VertexQueryPipe || p instanceof VerticesEdgesPipe) { return true; } } return false; } private static void addOptimized(HawkularPipeline<?, ?> pipeline, Pipe<?, ?> pipe) { if (pipe instanceof PropertyFilterPipe || pipe instanceof IntervalFilterPipe || pipe instanceof RangeFilterPipe) { optimizePipelineForQuery(pipeline, pipe); } else { pipeline.add(pipe); } } /** * To be implemented by inheritors, this applies the filter this applicator holds to the provided query taking into * the account the type of the filter. 
* * @param query the query to update with filter */ public abstract void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state); public Filter filter() { return filter; } @Override public String toString() { return "FilterApplicator[filter=" + filter + "]"; } private static final class RelatedApplicator extends FilterApplicator<Related> { private RelatedApplicator(Related filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class WithIdsApplicator extends FilterApplicator<With.Ids> { private WithIdsApplicator(With.Ids filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class WithTypesApplicator extends FilterApplicator<With.Types> { private WithTypesApplicator(With.Types filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class RelationWithIdsApplicator extends FilterApplicator<RelationWith.Ids> { private RelationWithIdsApplicator(RelationWith.Ids filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class RelationWithPropertiesApplicator extends FilterApplicator<RelationWith.PropertyValues> { private RelationWithPropertiesApplicator(RelationWith.PropertyValues filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class RelationWithSourcesOfTypesApplicator extends FilterApplicator<RelationWith.SourceOfType> { private RelationWithSourcesOfTypesApplicator(RelationWith.SourceOfType filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState 
state) { visitor.visit(query, filter, state); } } private static final class RelationWithTargetsOfTypesApplicator extends FilterApplicator<RelationWith.TargetOfType> { private RelationWithTargetsOfTypesApplicator(RelationWith.TargetOfType filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class RelationWithSourcesOrTargetsOfTypesApplicator extends FilterApplicator<RelationWith.SourceOrTargetOfType> { private RelationWithSourcesOrTargetsOfTypesApplicator(RelationWith.SourceOrTargetOfType filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class SwitchElementTypeApplicator extends FilterApplicator<SwitchElementType> { private SwitchElementTypeApplicator(SwitchElementType filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class NoopApplicator extends FilterApplicator<NoopFilter> { private NoopApplicator(NoopFilter filter) { super(filter); } public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class WithPropertyValuesApplicator extends FilterApplicator<With.PropertyValues> { private WithPropertyValuesApplicator(With.PropertyValues f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class CanonicalPathApplicator extends FilterApplicator<With.CanonicalPaths> { private CanonicalPathApplicator(With.CanonicalPaths f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class RelativePathApplicator extends 
FilterApplicator<With.RelativePaths> { private RelativePathApplicator(With.RelativePaths f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class MarkerApplicator extends FilterApplicator<Marker> { private MarkerApplicator(Marker f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class DataAtApplicator extends FilterApplicator<With.DataAt> { private DataAtApplicator(With.DataAt f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class DataValuedApplicator extends FilterApplicator<With.DataValued> { private DataValuedApplicator(With.DataValued f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class DataOfTypesApplicator extends FilterApplicator<With.DataOfTypes> { private DataOfTypesApplicator(With.DataOfTypes f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class RecurseApplicator extends FilterApplicator<RecurseFilter> { private RecurseApplicator(RecurseFilter f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class SameIdentityHashApplicator extends FilterApplicator<With.SameIdentityHash> { private SameIdentityHashApplicator(With.SameIdentityHash f) { super(f); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } private static final class NamesApplicator extends 
FilterApplicator<With.Names> { private NamesApplicator(With.Names names) { super(names); } @Override public void applyTo(HawkularPipeline<?, ?> query, QueryTranslationState state) { visitor.visit(query, filter, state); } } }
package core.handlers;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.commons.io.FileUtils;

import ch.qos.logback.classic.Logger;

/**
 * Wraps a single file-system file and lazily opens it for reading and/or writing.
 * The file is only touched when one of the accessors ({@link #getFile()},
 * {@link #getWriteableFile()}, {@link #write(String)}, ...) is first called.
 */
public class FileHandler {

    // NOTE(review): LOGGER is never initialised (always null) and every logging call in this
    // class is commented out; using it would throw a NullPointerException. Left as-is to avoid
    // changing behavior, but it should either be wired up or removed.
    private static final Logger LOGGER = null;

    /** Matches a single Windows ('\') or Unix ('/') path separator. */
    private final String sepReg = "(\\\\|/)";

    private boolean createIfNotExist;
    private boolean appendToFile;
    private boolean fileIsWritable = false;
    private boolean fileIsReadable = false;
    private String filePath;
    private String fileName;
    private String fileExtension;
    private FileWriter writeableFile;
    private File currentFile;
    private OutputStream writableFileOutputStream;

    /**
     * Creates a handler for an existing file object.
     *
     * @param fileObject the file to wrap
     * @throws IOException if the file exists but cannot be read
     */
    public FileHandler(File fileObject) throws Exception {
        if (fileObject.exists()) {
            if (fileObject.canRead()) {
                this.currentFile = fileObject;
                this.fileIsReadable = true;
            } else {
                // LOGGER.error("Unable to read '{}'", this.filePath + this.fileName);
                throw new IOException("Unable to read file " + this.filePath + this.fileName);
            }
        }
        setAbsoluteFilename(fileObject.getAbsolutePath());
    }

    /**
     * Creates a handler for the given absolute file name; the file is not created if missing.
     *
     * @param absoluteFilename absolute path (directory + file name)
     */
    public FileHandler(String absoluteFilename) {
        initialiseFile(absoluteFilename, false);
    }

    /**
     * Creates a handler for the given absolute file name.
     *
     * @param absoluteFilename absolute path (directory + file name)
     * @param value            whether to create the file (and its directories) if it is missing
     */
    public FileHandler(String absoluteFilename, boolean value) {
        initialiseFile(absoluteFilename, value);
    }

    /** Shared constructor logic: records the path/name and the create-if-missing flag. */
    public void initialiseFile(String absoluteFilename, boolean value) {
        setAbsoluteFilename(absoluteFilename);
        setCreateIfNotExist(value);
        setAppendToFile(false);
    }

    /** Splits an absolute file name into its path and file-name components. */
    public final void setAbsoluteFilename(String value) {
        setFileName(value.replaceFirst("^.*" + sepReg, ""));
        setFilePath(value.substring(0, value.length() - this.fileName.length()));
    }

    /**
     * Records the file name and derives the extension (text after the last '.', or "" if none).
     * Silently ignores values that are just a path separator.
     */
    public final void setFileName(String value) {
        if (value.matches(sepReg)) {
            // LOGGER.error("The filename '{}' is not valid!", value);
            return;
        }
        this.fileName = value;
        String[] fileComponents = this.fileName.split("\\.");
        if (fileComponents.length > 1) {
            this.fileExtension = fileComponents[fileComponents.length - 1];
        } else {
            this.fileExtension = "";
        }
    }

    public String getFileName() {
        return this.fileName;
    }

    public String getExtension() {
        return this.fileExtension;
    }

    private boolean isFileWriteable() {
        return this.fileIsWritable;
    }

    /** Normalises the path to use the platform separator, always ending with a separator. */
    public final void setFilePath(String value) {
        String[] pathExploded = value.split(sepReg);
        String path = "";
        for (String pathSegment : pathExploded) {
            path += pathSegment + System.getProperty("file.separator");
        }
        this.filePath = path;
    }

    public String getFilePath() {
        return this.filePath;
    }

    public String getAbsoluteFile() {
        return this.filePath + this.fileName;
    }

    public final void setCreateIfNotExist(boolean value) {
        this.createIfNotExist = value;
    }

    public boolean getCreateIfNotExist() {
        return this.createIfNotExist;
    }

    public final void setAppendToFile(boolean value) {
        this.appendToFile = value;
    }

    public boolean getAppendToFile() {
        return this.appendToFile;
    }

    /** Returns a writer for the file, opening it for writing on first use. */
    public FileWriter getWriteableFile() throws Exception {
        if (!this.fileIsWritable) {
            this.openFileForWriting();
        }
        return this.writeableFile;
    }

    /** Returns an output stream for the file, opening it for writing on first use. */
    public OutputStream getWritableFileOutputStream() throws Exception {
        if (!this.fileIsWritable) {
            this.openFileForWriting();
        }
        return this.writableFileOutputStream;
    }

    /** Returns the underlying file, opening (and optionally creating) it on first use. */
    public File getFile() throws Exception {
        if (!this.fileIsReadable) {
            this.openFile();
        }
        return this.currentFile;
    }

    /**
     * Opens the file for reading, creating it (and missing parent directories) when
     * {@link #getCreateIfNotExist()} is set.
     *
     * @throws IOException if the file is unreadable, or missing and creation is not allowed
     */
    private void openFile() throws Exception {
        File fileToOpen = new File(this.filePath + this.fileName);
        if (fileToOpen.exists()) {
            if (fileToOpen.canRead()) {
                this.currentFile = fileToOpen;
                this.fileIsReadable = true;
            } else {
                // LOGGER.error("Unable to read '{}'", this.filePath + this.fileName);
                throw new IOException("Unable to read file " + this.filePath + this.fileName);
            }
        } else if (this.createIfNotExist) {
            File directory = new File(this.filePath);
            if (!directory.exists()) {
                directory.mkdirs();
            }
            fileToOpen.createNewFile();
            this.currentFile = fileToOpen;
        } else {
            // LOGGER.error("'{}' does not exist!", this.filePath + this.fileName);
            // BUG FIX: message was missing the space before "does not exist!".
            throw new IOException(this.filePath + this.fileName + " does not exist!");
        }
    }

    /**
     * Opens both a {@link FileWriter} and a {@link FileOutputStream} on the current file,
     * honouring the append flag on both channels.
     */
    private void openFileForWriting() throws Exception {
        if (!this.fileIsReadable) {
            this.openFile();
        }
        if (!this.fileIsWritable) {
            this.currentFile.setWritable(true);
        }
        this.writeableFile = new FileWriter(this.currentFile, this.appendToFile);
        // BUG FIX: the stream previously ignored appendToFile and always truncated the file,
        // clobbering its contents even in append mode.
        this.writableFileOutputStream = new FileOutputStream(this.currentFile, this.appendToFile);
        this.fileIsWritable = true;
    }

    /** Writes the given string to the file, opening it for writing on first use. */
    public void write(String value) throws Exception {
        if (!this.fileIsWritable) {
            this.openFileForWriting();
        }
        this.writeableFile.write(value);
    }

    /** Closes any open writer/stream and resets the handler to its unopened state. */
    public void close() throws Exception {
        if (this.writeableFile != null) {
            this.writeableFile.close();
        }
        if (this.writableFileOutputStream != null) {
            this.writableFileOutputStream.close();
        }
        // BUG FIX: previously writeableFile was only cleared when the output stream was
        // non-null, and the stream reference itself was never cleared, leaving stale
        // references to closed resources.
        this.writeableFile = null;
        this.writableFileOutputStream = null;
        this.fileIsWritable = false;
        this.currentFile = null;
        this.fileIsReadable = false;
    }

    /**
     * Copy the file to a specific location.
     *
     * @param absoluteFileName - Target location for copy.
     * @return true when the copy succeeded, false when the copy itself failed
     * @throws IOException if the source file cannot be read or does not exist
     */
    public boolean copyFileTo(String absoluteFileName) throws Exception {
        if (!this.fileIsReadable) {
            this.openFile();
        }
        File fileDestination = new File(absoluteFileName);
        if (this.currentFile.exists()) {
            if (this.currentFile.canRead()) {
                try {
                    FileUtils.copyFile(this.currentFile, fileDestination);
                    return true;
                } catch (Exception Ex) {
                    // LOGGER.warn("Failed to copy file to '{}'", absoluteFileName);
                    return false;
                }
            } else {
                // LOGGER.error("Unable to read '{}'", this.filePath + this.fileName);
                throw new IOException("Unable to read file " + this.filePath + this.fileName);
            }
        } else {
            // LOGGER.error("'{}' does not exist!", this.filePath + this.fileName);
            // BUG FIX: message was missing the space before "does not exist!".
            throw new IOException(this.filePath + this.fileName + " does not exist!");
        }
    }
}
/* * Copyright 2009-2016 DigitalGlobe, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. * */ package org.mrgeo.resources.wms; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mrgeo.core.MrGeoConstants; import org.mrgeo.junit.IntegrationTest; import org.mrgeo.test.TestUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.core.Response; @SuppressWarnings("all") // Test code, not included in production public class MissingStatsTest extends WmsGeneratorTestAbstract { @SuppressWarnings("unused") private static final Logger log = LoggerFactory.getLogger(MissingStatsTest.class); @BeforeClass public static void setUpForJUnit() { try { baselineInput = TestUtils.composeInputDir(MissingStatsTest.class); WmsGeneratorTestAbstract.setUpForJUnit(); } catch (Exception e) { e.printStackTrace(); } } /* * If no stats have been calculated on an image, a default range of 0.0 to 1.0 is used for the * extrema during color scale application. 
*/ @Test @Category(IntegrationTest.class) public void testGetMapPngNoStats() throws Exception { String contentType = "image/png"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "getmap") .queryParam("LAYERS", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("BBOX", ISLANDS_ELEVATION_V2_IN_BOUNDS_SINGLE_SOURCE_TILE) .queryParam("WIDTH", MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT) .queryParam("HEIGHT", MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT) .request().get(); processImageResponse(response, contentType, "png"); } @Test @Category(IntegrationTest.class) public void testGetMapJpgNoStats() throws Exception { try { String contentType = "image/jpeg"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "getmap") .queryParam("LAYERS", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("BBOX", ISLANDS_ELEVATION_V2_IN_BOUNDS_SINGLE_SOURCE_TILE) .queryParam("WIDTH", MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT) .queryParam("HEIGHT", MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT) .request().get(); processImageResponse(response, contentType, "jpg"); } catch (Exception e) { e.printStackTrace(); throw e; } } @Test @Category(IntegrationTest.class) public void testGetMapTifNoStats() throws Exception { String contentType = "image/tiff"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "getmap") .queryParam("LAYERS", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("BBOX", ISLANDS_ELEVATION_V2_IN_BOUNDS_SINGLE_SOURCE_TILE) .queryParam("WIDTH", MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT) .queryParam("HEIGHT", MrGeoConstants.MRGEO_MRS_TILESIZE_DEFAULT) .request().get(); processImageResponse(response, contentType, "tif"); } /* * If no stats have been calculated on an image, a default range of 0.0 to 1.0 is used for the * extrema during color scale application. 
*/ @Test @Category(IntegrationTest.class) public void testGetMosaicPngNoStats() throws Exception { String contentType = "image/png"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "getmosaic") .queryParam("LAYERS", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("BBOX", ISLANDS_ELEVATION_V2_IN_BOUNDS_SINGLE_SOURCE_TILE) .request().get(); processImageResponse(response, contentType, "png"); } @Test @Category(IntegrationTest.class) public void testGetMosaicJpgNoStats() throws Exception { String contentType = "image/jpeg"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "getmosaic") .queryParam("LAYERS", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("BBOX", ISLANDS_ELEVATION_V2_IN_BOUNDS_SINGLE_SOURCE_TILE) .request().get(); processImageResponse(response, contentType, "jpg"); } @Test @Category(IntegrationTest.class) public void testGetMosaicTifNoStats() throws Exception { String contentType = "image/tiff"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "getmosaic") .queryParam("LAYERS", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("BBOX", ISLANDS_ELEVATION_V2_IN_BOUNDS_SINGLE_SOURCE_TILE) .request().get(); processImageResponse(response, contentType, "tif"); } /* * If no stats have been calculated on an image, a default range of 0.0 to 1.0 is used for the * extrema during color scale application. 
*/ @Test @Category(IntegrationTest.class) public void testGetTilePngNoStats() throws Exception { String contentType = "image/png"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "gettile") .queryParam("LAYER", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("TILEROW", "56") .queryParam("TILECOL", "242") .queryParam("SCALE", "0.0027465820") // zoom level 8 .request().get(); processImageResponse(response, contentType, "png"); } @Test @Category(IntegrationTest.class) public void testGetTileJpgNoStats() throws Exception { String contentType = "image/jpeg"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "gettile") .queryParam("LAYER", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("TILEROW", "56") .queryParam("TILECOL", "242") .queryParam("SCALE", "0.0027465820") // zoom level 8 .request().get(); processImageResponse(response, contentType, "jpg"); } @Test @Category(IntegrationTest.class) public void testGetTileTifNoStats() throws Exception { String contentType = "image/tiff"; Response response = target("wms") .queryParam("SERVICE", "WMS") .queryParam("REQUEST", "gettile") .queryParam("LAYER", "IslandsElevation-v2-no-stats") .queryParam("FORMAT", contentType) .queryParam("TILEROW", "56") .queryParam("TILECOL", "242") .queryParam("SCALE", "0.0027465820") // zoom level 8 .request().get(); processImageResponse(response, contentType, "tif"); } }
/**
 * Copyright (c) 2007-2009, Fintan Fairmichael, University College Dublin under the BSD licence.
 * See LICENCE.TXT for details.
 */
package ie.ucd.clops.dsl.parser;

import java.io.File;
import java.util.Objects;

import org.antlr.runtime.CommonToken;
import org.antlr.runtime.Token;

/**
 * A location in a source file: line, column, and (when available from ANTLR
 * {@link CommonToken}s) absolute start/end character offsets. A null source file
 * represents standard input.
 */
public class SourceLocation implements Comparable<SourceLocation> {

    /** Sentinel for an unknown line/column/offset. */
    public static final int UNKNOWN = -1;
    public static final SourceLocation NO_LOCATION =
            new SourceLocation(null, UNKNOWN, UNKNOWN, UNKNOWN, UNKNOWN);
    public static final String STDIN_TEXT = "<stdin>";

    private final File sourceFile;
    private int lineNumber;
    private int charPositionInLine;
    private int absoluteCharPositionStart;
    private int absoluteCharPositionEnd;

    public SourceLocation(File sourceFile, int lineNumber, int charPositionInLine,
                          int absoluteCharPositionStart, int absoluteCharPositionEnd) {
        this.sourceFile = sourceFile;
        this.lineNumber = lineNumber;
        this.charPositionInLine = charPositionInLine;
        this.absoluteCharPositionEnd = absoluteCharPositionEnd;
        this.absoluteCharPositionStart = absoluteCharPositionStart;
    }

    /** Location of a single token. */
    public SourceLocation(Token t, File sourceFile) {
        this(t, t, sourceFile);
    }

    /**
     * Location spanning from the start token to the end token. Absolute offsets are only
     * available when the tokens are {@link CommonToken}s; otherwise they are set to -1.
     */
    public SourceLocation(Token start, Token end, File sourceFile) {
        this.sourceFile = sourceFile;
        this.lineNumber = start.getLine();
        this.charPositionInLine = start.getCharPositionInLine();
        if (start instanceof CommonToken) {
            CommonToken cToken = (CommonToken) start;
            this.absoluteCharPositionStart = cToken.getStartIndex();
        } else {
            this.absoluteCharPositionStart = -1;
        }
        if (end instanceof CommonToken) {
            CommonToken cToken = (CommonToken) end;
            this.absoluteCharPositionEnd = cToken.getStopIndex();
        } else {
            this.absoluteCharPositionEnd = -1;
        }
    }

    /** Location spanning two other locations; the file is taken from {@code start}. */
    public SourceLocation(SourceLocation start, SourceLocation end) {
        this(start.sourceFile, start.lineNumber, start.charPositionInLine,
                start.absoluteCharPositionStart, end.absoluteCharPositionEnd);
    }

    public void setStartToken(Token start) {
        this.lineNumber = start.getLine();
        this.charPositionInLine = start.getCharPositionInLine();
        if (start instanceof CommonToken) {
            this.absoluteCharPositionStart = ((CommonToken) start).getStartIndex();
        } else {
            this.absoluteCharPositionStart = -1;
        }
    }

    public void setEndToken(Token end) {
        if (end instanceof CommonToken) {
            this.absoluteCharPositionEnd = ((CommonToken) end).getStopIndex();
        } else {
            this.absoluteCharPositionEnd = -1;
        }
    }

    public final File getSourceFile() {
        return sourceFile;
    }

    // NOTE(review): this uses the literal "stdin" while getFilePath() uses STDIN_TEXT
    // ("<stdin>") for the same case; kept as-is since callers may depend on either string.
    public final String getSourceFilePath() {
        return sourceFile != null ? sourceFile.getPath() : "stdin";
    }

    public final String getFileName() {
        return sourceFile == null ? null : sourceFile.getName();
    }

    public final int getLineNumber() {
        return lineNumber;
    }

    public final int getCharPositionInLine() {
        return charPositionInLine;
    }

    public final int getAbsoluteCharPositionStart() {
        return absoluteCharPositionStart;
    }

    public final int getAbsoluteCharPositionEnd() {
        return absoluteCharPositionEnd;
    }

    @Override
    public String toString() {
        return "File: " + (sourceFile != null ? sourceFile.getPath() : "stdin") + ", line: "
                + lineNumber + ", char: " + charPositionInLine;
    }

    public String getFilePath() {
        return getFilePath(sourceFile);
    }

    public static String getFilePath(File file) {
        if (file == null) {
            return STDIN_TEXT;
        } else {
            return file.getPath();
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof SourceLocation)) {
            return false;
        }
        SourceLocation other = (SourceLocation) obj;
        // BUG FIX: the previous check ignored other.sourceFile whenever this.sourceFile was
        // null, making equals() asymmetric (a.equals(b) != b.equals(a)). Objects.equals is
        // null-safe and symmetric.
        if (!Objects.equals(this.sourceFile, other.sourceFile)) {
            return false;
        }
        return this.lineNumber == other.lineNumber
                && this.charPositionInLine == other.charPositionInLine
                && this.absoluteCharPositionStart == other.absoluteCharPositionStart
                && this.absoluteCharPositionEnd == other.absoluteCharPositionEnd;
    }

    // NOTE(review): this ordering ignores the absolute offsets, so compareTo() == 0 does not
    // imply equals() - the ordering is "inconsistent with equals" in the Comparable sense.
    public int compareTo(SourceLocation o) {
        if (o == null) {
            return 1;
        }
        int fileCompare = this.compareFile(o);
        if (fileCompare != 0) {
            return fileCompare;
        }
        int lineNumberCompare = this.compareLineNumber(o);
        if (lineNumberCompare != 0) {
            return lineNumberCompare;
        }
        int charPositionCompare = this.compareCharacterPosition(o);
        if (charPositionCompare != 0) {
            return charPositionCompare;
        }
        return 0;
    }

    private int compareFile(final SourceLocation o) {
        //General errors not involving a specific file...
        if (sourceFile == null) {
            return o.sourceFile == null ? 0 : -1;
        } else if (o.sourceFile == null) {
            return 1;
        }
        //Compare file name
        return this.getFileName().compareTo(o.getFileName());
    }

    private int compareLineNumber(final SourceLocation o) {
        //Compare line number; UNKNOWN sorts before any known line.
        if (this.getLineNumber() == UNKNOWN) {
            return o.getLineNumber() == UNKNOWN ? 0 : -1;
        } else if (o.getLineNumber() == UNKNOWN) {
            return 1;
        } else {
            // Integer.compare avoids the overflow possible with plain subtraction.
            return Integer.compare(this.getLineNumber(), o.getLineNumber());
        }
    }

    private int compareCharacterPosition(final SourceLocation o) {
        //Compare character position; UNKNOWN sorts before any known position.
        if (this.getCharPositionInLine() == UNKNOWN) {
            return o.getCharPositionInLine() == UNKNOWN ? 0 : -1;
        } else if (o.getCharPositionInLine() == UNKNOWN) {
            return 1;
        } else {
            // Integer.compare avoids the overflow possible with plain subtraction.
            return Integer.compare(this.getCharPositionInLine(), o.getCharPositionInLine());
        }
    }

    @Override
    public int hashCode() {
        return (this.sourceFile == null ? 0 : this.sourceFile.hashCode())
                + (this.lineNumber * 1024 * 1024)
                + (this.absoluteCharPositionStart * 1024)
                + this.charPositionInLine;
    }
}
package ru.qatools.properties;

import ru.qatools.properties.converters.ConversionException;
import ru.qatools.properties.converters.Converter;
import ru.qatools.properties.converters.ConverterManager;
import ru.qatools.properties.internal.PropertiesProxy;
import ru.qatools.properties.internal.PropertyInfo;
import ru.qatools.properties.providers.DefaultPropertyProvider;
import ru.qatools.properties.providers.PropertyProvider;

import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;

/**
 * Loads property values into annotated beans (field injection) or builds dynamic
 * proxies for annotated configuration interfaces (method-level resolution).
 *
 * @author Dmitry Baev charlie@yandex-team.ru
 *         Eroshenko Artem eroshenkoam@yandex-team.ru
 *         Date: 09.04.15
 */
public class PropertyLoader {

    // Class loader used to locate property resources; configurable via setClassLoader.
    protected ClassLoader classLoader = getClass().getClassLoader();

    // Fallback values; provider-supplied properties override these in compileProperties.
    protected Properties defaults = new Properties();

    // Merged view of defaults + provider output. NOTE(review): this accumulates across
    // successive populate(...) calls on the same loader instance — confirm that reuse
    // across unrelated classes is intended.
    protected Properties compiled = new Properties();

    // Strategy that supplies the raw properties for a given class.
    protected PropertyProvider propertyProvider = new DefaultPropertyProvider();

    // Registry of type converters used by convertValue.
    protected final ConverterManager manager = new ConverterManager();

    /**
     * Do not instance this class by yourself. Use {@link #newInstance()} instead.
     */
    PropertyLoader() {
    }

    /**
     * Populate given bean using properties from {@link #defaults} and
     * {@link PropertyProvider#provide(ClassLoader, Class)}.
     * Walks the whole class hierarchy (excluding Object) so inherited
     * annotated fields are filled as well.
     */
    public void populate(Object bean) {
        Objects.requireNonNull(bean);
        compileProperties(bean.getClass());
        Class<?> clazz = bean.getClass();
        while (clazz != Object.class) {
            Map<Field, PropertyInfo> propertyInfoMap = resolve(clazz.getDeclaredFields());
            for (Field field : propertyInfoMap.keySet()) {
                PropertyInfo info = propertyInfoMap.get(field);
                setValueToField(field, bean, info.getValue());
            }
            clazz = clazz.getSuperclass();
        }
    }

    /**
     * Creates a proxy for the given configuration interface. The class itself is
     * registered as "already resolved" to guard against direct self-recursion.
     */
    public <T> T populate(Class<T> clazz) {
        Objects.requireNonNull(clazz);
        compileProperties(clazz);
        Set<Class> resolvedConfigs = new HashSet<>();
        resolvedConfigs.add(clazz);
        return populate(clazz, resolvedConfigs);
    }

    /**
     * Shortcut for {@link #populate(String, Class, Set)} with no key prefix.
     */
    public <T> T populate(Class<T> clazz, Set<Class> resolvedConfigs) {
        return populate(null, clazz, resolvedConfigs);
    }

    /**
     * Creates a proxy instance of given configuration. Each interface method is
     * resolved to a {@link PropertyInfo} up front; the proxy then serves those
     * precomputed values.
     */
    public <T> T populate(String prefix, Class<T> clazz, Set<Class> resolvedConfigs) {
        checkConfigurationClass(clazz);
        Map<Method, PropertyInfo> properties = resolve(prefix, clazz.getMethods(), resolvedConfigs);
        //noinspection unchecked
        return (T) Proxy.newProxyInstance(getClass().getClassLoader(),
                new Class[]{clazz}, new PropertiesProxy(properties));
    }

    /**
     * Set given value to specified field of given object.
     *
     * @throws PropertyLoaderException if some exceptions occurs during reflection calls.
     * @see Field#setAccessible(boolean)
     * @see Field#set(Object, Object)
     */
    protected void setValueToField(Field field, Object bean, Object value) {
        try {
            field.setAccessible(true);
            field.set(bean, value);
        } catch (Exception e) {
            throw new PropertyLoaderException(
                    String.format("Can not set bean <%s> field <%s> value", bean, field), e
            );
        }
    }

    /**
     * Check that given class is interface; only interfaces can be proxied.
     */
    protected void checkConfigurationClass(Class<?> clazz) {
        if (!clazz.isInterface()) {
            throw new PropertyLoaderException(clazz + " is not an interface");
        }
    }

    /**
     * Compile properties to {@link #compiled} field.
     * Defaults go in first so provider-supplied values take precedence.
     */
    protected void compileProperties(Class<?> clazz) {
        compiled.putAll(defaults);
        compiled.putAll(propertyProvider.provide(classLoader, clazz));
    }

    /**
     * Shortcut for {@link #resolve(String, AnnotatedElement[], Set)} with no prefix
     * and no already-resolved configs.
     */
    protected <T extends AnnotatedElement> Map<T, PropertyInfo> resolve(T[] elements) {
        return resolve(null, elements, Collections.<Class>emptySet());
    }

    /**
     * Return {@link PropertyInfo} for each of given elements. Handles both plain
     * {@code @Property} elements and nested {@code @Config} elements; any failure
     * is rethrown wrapped with the offending element for context.
     */
    protected <T extends AnnotatedElement> Map<T, PropertyInfo> resolve(String keyPrefix, T[] elements,
                                                                       Set<Class> resolvedConfigs) {
        Map<T, PropertyInfo> result = new HashMap<>();
        for (T element : elements) {
            try {
                result.putAll(resolveProperty(keyPrefix, element));
                result.putAll(resolveConfig(keyPrefix, element, resolvedConfigs));
            } catch (PropertyLoaderException e) {
                throw new PropertyLoaderException(String.format("Error while process %s", element), e);
            }
        }
        return result;
    }

    /**
     * Resolve the property for given element. Returns an empty map when the element
     * is not annotated with {@code @Property} or when no value (and no default)
     * exists and the property is not required.
     */
    private <T extends AnnotatedElement> Map<T, PropertyInfo> resolveProperty(String keyPrefix, T element) {
        Map<T, PropertyInfo> result = new HashMap<>();
        if (!shouldDecorate(element)) {
            return result;
        }
        String key = getKey(keyPrefix, element);
        String defaultValue = getPropertyDefaultValue(element);
        String stringValue = compiled.getProperty(key, defaultValue);
        if (stringValue == null) {
            // Missing value: fail only if the element is marked @Required.
            checkRequired(key, element);
            return result;
        }
        Object value = convertValue(element, stringValue);
        result.put(element, new PropertyInfo(key, stringValue, value));
        return result;
    }

    /**
     * Resolve the config for given element: builds a nested proxy for elements
     * annotated with {@code @Config}, using the concatenated key prefix.
     * NOTE(review): {@code resolvedConfigs} is a shared mutable set — two sibling
     * fields of the same config type would be reported as "recursive"; confirm
     * this is the intended guard semantics.
     */
    private <T extends AnnotatedElement> Map<T, PropertyInfo> resolveConfig(String keyPrefix, T element,
                                                                            Set<Class> resolvedConfigs) {
        Map<T, PropertyInfo> result = new HashMap<>();
        if (!element.isAnnotationPresent(Config.class)) {
            return result;
        }
        String prefix = concat(keyPrefix, element.getAnnotation(Config.class).prefix());
        Class<?> returnType = getValueType(element);
        checkRecursiveConfigs(resolvedConfigs, returnType);
        resolvedConfigs.add(returnType);
        Object proxy = populate(prefix, returnType, resolvedConfigs);
        result.put(element, new PropertyInfo(proxy));
        return result;
    }

    /**
     * Returns true if given annotatedElement should be decorated,
     * false otherwise (i.e. it carries a {@code @Property} annotation).
     */
    protected boolean shouldDecorate(AnnotatedElement element) {
        return element.isAnnotationPresent(Property.class);
    }

    /**
     * Throws an exception if already meet the config (cycle guard for nested configs).
     */
    private void checkRecursiveConfigs(Set<Class> resolvedConfigs, Class<?> configClass) {
        if (resolvedConfigs.contains(configClass)) {
            throw new PropertyLoaderException(String.format("Recursive configuration <%s>", configClass));
        }
    }

    /**
     * Throws an exception if given element is required.
     *
     * @see #isRequired(AnnotatedElement)
     */
    protected void checkRequired(String key, AnnotatedElement element) {
        if (isRequired(element)) {
            throw new PropertyLoaderException(String.format("Required property <%s> doesn't exists", key));
        }
    }

    /**
     * Returns true if annotatedElement marked as required with {@link Required}.
     */
    protected boolean isRequired(AnnotatedElement element) {
        return element.isAnnotationPresent(Required.class);
    }

    /**
     * Get the default value for given element, or null when no {@link DefaultValue}
     * annotation is present.
     */
    protected String getPropertyDefaultValue(AnnotatedElement element) {
        if (element.isAnnotationPresent(DefaultValue.class)) {
            return element.getAnnotation(DefaultValue.class).value();
        }
        return null;
    }

    /**
     * Get property key for specified element with given prefix. Annotation {@link Property} should
     * be present.
     */
    protected String getKey(String prefix, AnnotatedElement element) {
        String value = element.getAnnotation(Property.class).value();
        return concat(prefix, value);
    }

    /**
     * Concat the given prefixes into one, joining with a dot; a null first part
     * yields the second part unchanged.
     */
    protected String concat(String first, String second) {
        return first == null ? second : String.format("%s.%s", first, second);
    }

    /**
     * Convert given value to specified type. If given element annotated with {@link Use} annotation
     * use {@link #getConverterForElementWithUseAnnotation(AnnotatedElement)} converter, otherwise
     * if element has collection type convert collection and finally try to convert element
     * using registered converters.
     */
    protected Object convertValue(AnnotatedElement element, String value) {
        Class<?> type = getValueType(element);
        Type genericType = getValueGenericType(element);
        try {
            if (element.isAnnotationPresent(Use.class)) {
                // @Use wins over any registered converter.
                Converter converter = getConverterForElementWithUseAnnotation(element);
                return converter.convert(value);
            }
            if (Collection.class.isAssignableFrom(type)) {
                return manager.convert(type, getCollectionElementType(genericType), value);
            }
            return manager.convert(type, value);
        } catch (Exception e) {
            throw new PropertyLoaderException(String.format(
                    "Can't convert value <%s> to type <%s>", value, type), e);
        }
    }

    /**
     * Returns the type of the value for given element. {@link Field} and {@link Method}
     * are only supported.
     */
    protected Class<?> getValueType(AnnotatedElement element) {
        if (element instanceof Field) {
            return ((Field) element).getType();
        }
        if (element instanceof Method) {
            return ((Method) element).getReturnType();
        }
        throw new PropertyLoaderException("Could not get element type");
    }

    /**
     * Returns the generic type of the value for given element. {@link Field} and {@link Method}
     * are only supported.
     */
    protected Type getValueGenericType(AnnotatedElement element) {
        if (element instanceof Field) {
            return ((Field) element).getGenericType();
        }
        if (element instanceof Method) {
            return ((Method) element).getGenericReturnType();
        }
        throw new PropertyLoaderException("Could not get generic type for element");
    }

    /**
     * Get collection element type for given type. Given type type should
     * be assignable from {@link Collection}. For collections without
     * generic returns {@link String}.
     */
    protected Class<?> getCollectionElementType(Type genericType) throws ConversionException {
        if (genericType instanceof ParameterizedType) {
            ParameterizedType parameterizedType = (ParameterizedType) genericType;
            Type[] typeArguments = parameterizedType.getActualTypeArguments();
            if (typeArguments.length != 1) {
                throw new ConversionException("Types with more then one generic are not supported");
            }
            Type type = typeArguments[0];
            if (type instanceof Class) {
                return (Class<?>) type;
            }
            // Wildcards / nested generics are not resolvable here.
            throw new ConversionException(String.format("Could not resolve generic type <%s>", type));
        }
        return String.class;
    }

    /**
     * Returns new instance of converter specified in {@link Use} annotation for
     * given element.
     * NOTE(review): relies on {@code Class.newInstance()} (deprecated since Java 9)
     * and therefore requires a public no-arg constructor on the converter.
     *
     * @param element given element with {@link Use} annotation.
     */
    protected Converter getConverterForElementWithUseAnnotation(AnnotatedElement element) {
        Class<? extends Converter> clazz = element.getAnnotation(Use.class).value();
        try {
            return clazz.newInstance();
        } catch (InstantiationException | IllegalAccessException e) {
            throw new PropertyLoaderException(String.format(
                    "Can't instance converter <%s>", clazz), e);
        }
    }

    /**
     * Register custom converter for given type. Returns this loader for chaining.
     */
    public <T> PropertyLoader register(Converter<T> converter, Class<T> type) {
        manager.register(type, converter);
        return this;
    }

    /** Returns the merged defaults + provider properties computed so far. */
    public Properties getCompiled() {
        return compiled;
    }

    public ClassLoader getClassLoader() {
        return classLoader;
    }

    /**
     * @see #classLoader
     */
    public void setClassLoader(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }

    /**
     * Fluent-api builder.
     *
     * @see #setClassLoader(ClassLoader)
     */
    public PropertyLoader withClassLoader(ClassLoader classLoader) {
        setClassLoader(classLoader);
        return this;
    }

    public Properties getDefaults() {
        return defaults;
    }

    /**
     * @see #defaults
     */
    public void setDefaults(Properties defaults) {
        this.defaults = defaults;
    }

    /**
     * Fluent-api builder.
     *
     * @see #setDefaults(Properties)
     */
    public PropertyLoader withDefaults(Properties defaults) {
        setDefaults(defaults);
        return this;
    }

    public PropertyProvider getPropertyProvider() {
        return propertyProvider;
    }

    /**
     * @see #propertyProvider
     */
    public void setPropertyProvider(PropertyProvider propertyProvider) {
        this.propertyProvider = propertyProvider;
    }

    /**
     * Fluent-api builder.
     *
     * @see #setPropertyProvider(PropertyProvider)
     */
    public PropertyLoader withPropertyProvider(PropertyProvider propertyProvider) {
        setPropertyProvider(propertyProvider);
        return this;
    }

    /**
     * Do not instance class by yourself. Use this builder.
     */
    public static PropertyLoader newInstance() {
        return new PropertyLoader();
    }
}
package io.branch.indexing;

import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.text.TextUtils;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

/**
 * Created by sojanpr on 6/14/16.
 * <p>
 * The content discovery manifest is the instruction set for how the Branch SDK will *optionally* and automatically discover content
 * within your app. It parses the configuration from the server, which will tell the client whether it's eligible for content discovery.
 * This manifest is then used to inform the ContentDiscover class's behavior.
 *
 * Note that this behavior can be controlled from the dashboard.
 * </p>
 */
public class ContentDiscoveryManifest {
    // Lazily-created singleton. NOTE(review): getInstance() is not synchronized;
    // confirm it is only ever called from the main thread, otherwise two instances
    // could briefly be created under contention.
    private static ContentDiscoveryManifest thisInstance_;
    /* JsonObject representation for the CD manifest */
    private JSONObject cdManifestObject_;
    /* Manifest version number */
    private String manifestVersion_;
    /* Max length for an individual text item */
    private int maxTextLen_ = 0;
    /* Max num of views to do content discovery in a session */
    private int maxViewHistoryLength_ = 1;
    /* Maximum size of CD data payload per requests updating CD data to server */
    private int maxPacketSize_ = 0;
    /* Specifies if CD is enabled for this session */
    private boolean isCDEnabled_ = false;
    /* Json Array for the content path object and the filtered views for this application */
    private JSONArray contentPaths_;

    // Wire-format keys used in the server manifest JSON; single letters to keep payloads small.
    public static final String MANIFEST_VERSION_KEY = "mv";
    public static final String PACKAGE_NAME_KEY = "pn";
    static final String HASH_MODE_KEY = "h";
    private static final String MANIFEST_KEY = "m";
    private static final String PATH_KEY = "p";
    private static final String FILTERED_KEYS = "ck";
    private static final String MAX_TEXT_LEN_KEY = "mtl";
    private static final String MAX_VIEW_HISTORY_LENGTH = "mhl";
    private static final String MAX_PACKET_SIZE_KEY = "mps";
    public static final String CONTENT_DISCOVER_KEY = "cd";
    private static final String DISCOVERY_REPEAT_INTERVAL = "dri";
    private static final String MAX_DISCOVERY_REPEAT = "mdr";

    static final int DEF_MAX_DISCOVERY_REPEAT = 15; // Default Maximum number for discovery repeat
    static final int DRI_MINIMUM_THRESHOLD = 500; // Minimum value for Discovery repeat interval

    // Backing store used to persist the manifest between app launches.
    private SharedPreferences sharedPref;
    private final String PREF_KEY = "BNC_CD_MANIFEST";

    private ContentDiscoveryManifest(Context context) {
        sharedPref = context.getSharedPreferences("bnc_content_discovery_manifest_storage", Context.MODE_PRIVATE);
        // Restore any manifest persisted by a previous session.
        retrieve(context);
    }

    /** Returns the process-wide manifest instance, creating it on first call. */
    public static ContentDiscoveryManifest getInstance(Context context) {
        if (thisInstance_ == null) {
            thisInstance_ = new ContentDiscoveryManifest(context);
        }
        return thisInstance_;
    }

    /** Writes the current manifest JSON to shared preferences (asynchronously, via apply()). */
    private void persist() {
        SharedPreferences.Editor editor = sharedPref.edit();
        editor.putString(PREF_KEY, cdManifestObject_.toString()).apply();
    }

    /**
     * Restores the persisted manifest, if any. On parse failure (or first run)
     * falls back to an empty manifest object.
     */
    private void retrieve(Context context) {
        String jsonStr = sharedPref.getString(PREF_KEY, null);
        if (jsonStr != null) {
            try {
                cdManifestObject_ = new JSONObject(jsonStr);
                if (cdManifestObject_.has(MANIFEST_VERSION_KEY)) {
                    manifestVersion_ = cdManifestObject_.getString(MANIFEST_VERSION_KEY);
                }
                if (cdManifestObject_.has(MANIFEST_KEY)) {
                    contentPaths_ = cdManifestObject_.getJSONArray(MANIFEST_KEY);
                }
            } catch (JSONException ignored) {
                // Corrupt persisted state: start over with an empty manifest.
                cdManifestObject_ = new JSONObject();
            }
        } else {
            cdManifestObject_ = new JSONObject();
        }
    }

    /**
     * Updates this manifest from the Branch init response. CD is considered enabled
     * only when the response carries a "cd" object; the parsed manifest is then
     * persisted for the next session. JSON errors are deliberately swallowed —
     * content discovery is best-effort and must never break init handling.
     */
    public void onBranchInitialised(JSONObject branchInitResp) {
        if (branchInitResp.has(CONTENT_DISCOVER_KEY)) {
            isCDEnabled_ = true;
            try {
                JSONObject cdObj = branchInitResp.getJSONObject(CONTENT_DISCOVER_KEY);
                if (cdObj.has(MANIFEST_VERSION_KEY)) {
                    manifestVersion_ = cdObj.getString(MANIFEST_VERSION_KEY);
                }
                if (cdObj.has(MAX_VIEW_HISTORY_LENGTH)) {
                    maxViewHistoryLength_ = cdObj.getInt(MAX_VIEW_HISTORY_LENGTH);
                }
                if (cdObj.has(MANIFEST_KEY)) {
                    contentPaths_ = cdObj.getJSONArray(MANIFEST_KEY);
                }
                if (cdObj.has(MAX_TEXT_LEN_KEY)) {
                    int maxTextLength = cdObj.getInt(MAX_TEXT_LEN_KEY);
                    // Ignore non-positive lengths; keep the previous/default value.
                    if (maxTextLength > 0) {
                        maxTextLen_ = maxTextLength;
                    }
                }
                if (cdObj.has(MAX_PACKET_SIZE_KEY)) {
                    maxPacketSize_ = cdObj.getInt(MAX_PACKET_SIZE_KEY);
                }
                cdManifestObject_.put(MANIFEST_VERSION_KEY, manifestVersion_);
                cdManifestObject_.put(MANIFEST_KEY, contentPaths_);
                persist();
            } catch (JSONException ignore) {
            }
        } else {
            isCDEnabled_ = false;
        }
    }

    /**
     * Finds the manifest path entry matching the given activity (matched by
     * "/SimpleClassName"), or null when the activity is not listed.
     */
    CDPathProperties getCDPathProperties(Activity activity) {
        CDPathProperties pathProperties = null;
        if (contentPaths_ != null) {
            String viewPath = "/" + activity.getClass().getSimpleName();
            try {
                for (int i = 0; i < contentPaths_.length(); i++) {
                    JSONObject pathObj = contentPaths_.getJSONObject(i);
                    if (pathObj.has(PATH_KEY) && pathObj.getString(PATH_KEY).equals(viewPath)) {
                        pathProperties = new CDPathProperties(pathObj);
                        break;
                    }
                }
            } catch (JSONException ignore) {
            }
        }
        return pathProperties;
    }

    boolean isCDEnabled() {
        return isCDEnabled_;
    }

    int getMaxTextLen() {
        return maxTextLen_;
    }

    int getMaxPacketSize() {
        return maxPacketSize_;
    }

    int getMaxViewHistorySize() {
        return maxViewHistoryLength_;
    }

    /** Returns the manifest version, or "-1" when none has been received yet. */
    public String getManifestVersion() {
        if (TextUtils.isEmpty(manifestVersion_)) {
            return "-1";
        }
        return manifestVersion_;
    }

    /**
     * Per-path discovery settings parsed from one entry of the manifest's path array.
     */
    class CDPathProperties {
        final JSONObject pathInfo_;
        // True when hashing is disabled for this path (server sent h=false).
        private boolean isClearText_;
        private int discoveryRepeatInterval_;
        private int maxDiscoveryRepeat_;

        int getDiscoveryRepeatInterval() {
            return discoveryRepeatInterval_;
        }

        int getMaxDiscoveryRepeatNumber() {
            return maxDiscoveryRepeat_;
        }

        CDPathProperties(JSONObject pathInfo) {
            pathInfo_ = pathInfo;
            maxDiscoveryRepeat_ = DEF_MAX_DISCOVERY_REPEAT;
            if (pathInfo.has(HASH_MODE_KEY)) {
                try {
                    // Hash mode true means hashed content, hence clear-text is its negation.
                    isClearText_ = !pathInfo.getBoolean(HASH_MODE_KEY);
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }
            try {
                if (pathInfo.has(DISCOVERY_REPEAT_INTERVAL)) {
                    discoveryRepeatInterval_ = pathInfo.getInt(DISCOVERY_REPEAT_INTERVAL);
                }
                if (pathInfo.has(MAX_DISCOVERY_REPEAT)) {
                    maxDiscoveryRepeat_ = pathInfo.getInt(MAX_DISCOVERY_REPEAT);
                }
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }

        /** Returns the filtered-element key array for this path, or null when absent. */
        JSONArray getFilteredElements() {
            JSONArray elementArray = null;
            if (pathInfo_.has(FILTERED_KEYS)) {
                try {
                    elementArray = pathInfo_.getJSONArray(FILTERED_KEYS);
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }
            return elementArray;
        }

        boolean isClearTextRequested() {
            return isClearText_;
        }

        /**
         * An explicitly-empty filter array means "discover nothing" for this path.
         */
        boolean isSkipContentDiscovery() {
            JSONArray filteredElements = getFilteredElements();
            return filteredElements != null && filteredElements.length() == 0;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.webmonitor.handlers.checkpoints;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.api.common.JobID;
import org.apache.flink.runtime.checkpoint.AbstractCheckpointStats;
import org.apache.flink.runtime.checkpoint.CheckpointProperties;
import org.apache.flink.runtime.checkpoint.CheckpointStatsHistory;
import org.apache.flink.runtime.checkpoint.CheckpointStatsSnapshot;
import org.apache.flink.runtime.checkpoint.CheckpointStatsStatus;
import org.apache.flink.runtime.checkpoint.CompletedCheckpointStats;
import org.apache.flink.runtime.checkpoint.FailedCheckpointStats;
import org.apache.flink.runtime.checkpoint.PendingCheckpointStats;
import org.apache.flink.runtime.checkpoint.TaskStateStats;
import org.apache.flink.runtime.executiongraph.AccessExecutionGraph;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.webmonitor.ExecutionGraphHolder;
import org.apache.flink.runtime.webmonitor.history.ArchivedJson;
import org.apache.flink.runtime.webmonitor.history.JsonArchivist;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;

import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Tests for the checkpoint-details REST handler: JSON archiving, path registration,
 * parameter validation, and the JSON produced for pending/completed/failed checkpoints.
 * All Flink runtime objects are Mockito mocks wired with fixed values.
 */
public class CheckpointStatsDetailsHandlerTest {

    /**
     * The archivist should emit one archived JSON document per checkpoint in the
     * history, keyed by the REST path of that checkpoint, with the same content
     * the live handler would produce.
     */
    @Test
    public void testArchiver() throws IOException {
        JsonArchivist archivist = new CheckpointStatsDetailsHandler.CheckpointStatsDetailsJsonArchivist();

        CompletedCheckpointStats completedCheckpoint = createCompletedCheckpoint();
        FailedCheckpointStats failedCheckpoint = createFailedCheckpoint();
        List<AbstractCheckpointStats> checkpoints = new ArrayList<>();
        checkpoints.add(failedCheckpoint);
        checkpoints.add(completedCheckpoint);

        CheckpointStatsHistory history = mock(CheckpointStatsHistory.class);
        when(history.getCheckpoints()).thenReturn(checkpoints);
        CheckpointStatsSnapshot snapshot = mock(CheckpointStatsSnapshot.class);
        when(snapshot.getHistory()).thenReturn(history);

        AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
        when(graph.getCheckpointStatsSnapshot()).thenReturn(snapshot);
        when(graph.getJobID()).thenReturn(new JobID());

        ObjectMapper mapper = new ObjectMapper();

        Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(graph);
        Assert.assertEquals(2, archives.size());

        // Archives come back in the same order as the history list: failed first.
        Iterator<ArchivedJson> iterator = archives.iterator();
        ArchivedJson archive1 = iterator.next();
        Assert.assertEquals(
                "/jobs/" + graph.getJobID() + "/checkpoints/details/" + failedCheckpoint.getCheckpointId(),
                archive1.getPath());
        compareFailedCheckpoint(failedCheckpoint, mapper.readTree(archive1.getJson()));

        ArchivedJson archive2 = iterator.next();
        Assert.assertEquals(
                "/jobs/" + graph.getJobID() + "/checkpoints/details/" + completedCheckpoint.getCheckpointId(),
                archive2.getPath());
        compareCompletedCheckpoint(completedCheckpoint, mapper.readTree(archive2.getJson()));
    }

    /** The handler must register exactly its one REST path. */
    @Test
    public void testGetPaths() {
        CheckpointStatsDetailsHandler handler = new CheckpointStatsDetailsHandler(mock(ExecutionGraphHolder.class), new CheckpointStatsCache(0));
        String[] paths = handler.getPaths();
        Assert.assertEquals(1, paths.length);
        Assert.assertEquals("/jobs/:jobid/checkpoints/details/:checkpointid", paths[0]);
    }

    /**
     * Tests request with illegal checkpoint ID param: a non-numeric ID yields an
     * empty JSON object rather than an error.
     */
    @Test
    public void testIllegalCheckpointId() throws Exception {
        AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
        CheckpointStatsDetailsHandler handler = new CheckpointStatsDetailsHandler(mock(ExecutionGraphHolder.class), new CheckpointStatsCache(0));
        Map<String, String> params = new HashMap<>();
        params.put("checkpointid", "illegal checkpoint");
        String json = handler.handleRequest(graph, params);

        assertEquals("{}", json);
    }

    /**
     * Tests request with missing checkpoint ID param: same empty-object response.
     */
    @Test
    public void testNoCheckpointIdParam() throws Exception {
        AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
        CheckpointStatsDetailsHandler handler = new CheckpointStatsDetailsHandler(mock(ExecutionGraphHolder.class), new CheckpointStatsCache(0));
        String json = handler.handleRequest(graph, Collections.<String, String>emptyMap());

        assertEquals("{}", json);
    }

    /**
     * Test lookup of not existing checkpoint in history: the history must be
     * consulted exactly once and the response is the empty object.
     */
    @Test
    public void testCheckpointNotFound() throws Exception {
        CheckpointStatsHistory history = mock(CheckpointStatsHistory.class);
        when(history.getCheckpointById(anyLong())).thenReturn(null); // not found

        CheckpointStatsSnapshot snapshot = mock(CheckpointStatsSnapshot.class);
        when(snapshot.getHistory()).thenReturn(history);

        AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
        when(graph.getCheckpointStatsSnapshot()).thenReturn(snapshot);

        CheckpointStatsDetailsHandler handler = new CheckpointStatsDetailsHandler(mock(ExecutionGraphHolder.class), new CheckpointStatsCache(0));
        Map<String, String> params = new HashMap<>();
        params.put("checkpointid", "123");
        String json = handler.handleRequest(graph, params);

        assertEquals("{}", json);
        verify(history, times(1)).getCheckpointById(anyLong());
    }

    /**
     * Tests a checkpoint details request for an in progress checkpoint.
     */
    @Test
    public void testCheckpointDetailsRequestInProgressCheckpoint() throws Exception {
        PendingCheckpointStats checkpoint = mock(PendingCheckpointStats.class);
        when(checkpoint.getCheckpointId()).thenReturn(1992139L);
        when(checkpoint.getStatus()).thenReturn(CheckpointStatsStatus.IN_PROGRESS);
        when(checkpoint.getProperties()).thenReturn(CheckpointProperties.forStandardCheckpoint());
        when(checkpoint.getTriggerTimestamp()).thenReturn(1919191900L);
        when(checkpoint.getLatestAckTimestamp()).thenReturn(1977791901L);
        when(checkpoint.getStateSize()).thenReturn(111939272822L);
        when(checkpoint.getEndToEndDuration()).thenReturn(121191L);
        when(checkpoint.getAlignmentBuffered()).thenReturn(1L);
        when(checkpoint.getNumberOfSubtasks()).thenReturn(501);
        when(checkpoint.getNumberOfAcknowledgedSubtasks()).thenReturn(101);

        List<TaskStateStats> taskStats = new ArrayList<>();
        TaskStateStats task1 = createTaskStateStats();
        TaskStateStats task2 = createTaskStateStats();
        taskStats.add(task1);
        taskStats.add(task2);

        when(checkpoint.getAllTaskStateStats()).thenReturn(taskStats);

        JsonNode rootNode = triggerRequest(checkpoint);

        assertEquals(checkpoint.getCheckpointId(), rootNode.get("id").asLong());
        assertEquals(checkpoint.getStatus().toString(), rootNode.get("status").asText());
        assertEquals(checkpoint.getProperties().isSavepoint(), rootNode.get("is_savepoint").asBoolean());
        assertEquals(checkpoint.getTriggerTimestamp(), rootNode.get("trigger_timestamp").asLong());
        assertEquals(checkpoint.getLatestAckTimestamp(), rootNode.get("latest_ack_timestamp").asLong());
        assertEquals(checkpoint.getStateSize(), rootNode.get("state_size").asLong());
        assertEquals(checkpoint.getEndToEndDuration(), rootNode.get("end_to_end_duration").asLong());
        assertEquals(checkpoint.getAlignmentBuffered(), rootNode.get("alignment_buffered").asLong());
        assertEquals(checkpoint.getNumberOfSubtasks(), rootNode.get("num_subtasks").asInt());
        assertEquals(checkpoint.getNumberOfAcknowledgedSubtasks(), rootNode.get("num_acknowledged_subtasks").asInt());

        verifyTaskNodes(taskStats, rootNode);
    }

    /**
     * Tests a checkpoint details request for a completed checkpoint.
     */
    @Test
    public void testCheckpointDetailsRequestCompletedCheckpoint() throws Exception {
        CompletedCheckpointStats checkpoint = createCompletedCheckpoint();

        JsonNode rootNode = triggerRequest(checkpoint);

        compareCompletedCheckpoint(checkpoint, rootNode);
        verifyTaskNodes(checkpoint.getAllTaskStateStats(), rootNode);
    }

    /**
     * Tests a checkpoint details request for a failed checkpoint.
     */
    @Test
    public void testCheckpointDetailsRequestFailedCheckpoint() throws Exception {
        FailedCheckpointStats checkpoint = createFailedCheckpoint();

        JsonNode rootNode = triggerRequest(checkpoint);

        compareFailedCheckpoint(checkpoint, rootNode);
        verifyTaskNodes(checkpoint.getAllTaskStateStats(), rootNode);
    }

    // ------------------------------------------------------------------------

    /** Builds a fully-stubbed completed-checkpoint mock with two task-state entries. */
    private static CompletedCheckpointStats createCompletedCheckpoint() {
        CompletedCheckpointStats checkpoint = mock(CompletedCheckpointStats.class);
        when(checkpoint.getCheckpointId()).thenReturn(1818213L);
        when(checkpoint.getStatus()).thenReturn(CheckpointStatsStatus.COMPLETED);
        when(checkpoint.getProperties()).thenReturn(CheckpointProperties.forStandardSavepoint());
        when(checkpoint.getTriggerTimestamp()).thenReturn(1818L);
        when(checkpoint.getLatestAckTimestamp()).thenReturn(11029222L);
        when(checkpoint.getStateSize()).thenReturn(925281L);
        when(checkpoint.getEndToEndDuration()).thenReturn(181819L);
        when(checkpoint.getAlignmentBuffered()).thenReturn(1010198L);
        when(checkpoint.getNumberOfSubtasks()).thenReturn(181271);
        when(checkpoint.getNumberOfAcknowledgedSubtasks()).thenReturn(29821);
        when(checkpoint.isDiscarded()).thenReturn(true);
        when(checkpoint.getExternalPath()).thenReturn("checkpoint-external-path");

        List<TaskStateStats> taskStats = new ArrayList<>();
        TaskStateStats task1 = createTaskStateStats();
        TaskStateStats task2 = createTaskStateStats();
        taskStats.add(task1);
        taskStats.add(task2);

        when(checkpoint.getAllTaskStateStats()).thenReturn(taskStats);

        return checkpoint;
    }

    /** Asserts that the JSON node mirrors every stubbed field of the completed checkpoint. */
    private static void compareCompletedCheckpoint(CompletedCheckpointStats checkpoint, JsonNode rootNode) {
        assertEquals(checkpoint.getCheckpointId(), rootNode.get("id").asLong());
        assertEquals(checkpoint.getStatus().toString(), rootNode.get("status").asText());
        assertEquals(checkpoint.getProperties().isSavepoint(), rootNode.get("is_savepoint").asBoolean());
        assertEquals(checkpoint.getTriggerTimestamp(), rootNode.get("trigger_timestamp").asLong());
        assertEquals(checkpoint.getLatestAckTimestamp(), rootNode.get("latest_ack_timestamp").asLong());
        assertEquals(checkpoint.getStateSize(), rootNode.get("state_size").asLong());
        assertEquals(checkpoint.getEndToEndDuration(), rootNode.get("end_to_end_duration").asLong());
        assertEquals(checkpoint.getAlignmentBuffered(), rootNode.get("alignment_buffered").asLong());
        assertEquals(checkpoint.isDiscarded(), rootNode.get("discarded").asBoolean());
        assertEquals(checkpoint.getExternalPath(), rootNode.get("external_path").asText());
        assertEquals(checkpoint.getNumberOfSubtasks(), rootNode.get("num_subtasks").asInt());
        assertEquals(checkpoint.getNumberOfAcknowledgedSubtasks(), rootNode.get("num_acknowledged_subtasks").asInt());
    }

    /** Builds a fully-stubbed failed-checkpoint mock with two task-state entries. */
    private static FailedCheckpointStats createFailedCheckpoint() {
        FailedCheckpointStats checkpoint = mock(FailedCheckpointStats.class);
        when(checkpoint.getCheckpointId()).thenReturn(1818214L);
        when(checkpoint.getStatus()).thenReturn(CheckpointStatsStatus.FAILED);
        when(checkpoint.getProperties()).thenReturn(CheckpointProperties.forStandardSavepoint());
        when(checkpoint.getTriggerTimestamp()).thenReturn(1818L);
        when(checkpoint.getLatestAckTimestamp()).thenReturn(11029222L);
        when(checkpoint.getStateSize()).thenReturn(925281L);
        when(checkpoint.getEndToEndDuration()).thenReturn(181819L);
        when(checkpoint.getAlignmentBuffered()).thenReturn(1010198L);
        when(checkpoint.getNumberOfSubtasks()).thenReturn(181271);
        when(checkpoint.getNumberOfAcknowledgedSubtasks()).thenReturn(29821);
        when(checkpoint.getFailureTimestamp()).thenReturn(123012890312093L);
        when(checkpoint.getFailureMessage()).thenReturn("failure-message");

        List<TaskStateStats> taskStats = new ArrayList<>();
        TaskStateStats task1 = createTaskStateStats();
        TaskStateStats task2 = createTaskStateStats();
        taskStats.add(task1);
        taskStats.add(task2);

        when(checkpoint.getAllTaskStateStats()).thenReturn(taskStats);

        return checkpoint;
    }

    /** Asserts that the JSON node mirrors every stubbed field of the failed checkpoint. */
    private static void compareFailedCheckpoint(FailedCheckpointStats checkpoint, JsonNode rootNode) {
        assertEquals(checkpoint.getCheckpointId(), rootNode.get("id").asLong());
        assertEquals(checkpoint.getStatus().toString(), rootNode.get("status").asText());
        assertEquals(checkpoint.getProperties().isSavepoint(), rootNode.get("is_savepoint").asBoolean());
        assertEquals(checkpoint.getTriggerTimestamp(), rootNode.get("trigger_timestamp").asLong());
        assertEquals(checkpoint.getLatestAckTimestamp(), rootNode.get("latest_ack_timestamp").asLong());
        assertEquals(checkpoint.getStateSize(), rootNode.get("state_size").asLong());
        assertEquals(checkpoint.getEndToEndDuration(), rootNode.get("end_to_end_duration").asLong());
        assertEquals(checkpoint.getAlignmentBuffered(), rootNode.get("alignment_buffered").asLong());
        assertEquals(checkpoint.getFailureTimestamp(), rootNode.get("failure_timestamp").asLong());
        assertEquals(checkpoint.getFailureMessage(), rootNode.get("failure_message").asText());
        assertEquals(checkpoint.getNumberOfSubtasks(), rootNode.get("num_subtasks").asInt());
        assertEquals(checkpoint.getNumberOfAcknowledgedSubtasks(), rootNode.get("num_acknowledged_subtasks").asInt());
    }

    /**
     * Routes the given checkpoint through a real handler (with a mocked history that
     * returns it for any ID) and parses the resulting JSON response.
     */
    private static JsonNode triggerRequest(AbstractCheckpointStats checkpoint) throws Exception {
        CheckpointStatsHistory history = mock(CheckpointStatsHistory.class);
        when(history.getCheckpointById(anyLong())).thenReturn(checkpoint);
        CheckpointStatsSnapshot snapshot = mock(CheckpointStatsSnapshot.class);
        when(snapshot.getHistory()).thenReturn(history);
        AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
        when(graph.getCheckpointStatsSnapshot()).thenReturn(snapshot);

        CheckpointStatsDetailsHandler handler = new CheckpointStatsDetailsHandler(mock(ExecutionGraphHolder.class), new CheckpointStatsCache(0));
        Map<String, String> params = new HashMap<>();
        params.put("checkpointid", "123");
        String json = handler.handleRequest(graph, params);

        ObjectMapper mapper = new ObjectMapper();
        return mapper.readTree(json);
    }

    /**
     * Verifies the per-task sub-objects of the response against the task mocks.
     * NOTE(review): {@code duration} is random but irrelevant — the mock's
     * getEndToEndDuration(anyLong()) stub returns a fixed value for any argument,
     * so the assertion passes regardless; consider removing the randomness.
     */
    private static void verifyTaskNodes(Collection<TaskStateStats> tasks, JsonNode parentNode) {
        for (TaskStateStats task : tasks) {
            long duration = ThreadLocalRandom.current().nextInt(128);

            JsonNode taskNode = parentNode.get("tasks").get(task.getJobVertexId().toString());
            assertEquals(task.getLatestAckTimestamp(), taskNode.get("latest_ack_timestamp").asLong());
            assertEquals(task.getStateSize(), taskNode.get("state_size").asLong());
            assertEquals(task.getEndToEndDuration(task.getLatestAckTimestamp() - duration), taskNode.get("end_to_end_duration").asLong());
            assertEquals(task.getAlignmentBuffered(), taskNode.get("alignment_buffered").asLong());
            assertEquals(task.getNumberOfSubtasks(), taskNode.get("num_subtasks").asInt());
            assertEquals(task.getNumberOfAcknowledgedSubtasks(), taskNode.get("num_acknowledged_subtasks").asInt());
        }
    }

    /** Builds a task-state mock whose numeric fields are random but strictly positive. */
    private static TaskStateStats createTaskStateStats() {
        ThreadLocalRandom rand = ThreadLocalRandom.current();

        TaskStateStats task = mock(TaskStateStats.class);
        when(task.getJobVertexId()).thenReturn(new JobVertexID());
        when(task.getLatestAckTimestamp()).thenReturn(rand.nextLong(1024) + 1);
        when(task.getStateSize()).thenReturn(rand.nextLong(1024) + 1);
        when(task.getEndToEndDuration(anyLong())).thenReturn(rand.nextLong(1024) + 1);
        when(task.getAlignmentBuffered()).thenReturn(rand.nextLong(1024) + 1);
        when(task.getNumberOfSubtasks()).thenReturn(rand.nextInt(1024) + 1);
        when(task.getNumberOfAcknowledgedSubtasks()).thenReturn(rand.nextInt(1024) + 1);
        return task;
    }
}
package com.shapesecurity.functional.data;

import com.shapesecurity.functional.F;
import com.shapesecurity.functional.F2;
import com.shapesecurity.functional.Pair;
import com.shapesecurity.functional.Unit;

import javax.annotation.CheckReturnValue;
import javax.annotation.Nonnull;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import java.util.ArrayList;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collector;

/**
 * A persistent (immutable) set backed by a {@link HashTable} whose values are all
 * {@link Unit}. Element equality is delegated to the table's {@link Hasher}, so two
 * sets are only meaningfully comparable/combinable when built with the same hasher.
 * All mutating-style operations return a new set; the receiver is never modified.
 */
@CheckReturnValue
public class ImmutableSet<T> implements Iterable<T> {
    // Backing table: keys are the set's elements, values are the Unit placeholder.
    @Nonnull
    private final HashTable<T, Unit> data;

    /** Number of elements in this set. */
    public int length() {
        return this.data.length;
    }

    /** The hasher that defines element equality for this set. */
    @Nonnull
    public Hasher<T> hasher() {
        return this.data.hasher;
    }

    // Package-private: sets are created through the static factories below.
    ImmutableSet(@Nonnull HashTable<T, Unit> data) {
        this.data = data;
    }

    /** Empty set using the supplied hasher for element equality. */
    @Nonnull
    public static <T> ImmutableSet<T> empty(@Nonnull Hasher<T> hasher) {
        return new ImmutableSet<>(HashTable.empty(hasher));
    }

    /** Empty set using {@code equals}/{@code hashCode} semantics. */
    @Nonnull
    public static <T> ImmutableSet<T> emptyUsingEquality() {
        return new ImmutableSet<>(HashTable.emptyUsingEquality());
    }

    /** Empty set using reference-identity semantics. */
    @Nonnull
    public static <T> ImmutableSet<T> emptyUsingIdentity() {
        return new ImmutableSet<>(HashTable.emptyUsingIdentity());
    }

    /** Set containing the given elements, compared with the supplied hasher. */
    @Nonnull
    public static <T> ImmutableSet<T> from(@Nonnull Hasher<T> hasher, @Nonnull Iterable<T> set) {
        return empty(hasher).putAll(set);
    }

    /** Set containing the given elements, compared with {@code equals}. */
    @Nonnull
    public static <T> ImmutableSet<T> fromUsingEquality(@Nonnull Iterable<T> set) {
        return ImmutableSet.<T>emptyUsingEquality().putAll(set);
    }

    /** Set containing the given elements, compared by reference identity. */
    @Nonnull
    public static <T> ImmutableSet<T> fromUsingIdentity(@Nonnull Iterable<T> set) {
        return ImmutableSet.<T>emptyUsingIdentity().putAll(set);
    }

    /** Varargs variant of {@link #fromUsingIdentity}. */
    @Nonnull
    @SafeVarargs
    public static <T> ImmutableSet<T> ofUsingIdentity(@Nonnull T... items) {
        return ImmutableSet.<T>emptyUsingIdentity().putArray(items);
    }

    /** Varargs variant of {@link #fromUsingEquality}. */
    @Nonnull
    @SafeVarargs
    public static <T> ImmutableSet<T> ofUsingEquality(@Nonnull T... items) {
        return ImmutableSet.<T>emptyUsingEquality().putArray(items);
    }

    /** Default varargs factory; equality semantics. */
    @Nonnull
    @SafeVarargs
    public static <T> ImmutableSet<T> of(@Nonnull T... items) {
        return ofUsingEquality(items);
    }

    /** @deprecated use {@link #emptyUsingEquality()} instead. */
    @Deprecated
    @Nonnull
    public static <T> ImmutableSet<T> empty() {
        return ImmutableSet.emptyUsingEquality();
    }

    /** @deprecated use {@link #emptyUsingIdentity()} instead. */
    @Deprecated
    @Nonnull
    public static <T> ImmutableSet<T> emptyP() {
        return ImmutableSet.emptyUsingIdentity();
    }

    /** Returns a set that additionally contains {@code datum}. */
    @Nonnull
    public <B extends T> ImmutableSet<T> put(@Nonnull B datum) {
        return new ImmutableSet<>(this.data.put(datum, Unit.unit));
    }

    /** Returns a set that additionally contains every element of {@code list}. */
    @Nonnull
    public <B extends T> ImmutableSet<T> putAll(@Nonnull Iterable<B> list) {
        ImmutableSet<T> set = this;
        for (B item : list) {
            set = set.put(item);
        }
        return set;
    }

    // Kept as a distinct overload to avoid breaking binary compatibility (ABI):
    // older callers link against the ImmutableList-typed signature.
    @Nonnull
    public <B extends T> ImmutableSet<T> putAll(@Nonnull ImmutableList<B> list) {
        return this.putAll((Iterable<B>) list);
    }

    /** Array variant of {@link #putAll(Iterable)}. */
    @SafeVarargs
    @Nonnull
    public final <B extends T> ImmutableSet<T> putArray(@Nonnull B... list) {
        ImmutableSet<T> set = this;
        for (B b : list) {
            set = set.put(b);
        }
        return set;
    }

    /** True if {@code datum} is in this set (per this set's hasher). */
    public boolean contains(@Nonnull T datum) {
        return this.data.containsKey(datum);
    }

    /**
     * Image of this set under {@code f}. The result reuses this set's hasher, so
     * {@code A} must be hashable by it; duplicates under that hasher collapse.
     */
    @Nonnull
    @SuppressWarnings("unchecked")
    public <A> ImmutableSet<A> map(@Nonnull F<T, A> f) {
        return this.foldAbelian((val, acc) -> acc.put(f.apply(val)), ImmutableSet.empty((Hasher<A>) this.data.hasher));
    }

    /**
     * Union of {@code f}'s results over all elements. All produced sets must share
     * this set's hasher; otherwise an {@link UnsupportedOperationException} is thrown.
     */
    @Nonnull
    @SuppressWarnings("unchecked")
    public <A> ImmutableSet<A> flatMap(@Nonnull F<T, ImmutableSet<A>> f) {
        return this.foldAbelian((t, acc) -> {
            ImmutableSet<A> set = f.apply(t);
            if (!set.data.hasher.equals(acc.data.hasher)) {
                throw new UnsupportedOperationException("Hasher mismatch in flatMap.");
            }
            return acc.union(set);
        }, ImmutableSet.empty((Hasher<A>) this.data.hasher));
    }

    /** Subset of elements for which {@code f} returns true. */
    @Nonnull
    public ImmutableSet<T> filter(@Nonnull F<T, Boolean> f) {
        return this.foldAbelian((val, acc) -> f.apply(val) ? acc.put(val) : acc, ImmutableSet.empty(this.data.hasher));
    }

    /** Returns a set without {@code datum}; unchanged contents if it was absent. */
    public ImmutableSet<T> remove(@Nonnull T datum) {
        return new ImmutableSet<>(this.data.remove(datum));
    }

    /**
     * Folds the elements with {@code f}. "Abelian" signals that iteration order is
     * unspecified, so {@code f} must be commutative/associative for a stable result.
     */
    @Nonnull
    public <A> A foldAbelian(@Nonnull F2<T, A, A> f, @Nonnull A init) {
        return this.data.foldRight((p, acc) -> f.apply(p.left, acc), init);
    }

    /** Union of this set and {@code other} (assumed to share this set's hasher). */
    @Nonnull
    public ImmutableSet<T> union(@Nonnull ImmutableSet<T> other) {
        return new ImmutableSet<>(this.data.merge(other.data));
    }

    // Does not guarantee ordering of elements in resulting list.
    @Nonnull
    public ImmutableList<T> toList() {
        return this.foldAbelian((v, acc) -> acc.cons(v), ImmutableList.empty());
    }

    /**
     * Copies this set into a {@link java.util.HashSet}. Only valid for sets built
     * with the equality hasher, since HashSet uses equals/hashCode semantics.
     */
    @Nonnull
    public Set<T> toSet() {
        // Reference comparison: equalityHasher() is expected to be a shared singleton.
        if (this.data.hasher != HashTable.equalityHasher()) {
            throw new UnsupportedOperationException("Cannot call ImmutableSet::toSet on a ImmutableSet without equality hashing.");
        }
        Set<T> set = new HashSet<>();
        this.forEach(set::add);
        return set;
    }

    // NOTE(review): equals is overridden without a matching hashCode, so equal sets may
    // hash differently — instances are unsafe as HashMap/HashSet keys. Consider adding a
    // hasher-consistent hashCode (left unchanged here: doc-only edit).
    @SuppressWarnings("unchecked")
    @Override
    public boolean equals(Object other) {
        return other instanceof ImmutableSet && this.data.length == ((ImmutableSet) other).data.length && this.data.foldLeft((memo, pair) -> memo && ((ImmutableSet) other).data.containsKey(pair.left), true);
    }

    /** Iterates the elements by projecting the keys out of the backing table. */
    @Override
    @Nonnull
    public Iterator<T> iterator() {
        final Iterator<Pair<T, Unit>> mapIterator = this.data.iterator();
        return new Iterator<T>() {
            @Override
            public boolean hasNext() {
                return mapIterator.hasNext();
            }

            @Override
            public T next() {
                return mapIterator.next().left;
            }
        };
    }

    /** Builds a table mapping each element to {@code f(element)}, reusing this hasher. */
    @Nonnull
    public <V> HashTable<T, V> mapToTable(@Nonnull F<T, V> f) {
        HashTable<T, V> table = HashTable.empty(this.data.hasher);
        for (T entry : this) {
            table = table.put(entry, f.apply(entry));
        }
        return table;
    }

    /** Sized spliterator; elements are immutable and never null. */
    @Nonnull
    @Override
    public final Spliterator<T> spliterator() {
        return Spliterators.spliterator(this.iterator(), this.length(), Spliterator.IMMUTABLE | Spliterator.NONNULL);
    }

    /** Sequential stream over the elements. */
    @Nonnull
    public final Stream<T> stream() {
        return StreamSupport.stream(this.spliterator(), false);
    }

    /**
     * A {@link Collector} producing an {@code ImmutableSet} with the given hasher.
     */
    @Nonnull
    public static <T> Collector<T, ?, ImmutableSet<T>> collector(@Nonnull Hasher<T> hasher) {
        // we use a list for state because java doesn't support our Hasher type
        return new Collector<T, ArrayList<T>, ImmutableSet<T>>() {
            @Override
            public Supplier<ArrayList<T>> supplier() {
                return ArrayList::new;
            }

            @Override
            public BiConsumer<ArrayList<T>, T> accumulator() {
                return ArrayList::add;
            }

            @Override
            public BinaryOperator<ArrayList<T>> combiner() {
                return (left, right) -> {
                    left.addAll(right);
                    return left;
                };
            }

            @Override
            public Function<ArrayList<T>, ImmutableSet<T>> finisher() {
                // Deduplication happens here, when list contents are put into the set.
                return list -> {
                    ImmutableSet<T> set = ImmutableSet.empty(hasher);
                    for (T entry : list) {
                        set = set.put(entry);
                    }
                    return set;
                };
            }

            @Override
            public Set<Characteristics> characteristics() {
                Set<Characteristics> set = new HashSet<>();
                set.add(Characteristics.UNORDERED);
                return set;
            }
        };
    }

    /** Default collector; equality semantics. */
    @Nonnull
    public static <T> Collector<T, ?, ImmutableSet<T>> collector() {
        return ImmutableSet.collectorUsingEquality();
    }

    /** Collector using {@code equals}/{@code hashCode} semantics. */
    @Nonnull
    public static <T> Collector<T, ?, ImmutableSet<T>> collectorUsingEquality() {
        return ImmutableSet.collector(HashTable.equalityHasher());
    }

    /** Collector using reference-identity semantics. */
    @Nonnull
    public static <T> Collector<T, ?, ImmutableSet<T>> collectorUsingIdentity() {
        return ImmutableSet.collector(HashTable.identityHasher());
    }
}
package org.delft.naward07.Utils.ImageUtils;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import com.google.gson.Gson;

/**
 * Store images' information. Used in HashMap.
 * This class is too complicated. Try the SimpleImagesInfo class.
 *
 * Created by Feng Wang on 14-7-22.
 */
@Deprecated
public class ImagesInfo implements Comparable<ImagesInfo> {

    // Property selectors for increment(int, String); constant variables, usable as case labels.
    private static final int HASH = 0;
    private static final int PHASH = 1;
    private static final int HEIGHT = 2;
    private static final int WIDTH = 3;
    private static final int TIME = 4;
    private static final int URL = 5;
    private static final int HOST = 6;

    /** One observed occurrence of an image: dimensions, hashes and origin. */
    private class ImageInfo {
        private int width;
        private int height;
        private String hash;   // may be null when the 4-arg constructor was used
        private String pHash;
        private String url;
        private String host;   // may be null when no host was recorded

        ImageInfo(int width, int height, String pHash, String url) {
            this.width = width;
            this.height = height;
            this.pHash = pHash;
            this.url = url;
        }

        ImageInfo(int width, int height, String hash, String pHash, String url) {
            this(width, height, pHash, url);
            this.hash = hash;
        }

        ImageInfo(int width, int height, String hash, String pHash, String url, String host) {
            this(width, height, hash, pHash, url);
            this.host = host;
        }

        public String getHost() {
            return host;
        }

        public String getHash() {
            return hash;
        }

        // Name intentionally "getpHash": looked up reflectively in increment(int, String).
        public String getpHash() {
            return pHash;
        }

        @Override
        public String toString() {
            Gson gson = new Gson();
            return gson.toJson(this);
        }
    }

    // Occurrence counter; a fresh ImagesInfo represents one sighting.
    private int num = 1;
    // All recorded occurrences of this image.
    private List<ImageInfo> imageList = new ArrayList<ImageInfo>();
    private String hash;
    private String pHash;

    public ImagesInfo(String hash) {
        this.hash = hash;
    }

    public ImagesInfo(String hash, String pHash) {
        this.hash = hash;
        this.pHash = pHash;
    }

    /** Unconditionally counts one more occurrence. */
    public void increment() {
        ++num;
    }

    /**
     * Returns true when no recorded occurrence already has the given host
     * (i.e. the caller should count it), false when a duplicate host exists.
     * Null-safe: entries stored without a host no longer cause a NullPointerException.
     */
    public boolean incrementByHost(String host) {
        for (ImageInfo ii : imageList) {
            if (Objects.equals(ii.getHost(), host)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Duplicate check on one ImageInfo property, selected by {@code item}
     * (HASH, PHASH or HOST). Returns false when an existing occurrence has a
     * property equal to {@code content}; true otherwise. For properties without
     * a comparable accessor (HEIGHT, WIDTH, URL, ...) it simply increments the
     * counter and returns true, matching the original behavior.
     */
    public boolean increment(int item, String content) {
        String method;
        switch (item) {
            case HASH:
                method = "getHash";
                break;
            case PHASH:
                method = "getpHash";
                break;
            case HOST:
                method = "getHost";
                break;
            case HEIGHT:
            case WIDTH:
            case URL:
            default:
                method = "";
                break;
        }
        if (method.isEmpty()) {
            increment();
            return true;
        }
        for (ImageInfo ii : imageList) {
            String compare;
            try {
                Method m = ImageInfo.class.getMethod(method);
                // String.valueOf guards against null properties (e.g. hash-less entries),
                // which previously threw a NullPointerException via .toString().
                compare = String.valueOf(m.invoke(ii));
            } catch (ReflectiveOperationException e) {
                // Reflection failure on one entry: report it and skip the comparison
                // instead of reusing a stale value from a previous iteration.
                e.printStackTrace();
                continue;
            }
            if (compare.equals(content)) {
                return false;
            }
        }
        return true;
    }

    public void setNum(int num) {
        this.num = num;
    }

    public int getNum() {
        return num;
    }

    public void updateImagelist(int width, int height, String pHash, String url) {
        imageList.add(new ImageInfo(width, height, pHash, url));
    }

    public void updateImagelist(int width, int height, String hash, String pHash, String url) {
        imageList.add(new ImageInfo(width, height, hash, pHash, url));
    }

    public void updateImagelist(int width, int height, String hash, String pHash, String url, String host) {
        imageList.add(new ImageInfo(width, height, hash, pHash, url, host));
    }

    /** Descending order by occurrence count (most frequent first). */
    @Override
    public int compareTo(ImagesInfo iThat) {
        return Integer.compare(iThat.getNum(), this.getNum());
    }

    @Override
    public String toString() {
        Gson g = new Gson();
        return g.toJson(this);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.kubernetes.kubeclient;

import org.apache.flink.client.deployment.ClusterSpecification;
import org.apache.flink.configuration.BlobServerOptions;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.core.testutils.FlinkMatchers;
import org.apache.flink.kubernetes.KubernetesClientTestBase;
import org.apache.flink.kubernetes.KubernetesTestUtils;
import org.apache.flink.kubernetes.configuration.KubernetesConfigOptions;
import org.apache.flink.kubernetes.configuration.KubernetesConfigOptionsInternal;
import org.apache.flink.kubernetes.entrypoint.KubernetesSessionClusterEntrypoint;
import org.apache.flink.kubernetes.kubeclient.decorators.ExternalServiceDecorator;
import org.apache.flink.kubernetes.kubeclient.factory.KubernetesJobManagerFactory;
import org.apache.flink.kubernetes.kubeclient.parameters.KubernetesJobManagerParameters;
import org.apache.flink.kubernetes.kubeclient.resources.KubernetesConfigMap;
import org.apache.flink.kubernetes.kubeclient.resources.KubernetesPod;

import io.fabric8.kubernetes.api.model.ConfigMap;
import io.fabric8.kubernetes.api.model.ConfigMapBuilder;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.OwnerReference;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.PodBuilder;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.apps.Deployment;
import org.junit.Test;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;

import static org.apache.flink.kubernetes.utils.Constants.CONFIG_FILE_LOG4J_NAME;
import static org.apache.flink.kubernetes.utils.Constants.CONFIG_FILE_LOGBACK_NAME;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for Fabric implementation of {@link FlinkKubeClient}.
 */
public class Fabric8FlinkKubeClientTest extends KubernetesClientTestBase {

	private static final int RPC_PORT = 7123;
	private static final int BLOB_SERVER_PORT = 8346;

	private static final double JOB_MANAGER_CPU = 2.0;
	private static final int JOB_MANAGER_MEMORY = 768;

	private static final String SERVICE_ACCOUNT_NAME = "service-test";

	private static final String TASKMANAGER_POD_NAME = "mock-task-manager-pod";

	private static final String TESTING_CONFIG_MAP_NAME = "test-config-map";
	private static final String TESTING_CONFIG_MAP_KEY = "test-config-map-key";
	private static final String TESTING_CONFIG_MAP_VALUE = "test-config-map-value";
	private static final String TESTING_CONFIG_MAP_NEW_VALUE = "test-config-map-new-value";

	private static final Map<String, String> TESTING_LABELS = new HashMap<String, String>() {
		{
			put("label1", "value1");
			put("label2", "value2");
		}
	};

	private static final String ENTRY_POINT_CLASS = KubernetesSessionClusterEntrypoint.class.getCanonicalName();

	// Built in onSetup() and deployed by the tests that need a running JobManager.
	private KubernetesJobManagerSpecification kubernetesJobManagerSpecification;

	@Override
	protected void setupFlinkConfig() {
		super.setupFlinkConfig();

		flinkConfig.set(KubernetesConfigOptions.CONTAINER_IMAGE_PULL_POLICY, CONTAINER_IMAGE_PULL_POLICY);
		flinkConfig.set(KubernetesConfigOptionsInternal.ENTRY_POINT_CLASS, ENTRY_POINT_CLASS);
		flinkConfig.set(RestOptions.PORT, REST_PORT);
		flinkConfig.set(JobManagerOptions.PORT, RPC_PORT);
		flinkConfig.set(BlobServerOptions.PORT, Integer.toString(BLOB_SERVER_PORT));
		flinkConfig.set(KubernetesConfigOptions.JOB_MANAGER_CPU, JOB_MANAGER_CPU);
		flinkConfig.set(KubernetesConfigOptions.JOB_MANAGER_SERVICE_ACCOUNT, SERVICE_ACCOUNT_NAME);
	}

	@Override
	protected void onSetup() throws Exception {
		super.onSetup();

		KubernetesTestUtils.createTemporyFile("some data", flinkConfDir, CONFIG_FILE_LOGBACK_NAME);
		KubernetesTestUtils.createTemporyFile("some data", flinkConfDir, CONFIG_FILE_LOG4J_NAME);

		final ClusterSpecification clusterSpecification = new ClusterSpecification.ClusterSpecificationBuilder()
			.setMasterMemoryMB(JOB_MANAGER_MEMORY)
			.setTaskManagerMemoryMB(1000)
			.setSlotsPerTaskManager(3)
			.createClusterSpecification();

		final KubernetesJobManagerParameters kubernetesJobManagerParameters =
			new KubernetesJobManagerParameters(flinkConfig, clusterSpecification);
		this.kubernetesJobManagerSpecification =
			KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification(kubernetesJobManagerParameters);
	}

	/** Deploying the JobManager creates a deployment, a config map and two services, all owned by the deployment. */
	@Test
	public void testCreateFlinkMasterComponent() throws Exception {
		flinkKubeClient.createJobManagerComponent(this.kubernetesJobManagerSpecification);

		final List<Deployment> resultedDeployments = kubeClient.apps().deployments()
			.inNamespace(NAMESPACE)
			.list()
			.getItems();
		assertEquals(1, resultedDeployments.size());

		final List<ConfigMap> resultedConfigMaps = kubeClient.configMaps()
			.inNamespace(NAMESPACE)
			.list()
			.getItems();
		assertEquals(1, resultedConfigMaps.size());

		final List<Service> resultedServices = kubeClient.services()
			.inNamespace(NAMESPACE)
			.list()
			.getItems();
		assertEquals(2, resultedServices.size());

		testOwnerReferenceSetting(resultedDeployments.get(0), resultedConfigMaps);
		testOwnerReferenceSetting(resultedDeployments.get(0), resultedServices);
	}

	/** Asserts every resource carries exactly one owner reference pointing at {@code ownerReference}. */
	private <T extends HasMetadata> void testOwnerReferenceSetting(
		HasMetadata ownerReference,
		List<T> resources) {
		resources.forEach(resource -> {
			List<OwnerReference> ownerReferences = resource.getMetadata().getOwnerReferences();
			assertEquals(1, ownerReferences.size());
			assertEquals(ownerReference.getMetadata().getUid(), ownerReferences.get(0).getUid());
		});
	}

	/** A created TaskManager pod is owned by the JobManager deployment. */
	@Test
	public void testCreateFlinkTaskManagerPod() throws Exception {
		this.flinkKubeClient.createJobManagerComponent(this.kubernetesJobManagerSpecification);

		final KubernetesPod kubernetesPod = new KubernetesPod(new PodBuilder()
			.editOrNewMetadata()
			// Fixed: use the shared constant instead of repeating the literal, so the
			// lookup below cannot drift from the name used here.
			.withName(TASKMANAGER_POD_NAME)
			.endMetadata()
			.editOrNewSpec()
			.endSpec()
			.build());
		this.flinkKubeClient.createTaskManagerPod(kubernetesPod).get();

		final Pod resultTaskManagerPod =
			this.kubeClient.pods().inNamespace(NAMESPACE).withName(TASKMANAGER_POD_NAME).get();

		assertEquals(
			this.kubeClient.apps().deployments().inNamespace(NAMESPACE).list().getItems().get(0).getMetadata().getUid(),
			resultTaskManagerPod.getMetadata().getOwnerReferences().get(0).getUid());
	}

	/** stopPod removes an existing pod from the namespace. */
	@Test
	public void testStopPod() throws ExecutionException, InterruptedException {
		final String podName = "pod-for-delete";
		final Pod pod = new PodBuilder()
			.editOrNewMetadata()
			.withName(podName)
			.endMetadata()
			.editOrNewSpec()
			.endSpec()
			.build();

		this.kubeClient.pods().inNamespace(NAMESPACE).create(pod);
		assertNotNull(this.kubeClient.pods().inNamespace(NAMESPACE).withName(podName).get());

		this.flinkKubeClient.stopPod(podName).get();
		assertNull(this.kubeClient.pods().inNamespace(NAMESPACE).withName(podName).get());
	}

	/** LoadBalancer with a hostname but no IP: the hostname is used as the rest endpoint address. */
	@Test
	public void testServiceLoadBalancerWithNoIP() {
		final String hostName = "test-host-name";
		mockExpectedServiceFromServerSide(buildExternalServiceWithLoadBalancer(hostName, ""));

		final Optional<Endpoint> resultEndpoint = flinkKubeClient.getRestEndpoint(CLUSTER_ID);

		assertThat(resultEndpoint.isPresent(), is(true));
		assertThat(resultEndpoint.get().getAddress(), is(hostName));
		assertThat(resultEndpoint.get().getPort(), is(REST_PORT));
	}

	/** LoadBalancer with neither hostname nor IP yields no rest endpoint. */
	@Test
	public void testServiceLoadBalancerEmptyHostAndIP() {
		mockExpectedServiceFromServerSide(buildExternalServiceWithLoadBalancer("", ""));

		final Optional<Endpoint> resultEndpoint = flinkKubeClient.getRestEndpoint(CLUSTER_ID);
		assertThat(resultEndpoint.isPresent(), is(false));
	}

	/** Null hostname and IP are handled the same as empty ones: no endpoint. */
	@Test
	public void testServiceLoadBalancerNullHostAndIP() {
		mockExpectedServiceFromServerSide(buildExternalServiceWithLoadBalancer(null, null));

		final Optional<Endpoint> resultEndpoint = flinkKubeClient.getRestEndpoint(CLUSTER_ID);
		assertThat(resultEndpoint.isPresent(), is(false));
	}

	/** NodePort services expose the node port as the rest endpoint port. */
	@Test
	public void testNodePortService() {
		mockExpectedServiceFromServerSide(buildExternalServiceWithNodePort());

		final Optional<Endpoint> resultEndpoint = flinkKubeClient.getRestEndpoint(CLUSTER_ID);
		assertThat(resultEndpoint.isPresent(), is(true));
		assertThat(resultEndpoint.get().getPort(), is(NODE_PORT));
	}

	/** ClusterIP services resolve to the namespaced external service name and the rest port. */
	@Test
	public void testClusterIPService() {
		mockExpectedServiceFromServerSide(buildExternalServiceWithClusterIP());

		final Optional<Endpoint> resultEndpoint = flinkKubeClient.getRestEndpoint(CLUSTER_ID);
		assertThat(resultEndpoint.isPresent(), is(true));
		assertThat(
			resultEndpoint.get().getAddress(),
			is(ExternalServiceDecorator.getNamespacedExternalServiceName(CLUSTER_ID, NAMESPACE)));
		assertThat(resultEndpoint.get().getPort(), is(REST_PORT));
	}

	/** Cleaning up the cluster deletes the deployment (and thereby its owned resources). */
	@Test
	public void testStopAndCleanupCluster() throws Exception {
		this.flinkKubeClient.createJobManagerComponent(this.kubernetesJobManagerSpecification);

		final KubernetesPod kubernetesPod = new KubernetesPod(new PodBuilder()
			.editOrNewMetadata()
			.withName(TASKMANAGER_POD_NAME)
			.endMetadata()
			.editOrNewSpec()
			.endSpec()
			.build());
		this.flinkKubeClient.createTaskManagerPod(kubernetesPod).get();

		assertEquals(1, this.kubeClient.apps().deployments().inNamespace(NAMESPACE).list().getItems().size());
		assertEquals(1, this.kubeClient.configMaps().inNamespace(NAMESPACE).list().getItems().size());
		assertEquals(2, this.kubeClient.services().inNamespace(NAMESPACE).list().getItems().size());
		assertEquals(1, this.kubeClient.pods().inNamespace(NAMESPACE).list().getItems().size());

		this.flinkKubeClient.stopAndCleanupCluster(CLUSTER_ID);
		assertTrue(this.kubeClient.apps().deployments().inNamespace(NAMESPACE).list().getItems().isEmpty());
	}

	/** A created config map can be read back with its data intact. */
	@Test
	public void testCreateConfigMap() throws Exception {
		final KubernetesConfigMap configMap = buildTestingConfigMap();
		this.flinkKubeClient.createConfigMap(configMap).get();
		final Optional<KubernetesConfigMap> currentOpt = this.flinkKubeClient.getConfigMap(TESTING_CONFIG_MAP_NAME);
		assertThat(currentOpt.isPresent(), is(true));
		assertThat(currentOpt.get().getData().get(TESTING_CONFIG_MAP_KEY), is(TESTING_CONFIG_MAP_VALUE));
	}

	/** Creating an already-existing config map fails and leaves the original data untouched. */
	@Test
	public void testCreateConfigMapAlreadyExisting() throws Exception {
		final KubernetesConfigMap configMap = buildTestingConfigMap();
		this.flinkKubeClient.createConfigMap(configMap).get();

		mockCreateConfigMapAlreadyExisting(configMap.getInternalResource());
		configMap.getData().put(TESTING_CONFIG_MAP_KEY, TESTING_CONFIG_MAP_NEW_VALUE);
		try {
			this.flinkKubeClient.createConfigMap(configMap).get();
			fail("Overwrite an already existing config map should fail with an exception.");
		} catch (Exception ex) {
			final String errorMsg = "Failed to create ConfigMap " + TESTING_CONFIG_MAP_NAME;
			assertThat(ex, FlinkMatchers.containsMessage(errorMsg));
		}
		// Create failed we should still get the old value
		final Optional<KubernetesConfigMap> currentOpt = this.flinkKubeClient.getConfigMap(TESTING_CONFIG_MAP_NAME);
		assertThat(currentOpt.isPresent(), is(true));
		assertThat(currentOpt.get().getData().get(TESTING_CONFIG_MAP_KEY), is(TESTING_CONFIG_MAP_VALUE));
	}

	/** Config maps can be deleted by label selector. */
	@Test
	public void testDeleteConfigMapByLabels() throws Exception {
		this.flinkKubeClient.createConfigMap(buildTestingConfigMap()).get();
		assertThat(this.flinkKubeClient.getConfigMap(TESTING_CONFIG_MAP_NAME).isPresent(), is(true));
		this.flinkKubeClient.deleteConfigMapsByLabels(TESTING_LABELS).get();
		assertThat(this.flinkKubeClient.getConfigMap(TESTING_CONFIG_MAP_NAME).isPresent(), is(false));
	}

	/** Config maps can be deleted by name. */
	@Test
	public void testDeleteConfigMapByName() throws Exception {
		this.flinkKubeClient.createConfigMap(buildTestingConfigMap()).get();
		assertThat(this.flinkKubeClient.getConfigMap(TESTING_CONFIG_MAP_NAME).isPresent(), is(true));
		this.flinkKubeClient.deleteConfigMap(TESTING_CONFIG_MAP_NAME).get();
		assertThat(this.flinkKubeClient.getConfigMap(TESTING_CONFIG_MAP_NAME).isPresent(), is(false));
	}

	/** A passing checker function updates the config map data. */
	@Test
	public void testCheckAndUpdateConfigMap() throws Exception {
		// Fixed: wait for creation to complete (.get()) before updating, as the
		// other config-map tests do; otherwise the update may race the create.
		this.flinkKubeClient.createConfigMap(buildTestingConfigMap()).get();

		// Checker pass
		final boolean updated = this.flinkKubeClient.checkAndUpdateConfigMap(
			TESTING_CONFIG_MAP_NAME,
			c -> {
				c.getData().put(TESTING_CONFIG_MAP_KEY, TESTING_CONFIG_MAP_NEW_VALUE);
				return Optional.of(c);
			}).get();
		assertThat(updated, is(true));

		final Optional<KubernetesConfigMap> configMapOpt = this.flinkKubeClient.getConfigMap(TESTING_CONFIG_MAP_NAME);
		assertThat(configMapOpt.isPresent(), is(true));
		assertThat(configMapOpt.get().getData().get(TESTING_CONFIG_MAP_KEY), is(TESTING_CONFIG_MAP_NEW_VALUE));
	}

	/** A checker returning empty performs no update and reports false. */
	@Test
	public void testCheckAndUpdateConfigMapWithEmptyResult() throws Exception {
		// Fixed: wait for creation to complete (.get()) before updating.
		this.flinkKubeClient.createConfigMap(buildTestingConfigMap()).get();

		// Checker not pass and return empty result
		final boolean updated = this.flinkKubeClient.checkAndUpdateConfigMap(
			TESTING_CONFIG_MAP_NAME,
			c -> Optional.empty()).get();
		assertThat(updated, is(false));

		final Optional<KubernetesConfigMap> configMapOpt = this.flinkKubeClient.getConfigMap(TESTING_CONFIG_MAP_NAME);
		assertThat(configMapOpt.isPresent(), is(true));
		assertThat(configMapOpt.get().getData().get(TESTING_CONFIG_MAP_KEY), is(TESTING_CONFIG_MAP_VALUE));
	}

	/** Updating a non-existent config map fails without retrying. */
	@Test
	public void testCheckAndUpdateConfigMapWhenConfigMapNotExist() {
		try {
			final CompletableFuture<Boolean> future = this.flinkKubeClient.checkAndUpdateConfigMap(
				TESTING_CONFIG_MAP_NAME,
				c -> Optional.empty());
			future.get();
			fail("CheckAndUpdateConfigMap should fail with an exception when the ConfigMap does not exist.");
		} catch (Exception ex) {
			final String errMsg = "Cannot retry checkAndUpdateConfigMap with configMap "
				+ TESTING_CONFIG_MAP_NAME + " because it does not exist.";
			assertThat(ex, FlinkMatchers.containsMessage(errMsg));
			// Should not retry when ConfigMap does not exist.
			assertThat(ex, FlinkMatchers.containsMessage(
				"Stopped retrying the operation because the error is not retryable."));
		}
	}

	/** When the replace call keeps failing, the update retries the configured number of times and then gives up. */
	@Test
	public void testCheckAndUpdateConfigMapWhenReplaceConfigMapFailed() throws Exception {
		final int configuredRetries =
			flinkConfig.getInteger(KubernetesConfigOptions.KUBERNETES_TRANSACTIONAL_OPERATION_MAX_RETRIES);
		final KubernetesConfigMap configMap = buildTestingConfigMap();
		this.flinkKubeClient.createConfigMap(configMap).get();

		mockReplaceConfigMapFailed(configMap.getInternalResource());

		final AtomicInteger retries = new AtomicInteger(0);
		try {
			this.flinkKubeClient.checkAndUpdateConfigMap(TESTING_CONFIG_MAP_NAME, c -> {
				retries.incrementAndGet();
				return Optional.of(configMap);
			}).get();
			fail("CheckAndUpdateConfigMap should fail with exception when number of retries has been exhausted.");
		} catch (Exception ex) {
			assertThat(ex, FlinkMatchers.containsMessage("Could not complete the "
				+ "operation. Number of retries has been exhausted."));
			// Initial attempt + the configured number of retries.
			assertThat(retries.get(), is(configuredRetries + 1));
		}
	}

	/** Builds the config map fixture used by the config-map tests (one key/value, standard labels). */
	private KubernetesConfigMap buildTestingConfigMap() {
		final Map<String, String> data = new HashMap<>();
		data.put(TESTING_CONFIG_MAP_KEY, TESTING_CONFIG_MAP_VALUE);
		return new KubernetesConfigMap(new ConfigMapBuilder()
			.withNewMetadata()
			.withName(TESTING_CONFIG_MAP_NAME)
			.withLabels(TESTING_LABELS)
			.endMetadata()
			.withData(data).build());
	}
}
/* * Copyright (c) 2010 - 2011, Jan Stender, Bjoern Kolbeck, Mikael Hoegqvist, * Felix Hupfeld, Felix Langner, Zuse Institute Berlin * * Licensed under the BSD License, see LICENSE file for details. * */ package org.xtreemfs.babudb.replication.control; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; import java.util.LinkedList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import org.xtreemfs.babudb.config.ReplicationConfig; import org.xtreemfs.babudb.log.SyncListener; import org.xtreemfs.babudb.lsmdb.LSN; import org.xtreemfs.babudb.replication.LockableService; import org.xtreemfs.babudb.replication.TopLayer; import org.xtreemfs.babudb.replication.service.ServiceToControlInterface; import org.xtreemfs.foundation.LifeCycleListener; import org.xtreemfs.foundation.LifeCycleThread; import org.xtreemfs.foundation.buffer.ASCIIString; import org.xtreemfs.foundation.flease.Flease; import org.xtreemfs.foundation.flease.FleaseStage; import org.xtreemfs.foundation.flease.FleaseViewChangeListenerInterface; import org.xtreemfs.foundation.flease.comm.FleaseMessage; import org.xtreemfs.foundation.logging.Logging; /** * Contains the control logic for steering the replication process. 
* * @author flangner * @since 02/24/2010 */ public class ControlLayer extends TopLayer { /** designation of the flease-cell */ private final static ASCIIString REPLICATION_CELL = new ASCIIString("replication"); /** always access {@link Flease} from here */ private final FleaseStage fleaseStage; private final List<InetSocketAddress> fleaseParticipants; /** * component to ensure {@link Flease}'s requirement of loosely synchronized * clocks */ private final TimeDriftDetector timeDriftDetector; /** interface to the underlying layer */ private final ServiceToControlInterface serviceInterface; /** the local address used for the net-communication */ private final InetSocketAddress thisAddress; /** listener and storage for the up-to-date lease informations */ private final FleaseHolder leaseHolder; /** thread to execute failover requests */ private final FailoverTaskRunner failoverTaskRunner; /** services that have to be locked during failover */ private LockableService replicationInterface; private final AtomicBoolean initialFailoverObserved = new AtomicBoolean(false); /** Used by ProxyRequestHandler to find out if it should reject requests in the first place since a failover is in progress. 
*/ private volatile boolean failoverInProgress = true; public ControlLayer(ServiceToControlInterface serviceLayer, ReplicationConfig config) throws IOException { // ---------------------------------- // initialize the time drift detector // ---------------------------------- timeDriftDetector = new TimeDriftDetector( this, serviceLayer.getParticipantOverview().getConditionClients(), config.getFleaseConfig().getDMax()); // ---------------------------------- // initialize the replication // controller // ---------------------------------- thisAddress = config.getInetSocketAddress(); failoverTaskRunner = new FailoverTaskRunner(); serviceInterface = serviceLayer; leaseHolder = new FleaseHolder(REPLICATION_CELL, this); // ---------------------------------- // initialize Flease // ---------------------------------- File bDir = new File(config.getBabuDBConfig().getBaseDir()); if (!bDir.exists()) bDir.mkdirs(); fleaseParticipants = new LinkedList<InetSocketAddress>(config.getParticipants()); fleaseStage = new FleaseStage(config.getFleaseConfig(), config.getBabuDBConfig().getBaseDir(), new FleaseMessageSender(serviceLayer.getParticipantOverview(), config.getInetSocketAddress()), false, new FleaseViewChangeListenerInterface() { @Override public void viewIdChangeEvent(ASCIIString cellId, int viewId) { throw new UnsupportedOperationException("Not supported yet."); } }, leaseHolder, null); } /* * overridden methods */ /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.control.ControlLayerInterface#lockReplication() */ @Override public void lockReplication() throws InterruptedException { replicationInterface.lock(); } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.TopLayer#unlockReplication() */ @Override public void unlockReplication() { replicationInterface.unlock(); } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.control.ControlLayerInterface#getLeaseHolder() */ @Override public InetSocketAddress getLeaseHolder(int timeout) throws InterruptedException 
{ return leaseHolder.getLeaseHolderAddress(timeout); } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.control.ControlLayerInterface#getThisAddress() */ @Override public InetSocketAddress getThisAddress() { return thisAddress; } /* * (non-Javadoc) * @see java.lang.Thread#start() */ @Override public void start() { timeDriftDetector.start(); failoverTaskRunner.start(); fleaseStage.start(); try { fleaseStage.waitForStartup(); } catch (Throwable e) { listener.crashPerformed(e); } joinFlease(); } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.control.ControlLayerInterface#waitForInitialFailover() */ @Override public void waitForInitialFailover() throws InterruptedException { synchronized (initialFailoverObserved) { if (!initialFailoverObserved.get()) { initialFailoverObserved.wait(); if (!initialFailoverObserved.get()) { throw new InterruptedException("Waiting for the initial failover was interrupted."); } } } } /* * (non-Javadoc) * @see org.xtreemfs.babudb.replication.Layer#shutdown() */ @Override public void shutdown() { exitFlease(); timeDriftDetector.shutdown(); fleaseStage.shutdown(); try { fleaseStage.waitForShutdown(); } catch (Throwable e) { listener.crashPerformed(e); } failoverTaskRunner.shutdown(); synchronized (initialFailoverObserved) { initialFailoverObserved.notify(); } } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.Layer#_setLifeCycleListener(org.xtreemfs.foundation.LifeCycleListener) */ @Override public void _setLifeCycleListener(LifeCycleListener listener) { failoverTaskRunner.setLifeCycleListener(listener); timeDriftDetector.setLifeCycleListener(listener); fleaseStage.setLifeCycleListener(listener); } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.Layer#asyncShutdown() */ @Override public void asyncShutdown() { exitFlease(); failoverTaskRunner.shutdown(); timeDriftDetector.shutdown(); fleaseStage.shutdown(); synchronized (initialFailoverObserved) { initialFailoverObserved.notify(); } } /* (non-Javadoc) * @see 
org.xtreemfs.babudb.replication.control.TimeDriftDetector.TimeDriftListener#driftDetected(java.lang.String) */ @Override public void driftDetected(String driftedParticipants) { Logging.logMessage(Logging.LEVEL_WARN, Logging.Category.replication, this, "Clock skew between replicas is too high. This can result in inconsistent replicas. Please make sure that the local clocks are regularly synchronized e.g., by running a NTP daemon on each machine. Details: %s", driftedParticipants); } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.FleaseMessageReceiver#receive( * org.xtreemfs.foundation.flease.comm.FleaseMessage) */ @Override public void receive(FleaseMessage message) { fleaseStage.receiveMessage(message); } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.TopLayer#registerReplicationInterface( * org.xtreemfs.babudb.replication.LockableService) */ @Override public void registerReplicationControl(LockableService service) { replicationInterface = service; } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.control.FleaseEventListener#updateLeaseHolder( * java.net.InetSocketAddress) */ @Override public void updateLeaseHolder(InetSocketAddress newLeaseHolder) { failoverInProgress = true; failoverTaskRunner.queueFailoverRequest(newLeaseHolder); } /* (non-Javadoc) * @see org.xtreemfs.babudb.replication.control.ControlLayerInterface#isFailoverInProgress() */ @Override public boolean isFailoverInProgress() { return failoverInProgress; } /* * private methods */ /** * Method to participate at {@link Flease}. */ private void joinFlease() { fleaseStage.openCell(REPLICATION_CELL, fleaseParticipants, false); } /** * Method to exclude this BabuDB instance from {@link Flease}. */ private void exitFlease() { fleaseStage.closeCell(REPLICATION_CELL, true); } /** * This thread enables the replication to handle failover requests asynchronously to incoming * Flease messages. It also ensures, that there will be only one failover at a time. 
* * @author flangner * @since 02/21/2011 */ private final class FailoverTaskRunner extends LifeCycleThread { private final AtomicReference<InetSocketAddress> failoverRequest = new AtomicReference<InetSocketAddress>(null); private boolean quit = true; private InetSocketAddress currentLeaseHolder = null; private FailoverTaskRunner() { super("FailOverT@" + thisAddress.getPort()); } /** * Enqueue a new failover request. * * @param address - of the new replication master candidate. */ void queueFailoverRequest(InetSocketAddress address) { synchronized (failoverRequest) { if (failoverRequest.compareAndSet(null, address)) { Logging.logMessage(Logging.LEVEL_INFO, this, "Server %s is initiating failover with new master candidate %s.", thisAddress.toString(), address.toString()); failoverRequest.notify(); } } } /* (non-Javadoc) * @see java.lang.Thread#start() */ @Override public synchronized void start() { super.start(); } /* (non-Javadoc) * @see org.xtreemfs.foundation.LifeCycleThread#shutdown() */ @Override public void shutdown() { quit = true; interrupt(); } /* (non-Javadoc) * @see java.lang.Thread#run() */ @Override public void run() { quit = false; notifyStarted(); InetSocketAddress newLeaseHolder = null; try { while (!quit) { synchronized (failoverRequest) { if (failoverRequest.get() == null) { failoverRequest.wait(); } newLeaseHolder = failoverRequest.getAndSet(null); } try { // only do a failover if one is required if (!newLeaseHolder.equals(currentLeaseHolder)) { // switch master -> slave if (thisAddress.equals(currentLeaseHolder)) { becomeSlave(newLeaseHolder); // switch slave -> master } else if (thisAddress.equals(newLeaseHolder)) { becomeMaster(); // switch slave -> slave (stay slave) } else { /* we got nothing to do */ } } // update current leaseHolder if (currentLeaseHolder == null) { synchronized (initialFailoverObserved) { initialFailoverObserved.set(true); initialFailoverObserved.notify(); } } currentLeaseHolder = newLeaseHolder; failoverInProgress = false; 
} catch (Exception e) { if (!quit) { Logging.logMessage(Logging.LEVEL_WARN, this, "Processing a failover " + "did not succeed, because: ", e.getMessage()); Logging.logError(Logging.LEVEL_WARN, this, e); leaseHolder.reset(); } } } } catch (InterruptedException e) { if (!quit) { notifyCrashed(e); } } notifyStopped(); } /** * This server has to become the new master. * * @throws Exception */ private void becomeMaster() throws Exception { Logging.logMessage(Logging.LEVEL_DEBUG, this, "Becoming the replication master."); final AtomicBoolean finished = new AtomicBoolean(false); final AtomicReference<Exception> exception = new AtomicReference<Exception>(null); // synchronize with other servers serviceInterface.synchronize(new SyncListener() { @Override public void synced(LSN lsn) { Logging.logMessage(Logging.LEVEL_DEBUG, this, "Master failover succeeded @LSN(%s).", lsn.toString()); serviceInterface.reset(); synchronized (finished) { if (finished.compareAndSet(false, true)) { finished.notify(); } else { assert(false); } } } @Override public void failed(Exception ex) { Logging.logMessage(Logging.LEVEL_WARN, this, "Master failover did not succeed! Reseting the local lease and " + "waiting for a new impulse from FLease. Reason: %s", ex.getMessage()); Logging.logError(Logging.LEVEL_DEBUG, this, ex); synchronized (finished) { if (finished.compareAndSet(false, true)) { exception.set(ex); finished.notify(); } else { assert(false); } } } }, thisAddress.getPort()); synchronized (finished) { while (!finished.get()) { finished.wait(); } if (exception.get() != null) { throw exception.get(); } } // if a new failover request arrives while synchronization is still waiting for a stable // state to be established (SyncListener), the listener will be marked as failed when // the new master address is set Logging.logMessage(Logging.LEVEL_INFO, this, "Failover succeeded! 
%s has become the new master.", thisAddress.toString()); } /** * Another server has become the master and this one has to obey. * * @param masterAddress * @throws InterruptedException */ private void becomeSlave(InetSocketAddress masterAddress) throws InterruptedException { serviceInterface.reset(); Logging.logMessage(Logging.LEVEL_INFO, this, "Becoming a slave for %s.", masterAddress.toString()); // user requests may only be permitted on slaves that have been synchronized with the // master, which is only possible after the master they obey internally has been changed // by this method } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;

import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * Base class for factories that create {@link Aggregator} instances.
 *
 * @param <AF> self-referential type parameter used so subclasses can return
 *             their own concrete type from fluent-style APIs
 */
public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> {

    /**
     * Wraps an {@link Aggregator} that only knows how to collect bucket {@code 0}
     * and exposes it as an aggregator that can collect any bucket ordinal, by
     * lazily creating one delegate aggregator per bucket.
     */
    public static final class MultiBucketAggregatorWrapper extends Aggregator {
        private final BigArrays bigArrays;
        private final Aggregator parent;
        private final AggregatorFactory<?> factory;
        // The eagerly-created delegate for bucket 0; also used to answer
        // metadata queries (name, context, scoreMode, ...) for the wrapper.
        private final Aggregator first;
        ObjectArray<Aggregator> aggregators;
        ObjectArray<LeafBucketCollector> collectors;

        MultiBucketAggregatorWrapper(BigArrays bigArrays, SearchContext context,
                                     Aggregator parent, AggregatorFactory<?> factory, Aggregator first) {
            this.bigArrays = bigArrays;
            this.parent = parent;
            this.factory = factory;
            this.first = first;
            // Ensure the per-bucket arrays are released at the end of the search phase.
            context.addReleasable(this, Lifetime.PHASE);
            aggregators = bigArrays.newObjectArray(1);
            aggregators.set(0, first);
            collectors = bigArrays.newObjectArray(1);
        }

        public Class<?> getWrappedClass() {
            return first.getClass();
        }

        @Override
        public String name() {
            return first.name();
        }

        @Override
        public SearchContext context() {
            return first.context();
        }

        @Override
        public Aggregator parent() {
            return first.parent();
        }

        @Override
        public ScoreMode scoreMode() {
            return first.scoreMode();
        }

        @Override
        public Aggregator subAggregator(String name) {
            throw new UnsupportedOperationException();
        }

        @Override
        public void preCollection() throws IOException {
            // Propagate to every delegate created so far; slots may be null
            // because delegates are created lazily per bucket.
            for (long i = 0; i < aggregators.size(); ++i) {
                final Aggregator aggregator = aggregators.get(i);
                if (aggregator != null) {
                    aggregator.preCollection();
                }
            }
        }

        @Override
        public void postCollection() throws IOException {
            for (long i = 0; i < aggregators.size(); ++i) {
                final Aggregator aggregator = aggregators.get(i);
                if (aggregator != null) {
                    aggregator.postCollection();
                }
            }
        }

        @Override
        public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx) {
            // Invalidate cached leaf collectors from the previous segment; they
            // will be re-created on demand against the new leaf context.
            for (long i = 0; i < collectors.size(); ++i) {
                collectors.set(i, null);
            }
            return new LeafBucketCollector() {
                Scorable scorer;

                @Override
                public void setScorer(Scorable scorer) throws IOException {
                    this.scorer = scorer;
                }

                @Override
                public void collect(int doc, long bucket) throws IOException {
                    collectors = bigArrays.grow(collectors, bucket + 1);
                    LeafBucketCollector collector = collectors.get(bucket);
                    if (collector == null) {
                        aggregators = bigArrays.grow(aggregators, bucket + 1);
                        Aggregator aggregator = aggregators.get(bucket);
                        if (aggregator == null) {
                            // Lazily create one single-bucket delegate per bucket ordinal.
                            aggregator = factory.create(parent, true);
                            aggregator.preCollection();
                            aggregators.set(bucket, aggregator);
                        }
                        collector = aggregator.getLeafCollector(ctx);
                        if (scorer != null) {
                            // Passing a null scorer can cause unexpected NPE at a later time,
                            // which cannot be directly linked to the fact that a null scorer
                            // has been supplied.
                            collector.setScorer(scorer);
                        }
                        collectors.set(bucket, collector);
                    }
                    // Delegates are single-bucket aggregators, so they always
                    // collect into their own bucket 0.
                    collector.collect(doc, 0);
                }
            };
        }

        @Override
        public InternalAggregation buildAggregation(long bucket) throws IOException {
            if (bucket < aggregators.size()) {
                Aggregator aggregator = aggregators.get(bucket);
                if (aggregator != null) {
                    return aggregator.buildAggregation(0);
                }
            }
            // No delegate ever collected this bucket: return the empty result.
            return buildEmptyAggregation();
        }

        @Override
        public InternalAggregation buildEmptyAggregation() {
            return first.buildEmptyAggregation();
        }

        @Override
        public void close() {
            Releasables.close(aggregators, collectors);
        }
    }

    protected final String name;
    protected final AggregatorFactory<?> parent;
    protected final AggregatorFactories factories;
    protected final Map<String, Object> metaData;

    protected final SearchContext context;

    /**
     * Constructs a new aggregator factory.
     *
     * @param name
     *            The aggregation name
     * @throws IOException
     *             if an error occurs creating the factory
     */
    public AggregatorFactory(String name, SearchContext context, AggregatorFactory<?> parent,
            AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
        this.name = name;
        this.context = context;
        this.parent = parent;
        this.factories = subFactoriesBuilder.build(context, this);
        this.metaData = metaData;
    }

    public String name() {
        return name;
    }

    /**
     * Hook for subclasses to validate their configuration; no-op by default.
     */
    public void doValidate() {
    }

    protected abstract Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket,
            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException;

    /**
     * Creates the aggregator
     *
     * @param parent
     *            The parent aggregator (if this is a top level factory, the
     *            parent will be {@code null})
     * @param collectsFromSingleBucket
     *            If true then the created aggregator will only be collected
     *            with {@code 0} as a bucket ordinal. Some factories can take
     *            advantage of this in order to return more optimized
     *            implementations.
     *
     * @return The created aggregator
     */
    public final Aggregator create(Aggregator parent, boolean collectsFromSingleBucket) throws IOException {
        return createInternal(parent, collectsFromSingleBucket, this.factories.createPipelineAggregators(), this.metaData);
    }

    public AggregatorFactory<?> getParent() {
        return parent;
    }

    /**
     * Utility method. Given an {@link AggregatorFactory} that creates
     * {@link Aggregator}s that only know how to collect bucket {@code 0}, this
     * returns an aggregator that can collect any bucket.
     */
    protected static Aggregator asMultiBucketAggregator(final AggregatorFactory<?> factory, final SearchContext context,
            final Aggregator parent) throws IOException {
        final Aggregator first = factory.create(parent, true);
        final BigArrays bigArrays = context.bigArrays();
        return new MultiBucketAggregatorWrapper(bigArrays, context, parent, factory, first);
    }

}
/*
 * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/* @test
 * @bug 4434723 4482726 4559072 4638365 4795550 5081340 5103988 6253145
 *      6984545
 * @summary Test FileChannel.transferFrom and transferTo
 * @library ..
 */

import java.io.*;
import java.net.*;
import java.nio.*;
import java.nio.channels.*;
import java.nio.channels.spi.SelectorProvider;
import java.nio.file.StandardOpenOption;
import java.nio.file.FileAlreadyExistsException;
import java.util.Random;

public class Transfer {

    private static Random generator = new Random();

    private static int[] testSizes = {
        0, 10, 1023, 1024, 1025, 2047, 2048, 2049 };

    public static void main(String[] args) throws Exception {
        testFileChannel();
        for (int i=0; i<testSizes.length; i++)
            testReadableByteChannel(testSizes[i]);
        xferTest02(); // for bug 4482726
        xferTest03(); // for bug 4559072
        xferTest04(); // for bug 4638365
        xferTest05(); // for bug 4638365
        xferTest06(); // for bug 5081340
        xferTest07(); // for bug 5103988
        xferTest08(); // for bug 6253145
        xferTest09(); // for bug 6984545
    }

    /**
     * Basic transferFrom check: verifies byte count, that the source position
     * advances, that the sink position does not, and the resulting sink size.
     */
    private static void testFileChannel() throws Exception {
        File source = File.createTempFile("source", null);
        source.deleteOnExit();
        File sink = File.createTempFile("sink", null);
        sink.deleteOnExit();

        FileOutputStream fos = new FileOutputStream(source);
        FileChannel sourceChannel = fos.getChannel();
        sourceChannel.write(ByteBuffer.wrap(
            "Use the source, Luke!".getBytes()));
        sourceChannel.close();

        FileInputStream fis = new FileInputStream(source);
        sourceChannel = fis.getChannel();

        RandomAccessFile raf = new RandomAccessFile(sink, "rw");
        FileChannel sinkChannel = raf.getChannel();
        long oldSinkPosition = sinkChannel.position();
        long oldSourcePosition = sourceChannel.position();

        long bytesWritten = sinkChannel.transferFrom(sourceChannel, 0, 10);
        if (bytesWritten != 10)
            throw new RuntimeException("Transfer failed");

        if (sourceChannel.position() == oldSourcePosition)
            throw new RuntimeException("Source position didn't change");

        if (sinkChannel.position() != oldSinkPosition)
            throw new RuntimeException("Sink position changed");

        if (sinkChannel.size() != 10)
            throw new RuntimeException("Unexpected sink size");

        // transferFrom at a position beyond the sink's size must write nothing
        bytesWritten = sinkChannel.transferFrom(sourceChannel, 1000, 10);

        if (bytesWritten > 0)
            throw new RuntimeException("Wrote past file size");

        sourceChannel.close();
        sinkChannel.close();

        source.delete();
        sink.delete();
    }

    /**
     * transferFrom with a non-FileChannel (pipe) source of the given size.
     * The pipe holds size+10 bytes; exactly size bytes must be transferred
     * without moving the file channel's position.
     */
    private static void testReadableByteChannel(int size) throws Exception {
        SelectorProvider sp = SelectorProvider.provider();
        Pipe p = sp.openPipe();
        Pipe.SinkChannel sink = p.sink();
        Pipe.SourceChannel source = p.source();
        sink.configureBlocking(false);

        ByteBuffer outgoingdata = ByteBuffer.allocateDirect(size + 10);
        byte[] someBytes = new byte[size + 10];
        generator.nextBytes(someBytes);
        outgoingdata.put(someBytes);
        outgoingdata.flip();

        int totalWritten = 0;
        while (totalWritten < size + 10) {
            int written = sink.write(outgoingdata);
            if (written < 0)
                throw new Exception("Write failed");
            totalWritten += written;
        }

        File f = File.createTempFile("blah"+size, null);
        f.deleteOnExit();
        RandomAccessFile raf = new RandomAccessFile(f, "rw");
        FileChannel fc = raf.getChannel();
        long oldPosition = fc.position();

        long bytesWritten = fc.transferFrom(source, 0, size);
        fc.force(true);
        if (bytesWritten != size)
            throw new RuntimeException("Transfer failed");

        if (fc.position() != oldPosition)
            throw new RuntimeException("Position changed");

        if (fc.size() != size)
            throw new RuntimeException("Unexpected sink size "+ fc.size());

        fc.close();
        sink.close();
        source.close();

        f.delete();
    }

    /**
     * transferTo from the middle of a file (bug 4482726).
     */
    public static void xferTest02() throws Exception {
        byte[] srcData = new byte[5000];
        for (int i=0; i<5000; i++)
            srcData[i] = (byte)generator.nextInt();

        // get filechannel for the source file.
        File source = File.createTempFile("source", null);
        source.deleteOnExit();
        RandomAccessFile raf1 = new RandomAccessFile(source, "rw");
        FileChannel fc1 = raf1.getChannel();

        // write out data to the file channel; wrap once and drain the buffer
        // so a partial write continues where it left off instead of starting
        // over at position 0
        ByteBuffer src = ByteBuffer.wrap(srcData);
        while (src.hasRemaining()) {
            fc1.write(src);
        }

        // get filechannel for the dst file.
        File dest = File.createTempFile("dest", null);
        dest.deleteOnExit();
        RandomAccessFile raf2 = new RandomAccessFile(dest, "rw");
        FileChannel fc2 = raf2.getChannel();

        int bytesToWrite = 3000;
        int startPosition = 1000;

        long bytesWritten = fc1.transferTo(startPosition, bytesToWrite, fc2);
        if (bytesWritten != bytesToWrite)
            throw new RuntimeException("Transfer test 2 failed " + bytesWritten);

        fc1.close();
        fc2.close();
        raf1.close();
        raf2.close();

        source.delete();
        dest.delete();
    }

    /**
     * transferTo asking for one byte more than the source holds must not
     * write past the source's size (bug 4559072).
     */
    public static void xferTest03() throws Exception {
        byte[] srcData = new byte[] {1,2,3,4} ;

        // get filechannel for the source file.
        File source = File.createTempFile("source", null);
        source.deleteOnExit();
        RandomAccessFile raf1 = new RandomAccessFile(source, "rw");
        FileChannel fc1 = raf1.getChannel();
        fc1.truncate(0);

        // write out data to the file channel; drain a single buffer so a
        // partial write cannot duplicate leading bytes
        ByteBuffer src = ByteBuffer.wrap(srcData);
        while (src.hasRemaining()) {
            fc1.write(src);
        }

        // get filechannel for the dst file.
        File dest = File.createTempFile("dest", null);
        dest.deleteOnExit();
        RandomAccessFile raf2 = new RandomAccessFile(dest, "rw");
        FileChannel fc2 = raf2.getChannel();
        fc2.truncate(0);

        fc1.transferTo(0, srcData.length + 1, fc2);

        if (fc2.size() > 4)
            throw new Exception("xferTest03 failed");

        fc1.close();
        fc2.close();
        raf1.close();
        raf2.close();

        source.delete();
        dest.delete();
    }

    // Test transferTo with large file
    public static void xferTest04() throws Exception {
        // Windows and Linux can't handle the really large file sizes for a
        // truncate or a positional write required by the test for 4563125
        String osName = System.getProperty("os.name");
        if (!osName.startsWith("SunOS"))
            return;

        File source = File.createTempFile("blah", null);
        source.deleteOnExit();
        long testSize = ((long)Integer.MAX_VALUE) * 2;
        initTestFile(source, 10);
        RandomAccessFile raf = new RandomAccessFile(source, "rw");
        FileChannel fc = raf.getChannel();
        fc.write(ByteBuffer.wrap("Use the source!".getBytes()), testSize - 40);
        fc.close();
        raf.close();

        File sink = File.createTempFile("sink", null);
        sink.deleteOnExit();

        FileInputStream fis = new FileInputStream(source);
        FileChannel sourceChannel = fis.getChannel();

        raf = new RandomAccessFile(sink, "rw");
        FileChannel sinkChannel = raf.getChannel();

        long bytesWritten = sourceChannel.transferTo(testSize -40, 10,
                                                     sinkChannel);
        if (bytesWritten != 10) {
            throw new RuntimeException("Transfer test 4 failed " +
                                       bytesWritten);
        }
        sourceChannel.close();
        sinkChannel.close();

        source.delete();
        sink.delete();
    }

    // Test transferFrom with large file
    public static void xferTest05() throws Exception {
        // Create a source file & large sink file for the test
        File source = File.createTempFile("blech", null);
        source.deleteOnExit();
        initTestFile(source, 100);

        // Create the sink file as a sparse file if possible
        File sink = null;
        FileChannel fc = null;
        while (fc == null) {
            sink = File.createTempFile("sink", null);
            // re-create as a sparse file
            sink.delete();
            try {
                fc = FileChannel.open(sink.toPath(),
                                      StandardOpenOption.CREATE_NEW,
                                      StandardOpenOption.WRITE,
                                      StandardOpenOption.SPARSE);
            } catch (FileAlreadyExistsException ignore) {
                // someone else got it
            }
        }
        sink.deleteOnExit();

        long testSize = ((long)Integer.MAX_VALUE) * 2;
        try {
            fc.write(ByteBuffer.wrap("Use the source!".getBytes()),
                     testSize - 40);
        } catch (IOException e) {
            // Can't set up the test, abort it
            System.err.println("xferTest05 was aborted.");
            return;
        } finally {
            fc.close();
        }

        // Get new channels for the source and sink and attempt transfer
        FileChannel sourceChannel = new FileInputStream(source).getChannel();
        try {
            FileChannel sinkChannel = new RandomAccessFile(sink, "rw").getChannel();
            try {
                long bytesWritten = sinkChannel.transferFrom(sourceChannel,
                                                             testSize - 40, 10);
                if (bytesWritten != 10) {
                    throw new RuntimeException("Transfer test 5 failed " +
                                               bytesWritten);
                }
            } finally {
                sinkChannel.close();
            }
        } finally {
            sourceChannel.close();
        }

        source.delete();
        sink.delete();
    }

    /**
     * Asserts that the file's ASCII contents equal the expected string.
     */
    static void checkFileData(File file, String expected) throws Exception {
        // try-with-resources so the reader is closed even if the check throws
        try (FileInputStream fis = new FileInputStream(file);
             Reader r = new BufferedReader(new InputStreamReader(fis, "ASCII"))) {
            StringBuilder sb = new StringBuilder();
            int c;
            while ((c = r.read()) != -1)
                sb.append((char)c);
            String contents = sb.toString();
            if (! contents.equals(expected))
                throw new Exception("expected: " + expected +
                                    ", got: " + contents);
        }
    }

    // Test transferFrom asking for more bytes than remain in source
    public static void xferTest06() throws Exception {
        String data = "Use the source, Luke!";

        File source = File.createTempFile("source", null);
        source.deleteOnExit();
        File sink = File.createTempFile("sink", null);
        sink.deleteOnExit();

        FileOutputStream fos = new FileOutputStream(source);
        fos.write(data.getBytes("ASCII"));
        fos.close();

        FileChannel sourceChannel =
            new RandomAccessFile(source, "rw").getChannel();
        sourceChannel.position(7);
        long remaining = sourceChannel.size() - sourceChannel.position();
        FileChannel sinkChannel =
            new RandomAccessFile(sink, "rw").getChannel();
        long n = sinkChannel.transferFrom(sourceChannel,
                                          0L,
                                          sourceChannel.size()); // overflow
        if (n != remaining)
            throw new Exception("n == " + n + ", remaining == " + remaining);

        sinkChannel.close();
        sourceChannel.close();

        checkFileData(source, data);
        checkFileData(sink, data.substring(7,data.length()));

        source.delete();
    }

    // Test transferTo to non-blocking socket channel
    public static void xferTest07() throws Exception {
        File source = File.createTempFile("source", null);
        source.deleteOnExit();

        FileChannel sourceChannel = new RandomAccessFile(source, "rw")
            .getChannel();
        sourceChannel.position(32000L)
            .write(ByteBuffer.wrap("The End".getBytes()));

        // The sink is a non-blocking socket channel
        ServerSocketChannel ssc = ServerSocketChannel.open();
        ssc.socket().bind(new InetSocketAddress(0));
        InetSocketAddress sa = new InetSocketAddress(
            InetAddress.getLocalHost(), ssc.socket().getLocalPort());
        SocketChannel sink = SocketChannel.open(sa);
        sink.configureBlocking(false);
        SocketChannel other = ssc.accept();

        long size = sourceChannel.size();

        // keep sending until congested
        long n;
        do {
            n = sourceChannel.transferTo(0, size, sink);
        } while (n > 0);

        sourceChannel.close();
        sink.close();
        other.close();
        ssc.close();
        source.delete();
    }

    // Test transferTo with file positions larger than 2 and 4GB
    public static void xferTest08() throws Exception {
        // Creating a sparse 6GB file on Windows takes too long
        String osName = System.getProperty("os.name");
        if (osName.startsWith("Windows"))
            return;

        final long G = 1024L * 1024L * 1024L;

        // Create 6GB file
        File file = File.createTempFile("source", null);
        file.deleteOnExit();

        RandomAccessFile raf = new RandomAccessFile(file, "rw");
        FileChannel fc = raf.getChannel();

        try {
            fc.write(ByteBuffer.wrap("0123456789012345".getBytes("UTF-8")), 6*G);
        } catch (IOException x) {
            System.err.println("Unable to create test file:" + x);
            fc.close();
            return;
        }

        // Setup loopback connection and echo server
        ServerSocketChannel ssc = ServerSocketChannel.open();
        ssc.socket().bind(new InetSocketAddress(0));

        InetAddress lh = InetAddress.getLocalHost();
        InetSocketAddress isa = new InetSocketAddress(lh, ssc.socket().getLocalPort());
        SocketChannel source = SocketChannel.open(isa);
        SocketChannel sink = ssc.accept();

        Thread thr = new Thread(new EchoServer(sink));
        thr.start();

        // Test data is array of positions and counts
        long testdata[][] = {
            { 2*G-1,    1 },
            { 2*G-1,   10 },        // across 2GB boundary
            { 2*G,      1 },
            { 2*G,     10 },
            { 2*G+1,    1 },
            { 4*G-1,    1 },
            { 4*G-1,   10 },        // across 4GB boundary
            { 4*G,      1 },
            { 4*G,     10 },
            { 4*G+1,    1 },
            { 5*G-1,    1 },
            { 5*G-1,   10 },
            { 5*G,      1 },
            { 5*G,     10 },
            { 5*G+1,    1 },
            { 6*G,      1 },
        };

        ByteBuffer sendbuf = ByteBuffer.allocateDirect(100);
        ByteBuffer readbuf = ByteBuffer.allocateDirect(100);

        try {
            byte value = 0;
            for (int i=0; i<testdata.length; i++) {
                // i is already an int, so no cast is needed to index
                long position = testdata[i][0];
                long count = testdata[i][1];

                // generate bytes
                for (long j=0; j<count; j++) {
                    sendbuf.put(++value);
                }
                sendbuf.flip();

                // write to file and transfer to echo server
                fc.write(sendbuf, position);
                fc.transferTo(position, count, source);

                // read from echo server
                long nread = 0;
                while (nread < count) {
                    int n = source.read(readbuf);
                    if (n < 0)
                        throw new RuntimeException("Premature EOF!");
                    nread += n;
                }

                // check reply from echo server
                readbuf.flip();
                sendbuf.flip();
                if (!readbuf.equals(sendbuf))
                    throw new RuntimeException("Echo'ed bytes do not match!");
                readbuf.clear();
                sendbuf.clear();
            }
        } finally {
            source.close();
            ssc.close();
            fc.close();
            file.delete();
        }
    }

    // Test that transferFrom with FileChannel source that is not readable
    // throws NonReadableChannelException
    static void xferTest09() throws Exception {
        File source = File.createTempFile("source", null);
        source.deleteOnExit();

        File target = File.createTempFile("target", null);
        target.deleteOnExit();

        FileChannel fc1 = new FileOutputStream(source).getChannel();
        FileChannel fc2 = new RandomAccessFile(target, "rw").getChannel();
        try {
            fc2.transferFrom(fc1, 0L, 0);
            throw new RuntimeException("NonReadableChannelException expected");
        } catch (NonReadableChannelException expected) {
        } finally {
            fc1.close();
            fc2.close();
        }
    }

    /**
     * Creates file blah of specified size in bytes.
     */
    private static void initTestFile(File blah, long size) throws Exception {
        if (blah.exists())
            blah.delete();
        FileOutputStream fos = new FileOutputStream(blah);
        BufferedWriter awriter
            = new BufferedWriter(new OutputStreamWriter(fos, "8859_1"));

        for(int i=0; i<size; i++) {
            awriter.write("e");
        }
        awriter.flush();
        awriter.close();
    }

    /**
     * Simple in-process server to echo bytes read by a given socket channel
     */
    static class EchoServer implements Runnable {
        private SocketChannel sc;

        public EchoServer(SocketChannel sc) {
            this.sc = sc;
        }

        public void run() {
            ByteBuffer bb = ByteBuffer.allocateDirect(1024);
            try {
                for (;;) {
                    int n = sc.read(bb);
                    if (n < 0)
                        break;

                    bb.flip();
                    while (bb.remaining() > 0) {
                        sc.write(bb);
                    }
                    bb.clear();
                }
            } catch (IOException x) {
                x.printStackTrace();
            } finally {
                try {
                    sc.close();
                } catch (IOException ignore) { }
            }
        }
    }
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.annotator.intentions.dynamic; import com.intellij.openapi.actionSystem.DataProvider; import com.intellij.openapi.actionSystem.ex.DataConstantsEx; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.wm.ToolWindow; import com.intellij.openapi.wm.ToolWindowAnchor; import com.intellij.openapi.wm.ToolWindowManager; import com.intellij.openapi.wm.impl.ToolWindowManagerImpl; import com.intellij.psi.*; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.refactoring.listeners.RefactoringElementListener; import com.intellij.refactoring.listeners.RefactoringElementListenerProvider; import com.intellij.refactoring.listeners.RefactoringListenerManager; import com.intellij.ui.*; import com.intellij.ui.content.ContentManager; import com.intellij.ui.content.Content; import com.intellij.ui.treeStructure.treetable.ListTreeTableModelOnColumns; import com.intellij.ui.treeStructure.treetable.TreeTable; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.ui.treeStructure.treetable.TreeTableTree; import 
com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.AbstractTableCellEditor;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.GroovyBundle;
import org.jetbrains.plugins.groovy.GroovyFileType;
import org.jetbrains.plugins.groovy.GroovyIcons;
import org.jetbrains.plugins.groovy.annotator.intentions.QuickfixUtil;
import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.elements.*;
import org.jetbrains.plugins.groovy.debugger.fragments.GroovyCodeFragment;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementFactory;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrTypeElement;

import javax.swing.*;
import javax.swing.event.CellEditorListener;
import javax.swing.event.ChangeEvent;
import javax.swing.table.TableCellRenderer;
import javax.swing.tree.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;

/**
 * User: Dmitry.Krasilschikov
 * Date: 09.01.2008
 *
 * Project-level wrapper around the "Dynamic members" tool window: registers
 * the tool window lazily, and renders the dynamic classes, properties and
 * methods known to {@link DynamicManager} in an editable two-column tree
 * table (element name / type).
 */
public class DynamicToolWindowWrapper {
  private final Project myProject;
  // Lazily registered in getToolWindow(); null until first use.
  private ToolWindow myToolWindow = null;

  public DynamicToolWindowWrapper(Project project) {
    myProject = project;
  }

  /** Returns the project service instance of this wrapper. */
  public static DynamicToolWindowWrapper getInstance(Project project) {
    return ServiceManager.getService(project, DynamicToolWindowWrapper.class);
  }

  public static final String DYNAMIC_TOOLWINDOW_ID = GroovyBundle.message("dynamic.tool.window.id");

  private JPanel myTreeTablePanel;
  private JPanel myBigPanel;
  private ListTreeTableModelOnColumns myTreeTableModel;

  // Column indices of the tree table built in createTable().
  private static final int CLASS_OR_ELEMENT_NAME_COLUMN = 0;
  private static final int TYPE_COLUMN = 1;

  private static final String[] myColumnNames = {"Dynamic element", "Type"};

  private MyTreeTable myTreeTable;

  private static final Logger LOG = Logger.getInstance("org.jetbrains.plugins.groovy.annotator.intentions.dynamic.DynamicToolWindowWrapper");

  /** Returns the tree table, registering the tool window first if needed. */
  public TreeTable getTreeTable() {
    getToolWindow();

    return myTreeTable;
  }

  /**
   * Returns the tool window, registering it (with icon, title and content
   * panel) on first access.
   */
  public ToolWindow getToolWindow() {
    if (myToolWindow == null) {
      myToolWindow = ToolWindowManager.getInstance(myProject).registerToolWindow(DYNAMIC_TOOLWINDOW_ID, true, ToolWindowAnchor.RIGHT);
      myToolWindow.setIcon(GroovyIcons.DYNAMIC_PROPERTY_TOOL_WINDOW_ICON);
      myToolWindow.setTitle(GroovyBundle.message("dynamic.window"));
      myToolWindow.setToHideOnEmptyContent(true);

      final JPanel panel = buildBigPanel();
      final ContentManager contentManager = myToolWindow.getContentManager();
      final Content content = contentManager.getFactory().createContent(panel, "", false);
      content.setPreferredFocusableComponent(myTreeTable);
      contentManager.addContent(content);
    }

    return myToolWindow;
  }

  /**
   * Builds the tool window root panel: a filter field on top and the tree
   * table panel in the center.
   */
  private JPanel buildBigPanel() {
    myBigPanel = new JPanel(new BorderLayout());
    myBigPanel.setBackground(UIUtil.getFieldForegroundColor());

    // NOTE(review): message key "dynamic.toolwindow.property.fiter" looks
    // misspelled, but it must match the bundle key — do not "fix" here.
    final DynamicFilterComponent filter = new DynamicFilterComponent(GroovyBundle.message("dynamic.toolwindow.property.fiter"), 10);
    filter.setBackground(UIUtil.getLabelBackground());

    myBigPanel.add(new JLabel(GroovyBundle.message("dynamic.toolwindow.search.elements")), BorderLayout.NORTH);
    myBigPanel.add(filter, BorderLayout.NORTH);

    myTreeTablePanel = new JPanel(new BorderLayout());
    rebuildTreePanel();

    myBigPanel.add(myTreeTablePanel);
    myBigPanel.setPreferredSize(new Dimension(200, myBigPanel.getHeight()));

    myBigPanel.revalidate();
    return myBigPanel;
  }

  /** Rebuilds the whole tree from DynamicManager state (collapsed). */
  public void rebuildTreePanel() {
    DefaultMutableTreeNode rootNode = new DefaultMutableTreeNode();
    buildTree(rootNode);

    rebuildTreeView(rootNode, false);
  }

  /** Replaces the displayed tree table with one built from {@code root}. */
  private void rebuildTreeView(DefaultMutableTreeNode root, boolean expandAll) {
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    myTreeTablePanel.removeAll();

    final JScrollPane treeTable = createTable(root);

    if (expandAll) {
      TreeUtil.expandAll(myTreeTable.getTree());
    }

    myTreeTablePanel.add(treeTable);
    myTreeTablePanel.revalidate();
  }

  /**
   * Populates {@code rootNode} with one node per dynamic class, each holding
   * child nodes for that class's dynamic properties and methods.
   * Returns the same {@code rootNode}.
   */
  private DefaultMutableTreeNode buildTree(DefaultMutableTreeNode rootNode) {
    final Collection<DClassElement> containingClasses = DynamicManager.getInstance(myProject).getAllContainingClasses();

    DefaultMutableTreeNode containingClassNode;
    for (DClassElement containingClassElement : containingClasses) {
      containingClassNode = new DefaultMutableTreeNode(containingClassElement);

      final Collection<DPropertyElement> properties =
          DynamicManager.getInstance(myProject).findDynamicPropertiesOfClass(containingClassElement.getName());

//      if (properties.length == 0) continue;

      DefaultMutableTreeNode propertyTreeNode;
      for (DPropertyElement property : properties) {
        propertyTreeNode = new DefaultMutableTreeNode(property);
        containingClassNode.add(propertyTreeNode);
      }

      DefaultMutableTreeNode methodTreeNode;
      final Set<DMethodElement> methods = containingClassElement.getMethods();

      for (DMethodElement methodElement : methods) {
        // Re-resolve the concrete method element by its argument types so the
        // node carries the manager's canonical instance.
        final String[] psiTypes = QuickfixUtil.getArgumentsTypes(methodElement.getPairs());

        final DMethodElement method = DynamicManager.getInstance(myProject)
            .findConcreteDynamicMethod(containingClassElement.getName(), methodElement.getName(), psiTypes);

        methodTreeNode = new DefaultMutableTreeNode(method);
        containingClassNode.add(methodTreeNode);
      }

      rootNode.add(containingClassNode);
    }
    return rootNode;
  }

  /**
   * Builds the tree table widget for {@code myTreeRoot}: renderers, the type
   * cell editor (persisting type changes through DynamicManager), a
   * refactoring listener that follows class renames/moves, and keyboard
   * shortcuts (Delete to remove, F2 / Ctrl+F2 to edit name / type).
   */
  private JScrollPane createTable(final MutableTreeNode myTreeRoot) {
    ColumnInfo[] columnInfos =
        {new ClassColumnInfo(myColumnNames[CLASS_OR_ELEMENT_NAME_COLUMN]), new PropertyTypeColumnInfo(myColumnNames[TYPE_COLUMN])};

    myTreeTableModel = new ListTreeTableModelOnColumns(myTreeRoot, columnInfos);

    myTreeTable = new MyTreeTable(myTreeTableModel);

    final MyColoredTreeCellRenderer treeCellRenderer = new MyColoredTreeCellRenderer();

    // Render type strings with their short (presentable) name where the
    // string parses as a Groovy type; fall back to textual shortening.
    myTreeTable.setDefaultRenderer(String.class, new TableCellRenderer() {
      public Component getTableCellRendererComponent(JTable table,
                                                     Object value,
                                                     boolean isSelected,
                                                     boolean hasFocus,
                                                     int row,
                                                     int column) {
        if (value instanceof String) {
          final GrTypeElement typeElement;
          try {
            typeElement = GroovyPsiElementFactory.getInstance(myProject).createTypeElement(((String)value));
            if (typeElement != null){
              String shortName = typeElement.getType().getPresentableText();
              return new JLabel(shortName);
            }
          }
          catch (IncorrectOperationException e) {
            LOG.debug("Type cannot be created", e);
          }
          return new JLabel(QuickfixUtil.shortenType((String)value));
        }

        return new JLabel();
      }
    });

    myTreeTable.setTreeCellRenderer(treeCellRenderer);

    myTreeTable.setRootVisible(false);
    myTreeTable.setSelectionMode(DefaultTreeSelectionModel.CONTIGUOUS_TREE_SELECTION);

    final MyPropertyTypeCellEditor typeCellEditor = new MyPropertyTypeCellEditor();

    typeCellEditor.addCellEditorListener(new CellEditorListener() {
      // Commits an edited type string: canonicalizes it if it parses, then
      // asks DynamicManager to replace the property/method type.
      public void editingStopped(ChangeEvent e) {
        final TreeTableTree tree = getTree();

        String newTypeValue = ((MyPropertyTypeCellEditor)e.getSource()).getCellEditorValue();

        if (newTypeValue == null || tree == null) {
          myTreeTable.editingStopped(e);
          return;
        }

        try {
          GrTypeElement typeElement = GroovyPsiElementFactory.getInstance(myProject).createTypeElement(newTypeValue);
          if (typeElement != null) {
            String canonical = typeElement.getType().getCanonicalText();
            if (canonical != null) newTypeValue = canonical;
          }
        }
        catch (IncorrectOperationException ex) {
          //do nothing in case bad string is entered
        }

        final TreePath editingTypePath = tree.getSelectionPath();
        if (editingTypePath == null) return;

        final TreePath editingClassPath = editingTypePath.getParentPath();

        Object oldTypeValue = myTreeTable.getValueAt(tree.getRowForPath(editingTypePath), TYPE_COLUMN);

        if (!(oldTypeValue instanceof String)) {
          myTreeTable.editingStopped(e);
          return;
        }

        final Object editingPropertyObject = myTreeTable.getValueAt(tree.getRowForPath(editingTypePath), CLASS_OR_ELEMENT_NAME_COLUMN);
        final Object editingClassObject = myTreeTable.getValueAt(tree.getRowForPath(editingClassPath), CLASS_OR_ELEMENT_NAME_COLUMN);

        if (!(editingPropertyObject instanceof DItemElement) || !(editingClassObject instanceof DClassElement)) {
          myTreeTable.editingStopped(e);
          return;
        }

        final DItemElement dynamicElement = (DItemElement)editingPropertyObject;
        final String name = dynamicElement.getName();
        final String className = ((DClassElement)editingClassObject).getName();

        if (dynamicElement instanceof DPropertyElement) {
          DynamicManager.getInstance(myProject).replaceDynamicPropertyType(className, name, (String)oldTypeValue, newTypeValue);

        } else if (dynamicElement instanceof DMethodElement) {
          final List<MyPair> myPairList = ((DMethodElement)dynamicElement).getPairs();
          DynamicManager.getInstance(myProject).replaceDynamicMethodType(className, name, myPairList, (String)oldTypeValue, newTypeValue);
        }
      }

      public void editingCanceled(ChangeEvent e) {
        // NOTE(review): stray debug print left in production code.
        System.out.println("editing canceled");
        myTreeTable.editingCanceled(e);
      }
    });

    // Keep class nodes in sync when the underlying PsiClass is renamed or
    // moved by a refactoring.
    RefactoringListenerManager.getInstance(myProject).addListenerProvider(new RefactoringElementListenerProvider() {
      @Nullable
      public RefactoringElementListener getListener(final PsiElement element) {
        if (element instanceof PsiClass) {
          final String qualifiedName = ((PsiClass)element).getQualifiedName();

          return new RefactoringElementListener() {
            public void elementMoved(PsiElement newElement) {
              renameElement(qualifiedName, newElement);
            }

            public void elementRenamed(PsiElement newElement) {
              renameElement(qualifiedName, newElement);
            }

            private void renameElement(String oldClassName, PsiElement newElement) {
              if (newElement instanceof PsiClass) {
                final String newClassName = ((PsiClass)newElement).getQualifiedName();

                final DRootElement rootElement = DynamicManager.getInstance(myProject).getRootElement();
                final DClassElement oldClassElement = rootElement.getClassElement(oldClassName);
                final TreeNode oldClassNode = TreeUtil.findNodeWithObject((DefaultMutableTreeNode)myTreeRoot, oldClassElement);

                DynamicManager.getInstance(myProject).replaceClassName(oldClassElement, newClassName);
                myTreeTableModel.nodeChanged(oldClassNode);
              }
            }
          };
        }
        return null;
      }
    });

    myTreeTable.setDefaultEditor(String.class, typeCellEditor);

    // Delete removes the selected rows.
    myTreeTable.registerKeyboardAction(new ActionListener() {
      public void actionPerformed(ActionEvent event) {
        deleteRow();
      }
    }, KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0), JComponent.WHEN_FOCUSED);

    // F2 edits the element name column.
    myTreeTable.registerKeyboardAction(new ActionListener() {
      public void actionPerformed(ActionEvent event) {
        final int selectionRow = myTreeTable.getTree().getLeadSelectionRow();
        myTreeTable.editCellAt(selectionRow, CLASS_OR_ELEMENT_NAME_COLUMN, event);
      }
    }, KeyStroke.getKeyStroke(KeyEvent.VK_F2, 0), JComponent.WHEN_FOCUSED);

    // Ctrl+F2 edits the type column.
    myTreeTable.registerKeyboardAction(new ActionListener() {
      public void actionPerformed(ActionEvent event) {
        final int selectionRow = myTreeTable.getTree().getLeadSelectionRow();
        myTreeTable.editCellAt(selectionRow, TYPE_COLUMN, event);
      }
    }, KeyStroke.getKeyStroke(KeyEvent.VK_F2, KeyEvent.CTRL_MASK), JComponent.WHEN_FOCUSED);

    // todo use "myTreeTable.setAutoCreateRowSorter(true);" since 1.6

    myTreeTable.getTree().setShowsRootHandles(true);
    myTreeTable.getTableHeader().setReorderingAllowed(false);

    myTreeTable.setPreferredScrollableViewportSize(new Dimension(300, myTreeTable.getRowHeight() * 10));
    myTreeTable.getColumn(myColumnNames[CLASS_OR_ELEMENT_NAME_COLUMN]).setPreferredWidth(200);
    myTreeTable.getColumn(myColumnNames[TYPE_COLUMN]).setPreferredWidth(160);

    JScrollPane scrollpane = ScrollPaneFactory.createScrollPane(myTreeTable);

    scrollpane.setPreferredSize(new Dimension(600, 400));
    return scrollpane;
  }

  /**
   * Removes all selected rows. A confirmation dialog is shown once for the
   * whole selection; removing a class's last child removes the class itself.
   */
  private void deleteRow() {
    final int[] rows = myTreeTable.getSelectedRows();

    boolean isShowDialog = true;
    final int rowsCount = rows.length;
    int i = 0;

    final TreeTableTree tree = myTreeTable.getTree();

    for (TreePath selectionPath : tree.getSelectionPaths()) {
      // Ask only for the first path of a multi-row selection.
      if (rowsCount > 1) isShowDialog = false;
      if (i++ == 0) isShowDialog = true;

      //class
      final TreePath parent = selectionPath.getParentPath();

      if (parent.getParentPath() == null) {
        //selectionPath is class
        final Object classRow = selectionPath.getLastPathComponent();

        if (!(classRow instanceof DefaultMutableTreeNode)) return;

        if (!removeClass(((DefaultMutableTreeNode)classRow), isShowDialog, rowsCount)) return;
      }
      else {
        //selectionPath is dynamic item
        final Object classRow = parent.getLastPathComponent();
        final Object dynamicRow = selectionPath.getLastPathComponent();

        if (!(classRow instanceof DefaultMutableTreeNode)) return;
        if (!(dynamicRow instanceof DefaultMutableTreeNode)) return;

        final DefaultMutableTreeNode dynamicItemNode = (DefaultMutableTreeNode)dynamicRow;
        final DefaultMutableTreeNode classNode = (DefaultMutableTreeNode)classRow;

        if (classNode.getChildCount() == 1) {
          // Deleting the class's only member removes the whole class node.
          if (!removeClass(classNode, isShowDialog, rowsCount)) return;
        }
        else {
          if (!removeDynamicElement(dynamicItemNode, isShowDialog, rowsCount)) return;
        }
      }
    }
    DynamicManager.getInstance(myProject).fireChange();
  }

  /** Removes a class node; only valid while the node is still attached. */
  private boolean removeClass(DefaultMutableTreeNode classNode, boolean isShowDialog, int rowsCount) {
    final TreeNode rootObject = classNode.getParent();
    return rootObject instanceof DefaultMutableTreeNode && removeDynamicElement(classNode, isShowDialog, rowsCount);
  }

  /**
   * Optionally confirms with the user, then deletes the element from
   * DynamicManager. Returns false if the user cancelled or the node does not
   * hold a DNamedElement.
   */
  private boolean removeDynamicElement(DefaultMutableTreeNode child, boolean isShowDialog, int rowsCount) {
    Object namedElement = child.getUserObject();

    if (!(namedElement instanceof DNamedElement)) return false;

    if (isShowDialog) {
      int result;
      if (rowsCount > 1) {
        result = Messages.showOkCancelDialog(myBigPanel, GroovyBundle.message("are.you.sure.to.delete.elements", String.valueOf(rowsCount)),
                                             GroovyBundle.message("dynamic.element.deletion"), Messages.getQuestionIcon());

      }
      else {
        result = Messages.showOkCancelDialog(myBigPanel, GroovyBundle.message("are.you.sure.to.delete.dynamic.property", ((DNamedElement)namedElement).getName()),
                                             GroovyBundle.message("dynamic.property.deletion"), Messages.getQuestionIcon());
      }

      if (result != DialogWrapper.OK_EXIT_CODE) return false;
    }

    removeNamedElement(((DNamedElement)namedElement));

    /*final Object selectionNode = selectionPath.getLastPathComponent();
    if (!(selectionNode instanceof DefaultMutableTreeNode)) return false;

    DefaultMutableTreeNode toSelect = (parent.getChildAfter(child) != null || parent.getChildCount() == 1 ?
        ((DefaultMutableTreeNode) selectionNode).getNextNode() :
        ((DefaultMutableTreeNode) selectionNode).getPreviousNode());

//    DefaultMutableTreeNode toSelect = toSelect != null ? (DefaultMutableTreeNode) toSelect.getLastPathComponent() : null;

    removeFromParent(parent, child);

    if (toSelect != null) {
      setSelectedNode(toSelect, myProject);
    }*/

    return true;
  }

  /** Dispatches the delete to the right DynamicManager removal method. */
  private void removeNamedElement(DNamedElement namedElement) {
    if (namedElement instanceof DClassElement) {
      DynamicManager.getInstance(myProject).removeClassElement((DClassElement)namedElement);
    }
    else if (namedElement instanceof DItemElement) {
      DynamicManager.getInstance(myProject).removeItemElement((DItemElement)namedElement);
    }
  }

  /** Selects {@code node}, scrolls it into view and focuses the table. */
  public void setSelectedNode(DefaultMutableTreeNode node) {
    JTree tree = myTreeTable.getTree();
    TreePath path = new TreePath(node.getPath());
    tree.expandPath(path.getParentPath());
    int row = tree.getRowForPath(path);
    myTreeTable.getSelectionModel().setSelectionInterval(row, row);
    myTreeTable.scrollRectToVisible(myTreeTable.getCellRect(row, 0, true));
    ((ToolWindowManagerImpl)ToolWindowManager.getInstance(myProject)).requestFocus(myTreeTable, true);
  }

  /** Detaches {@code child} from {@code parent} and notifies the model. */
  public void removeFromParent(DefaultMutableTreeNode parent, DefaultMutableTreeNode child) {
    int idx = myTreeTableModel.getIndexOfChild(parent, child);
    child.removeFromParent();
    myTreeTableModel.nodesWereRemoved(parent, new int[]{idx}, new TreeNode[]{child});
  }

  /** Column 1: editable type of a dynamic item (classes have no type). */
  static class PropertyTypeColumnInfo extends ColumnInfo<DefaultMutableTreeNode, String> {
    public PropertyTypeColumnInfo(String name) {
      super(name);
    }

    public boolean isCellEditable(DefaultMutableTreeNode node) {
      final Object value = node.getUserObject();

      return !(value instanceof DClassElement);
    }

    public String valueOf(DefaultMutableTreeNode treeNode) {
      Object userObject = treeNode.getUserObject();

      if (userObject instanceof DItemElement) return ((DItemElement)userObject).getType();

      return null;
    }
  }

  /** Column 0: the element itself, rendered by the tree cell renderer. */
  class ClassColumnInfo extends ColumnInfo<DefaultMutableTreeNode, DNamedElement> {
    public ClassColumnInfo(String name) {
      super(name);
    }

    public boolean isCellEditable(DefaultMutableTreeNode treeNode) {
      final Object userObject = treeNode.getUserObject();

      return userObject instanceof DPropertyElement;
    }

    // Declaring TreeTableModel.class makes this the tree column.
    public Class getColumnClass() {
      return TreeTableModel.class;
    }

    public DNamedElement valueOf(DefaultMutableTreeNode treeNode) {
      Object userObject = treeNode.getUserObject();
      if (userObject instanceof DClassElement) return ((DClassElement)userObject);
      if (userObject instanceof DPropertyElement) return ((DPropertyElement)userObject);
      if (userObject instanceof DMethodElement) return ((DMethodElement)userObject);

      return null;
    }
  }

  /**
   * Filter field: rebuilds the tree keeping only items whose name contains
   * the filter text, and marks the match for highlighting in the renderer.
   */
  class DynamicFilterComponent extends FilterComponent {

    public DynamicFilterComponent(@NonNls String propertyName, int historySize) {
      super(propertyName, historySize);
    }

    public void filter() {
      DefaultMutableTreeNode rootNode = new DefaultMutableTreeNode();
      buildTree(rootNode);

      String filterText;
      List<DefaultMutableTreeNode> classes = new ArrayList<DefaultMutableTreeNode>();
      List<DefaultMutableTreeNode> dynamicNodes = new ArrayList<DefaultMutableTreeNode>();

      if (rootNode.isLeaf()) return;
      DefaultMutableTreeNode classNode = (DefaultMutableTreeNode)rootNode.getFirstChild();
      while (classNode != null) {

        if (classNode.isLeaf()) {
          classNode = (DefaultMutableTreeNode)rootNode.getChildAfter(classNode);
          continue;
        }

        DefaultMutableTreeNode dynamicNode = (DefaultMutableTreeNode)classNode.getFirstChild();
        while (dynamicNode != null) {
          final Object childObject = dynamicNode.getUserObject();
          if (!(childObject instanceof DItemElement)) break;

          filterText = getFilter();
          // Empty filter keeps every item and clears previous highlighting.
          if (filterText == null || "".equals(filterText)) {
            ((DItemElement)childObject).setHightlightedText("");

            dynamicNodes.add(dynamicNode);
            dynamicNode = (DefaultMutableTreeNode)classNode.getChildAfter(dynamicNode);
            continue;
          }

          final String name = (((DItemElement)childObject)).getName();

          if (name.contains(filterText)) {
            ((DItemElement)childObject).setHightlightedText(filterText);
            dynamicNodes.add(dynamicNode);
          }

          dynamicNode = (DefaultMutableTreeNode)classNode.getChildAfter(dynamicNode);
        }

        if (!dynamicNodes.isEmpty()) {
          classes.add(classNode);
        }

        // Re-attach only the surviving children.
        classNode.removeAllChildren();

        for (DefaultMutableTreeNode node : dynamicNodes) {
          classNode.add(node);
        }

        dynamicNodes.clear();

        classNode = (DefaultMutableTreeNode)rootNode.getChildAfter(classNode);
      }
      rootNode.removeAllChildren();

      for (DefaultMutableTreeNode aClass : classes) {
        rootNode.add(aClass);
      }

      classes.clear();

      rebuildTreeView(rootNode, true);
      myBigPanel.invalidate();
    }
  }

  /** Returns the model, registering the tool window first if needed. */
  public ListTreeTableModelOnColumns getTreeTableModel() {
    getToolWindow();

    return myTreeTableModel;
  }

  /** Renders class names bold/shortened and items with highlighted matches. */
  private static class MyColoredTreeCellRenderer extends ColoredTreeCellRenderer {
    public void customizeCellRenderer(JTree tree,
                                      Object value,
                                      boolean selected,
                                      boolean expanded,
                                      boolean leaf,
                                      int row,
                                      boolean hasFocus) {
      value = ((DefaultMutableTreeNode)value).getUserObject();

      setPaintFocusBorder(false);

      if (!(value instanceof DNamedElement)) return;

      if (value instanceof DClassElement) {
        final String containingClassName = ((DClassElement)value).getName();
        //        append(className, SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
        final String classShortName = QuickfixUtil.shortenType(containingClassName);
        append(classShortName, SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
      }

      if (value instanceof DItemElement) {
        final DItemElement itemElement = ((DItemElement)value);
        final String substringToHighlight = itemElement.getHightlightedText();
        final String name = itemElement.getName();

        if (substringToHighlight != null) {
          appendHighlightName(substringToHighlight, name);
        }
        else {
          appendName(name);
        }

        if (value instanceof DMethodElement) {
          appendMethodParameters(name, (DMethodElement)value);
        }
        else if (value instanceof DPropertyElement) {
          setToolTipText(name);
        }
      }
    }

    /** Renders {@code name} with the matched substring visually emphasized. */
    private void appendHighlightName(String substringToHighlight, String name) {
      final int begin = name.indexOf(substringToHighlight);
      //      if (name.length() <= begin) return;
      final String first = name.substring(0, begin);
      append(first, SimpleTextAttributes.SIMPLE_CELL_ATTRIBUTES);
      final TextAttributes textAttributes = TextAttributes.ERASE_MARKER;
      // NOTE(review): mutates the shared ERASE_MARKER instance — assumed
      // intentional here, but worth confirming upstream.
      textAttributes.setBackgroundColor(UIUtil.getListSelectionBackground());
      append(substringToHighlight, SimpleTextAttributes.fromTextAttributes(textAttributes));
      append(name.substring(first.length() + substringToHighlight.length()), SimpleTextAttributes.SIMPLE_CELL_ATTRIBUTES);
    }

    private void appendName(String name) {
      append(name, SimpleTextAttributes.SIMPLE_CELL_ATTRIBUTES);
    }

    /** Appends "(type1, type2, ...)" with unqualified parameter type names. */
    private void appendMethodParameters(final String name, DMethodElement value) {
      StringBuilder buffer = new StringBuilder();
      buffer.append("(");

      final String[] types = mapToUnqualified(QuickfixUtil.getArgumentsNames(value.getPairs()));
      for (int i = 0; i < types.length; i++) {
        if (i != 0) buffer.append(", ");
        String type = types[i];
        buffer.append(type);
      }
      buffer.append(")");

      append(buffer.toString(), SimpleTextAttributes.SIMPLE_CELL_ATTRIBUTES);
      setToolTipText(name + buffer.toString());
    }

    /** Strips package prefixes: "java.lang.String" -> "String". */
    private static String[] mapToUnqualified(final String[] argumentsNames) {
      return ContainerUtil.map2Array(argumentsNames, String.class, new Function<String, String>() {
        public String fun(final String s) {
          if (s == null) return null;
          int index = s.lastIndexOf(".");
          if (index > 0 && index < s.length() - 1) return s.substring(index + 1);
          return s;
        }
      });
    }
  }

  /** Edits the type column using a Groovy-aware editor text field. */
  private class MyPropertyTypeCellEditor extends AbstractTableCellEditor {
    final EditorTextField field;

    public MyPropertyTypeCellEditor() {
      final Document document = PsiDocumentManager.getInstance(myProject).getDocument(new GroovyCodeFragment(myProject, ""));
      field = new EditorTextField(document, myProject, GroovyFileType.GROOVY_FILE_TYPE);
    }

    public String getCellEditorValue() {
      return field.getText();
    }

    public Component getTableCellEditorComponent(JTable table, Object value, boolean isSelected, int row, int column) {
      if (value instanceof String) {
        field.setText(((String)value));
      }

      return field;
    }
  }

  /** The current tree, or null before the table has been built. */
  @Nullable
  private TreeTableTree getTree() {
    return myTreeTable != null ? myTreeTable.getTree() : null;
  }

  /**
   * Tree table that exposes the selected element as PSI data so platform
   * actions (navigation, etc.) work on the selection.
   */
  public class MyTreeTable extends TreeTable implements DataProvider {
    public MyTreeTable(TreeTableModel treeTableModel) {
      super(treeTableModel);
    }

    @Nullable
    public Object getData(@NonNls String dataId) {
      if (DataConstantsEx.PSI_ELEMENT.equals(dataId)) {
        return getSelectedElement();
      }
      else if (DataConstantsEx.PSI_FILE.equals(dataId)) {
        final PsiElement element = getSelectedElement();
        if (element == null) return null;
        return element.getContainingFile();
      }
      return null;
    }

    /**
     * Maps the selected tree node to a PSI element: the resolved class for a
     * class node (boxing primitives first), or the item's own PSI element.
     */
    private PsiElement getSelectedElement() {
      final TreePath path = getTree().getSelectionPath();
      if (path == null) return null;

      final Object selectedObject = path.getLastPathComponent();
      if (!(selectedObject instanceof DefaultMutableTreeNode)) return null;

      final DefaultMutableTreeNode selectedNode = (DefaultMutableTreeNode)selectedObject;
      final Object userObject = selectedNode.getUserObject();
      if (!(userObject instanceof DNamedElement)) return null;

      if (userObject instanceof DClassElement) {
        final DClassElement classElement = (DClassElement)userObject;

        try {
          final GrTypeElement typeElement = GroovyPsiElementFactory.getInstance(myProject).createTypeElement(classElement.getName());
          PsiType type = typeElement.getType();

          if (type instanceof PsiPrimitiveType) {
            type = ((PsiPrimitiveType)type).getBoxedType(PsiManager.getInstance(myProject), GlobalSearchScope.allScope(myProject));
          }

          if (!(type instanceof PsiClassType)) return null;
          return ((PsiClassType)type).resolve();
        }
        catch (IncorrectOperationException e) {
          return null;
        }
      }
      else if (userObject instanceof DItemElement) {
        final DItemElement itemElement = (DItemElement)userObject;

        final TreeNode parentNode = selectedNode.getParent();
        if (!(parentNode instanceof DefaultMutableTreeNode)) return null;

        final Object classObject = ((DefaultMutableTreeNode)parentNode).getUserObject();
        if (!(classObject instanceof DClassElement)) return null;

        final String className = ((DClassElement)classObject).getName();

        return itemElement.getPsi(PsiManager.getInstance(myProject), className);
      }
      return null;
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.segment; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import org.apache.druid.data.input.InputRow; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IndexSizeExceededException; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ThreadLocalRandom; /** * Helps tests make segments. 
*/
public class IndexBuilder
{
  // Each persisted sub-index in buildMMappedMergedIndex() gets this many rows,
  // forcing a real multi-segment merge even for tiny inputs.
  private static final int ROWS_PER_INDEX_FOR_MERGING = 1;
  private static final int DEFAULT_MAX_ROWS = Integer.MAX_VALUE;

  private IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMetrics(new CountAggregatorFactory("count"))
      .build();
  private SegmentWriteOutMediumFactory segmentWriteOutMediumFactory = OffHeapMemorySegmentWriteOutMediumFactory.instance();
  private IndexMerger indexMerger = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory);
  private File tmpDir;
  private IndexSpec indexSpec = new IndexSpec();
  private int maxRows = DEFAULT_MAX_ROWS;

  private final List<InputRow> rows = new ArrayList<>();

  private IndexBuilder()
  {
  }

  /** Static factory; use the fluent setters below to configure the build. */
  public static IndexBuilder create()
  {
    return new IndexBuilder();
  }

  /** Replaces the default count-only schema. */
  public IndexBuilder schema(IncrementalIndexSchema schema)
  {
    this.schema = schema;
    return this;
  }

  /** Sets the write-out medium and rebuilds the merger to match it. */
  public IndexBuilder segmentWriteOutMediumFactory(SegmentWriteOutMediumFactory segmentWriteOutMediumFactory)
  {
    this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory;
    this.indexMerger = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory);
    return this;
  }

  public IndexBuilder indexSpec(IndexSpec indexSpec)
  {
    this.indexSpec = indexSpec;
    return this;
  }

  /** Directory for persisted segments; required by the mmapped builds. */
  public IndexBuilder tmpDir(File tmpDir)
  {
    this.tmpDir = tmpDir;
    return this;
  }

  /**
   * Caps the row count of each incremental index built by this builder.
   * Previously the {@code maxRows} field existed but could not be configured;
   * the default ({@link Integer#MAX_VALUE}) is unchanged, so existing callers
   * are unaffected.
   */
  public IndexBuilder maxRows(int maxRows)
  {
    this.maxRows = maxRows;
    return this;
  }

  /** Replaces (not appends to) the set of rows to index. */
  public IndexBuilder rows(Iterable<InputRow> rows)
  {
    this.rows.clear();
    Iterables.addAll(this.rows, rows);
    return this;
  }

  /** Builds an on-heap incremental index holding all configured rows. */
  public IncrementalIndex buildIncrementalIndex()
  {
    return buildIncrementalIndexWithRows(schema, maxRows, rows);
  }

  /**
   * Persists the incremental index to {@link #tmpDir} and loads it back as a
   * memory-mapped {@link QueryableIndex}.
   *
   * @throws NullPointerException if {@code tmpDir} was not set
   */
  public QueryableIndex buildMMappedIndex()
  {
    Preconditions.checkNotNull(indexMerger, "indexMerger");
    Preconditions.checkNotNull(tmpDir, "tmpDir");
    // try-with-resources closes the incremental index once it is persisted
    try (final IncrementalIndex incrementalIndex = buildIncrementalIndex()) {
      return TestHelper.getTestIndexIO().loadIndex(
          indexMerger.persist(
              incrementalIndex,
              new File(tmpDir, StringUtils.format("testIndex-%s", ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE))),
              indexSpec,
              null
          )
      );
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Splits the rows into tiny per-segment indexes, persists each one, merges
   * them with combining aggregators, and loads the merged result. Exercises
   * the multi-segment merge path rather than a single persist.
   *
   * @throws NullPointerException if {@code tmpDir} was not set
   */
  public QueryableIndex buildMMappedMergedIndex()
  {
    IndexMerger indexMerger = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory);
    Preconditions.checkNotNull(tmpDir, "tmpDir");
    final List<QueryableIndex> persisted = new ArrayList<>();
    try {
      for (int i = 0; i < rows.size(); i += ROWS_PER_INDEX_FOR_MERGING) {
        persisted.add(
            TestHelper.getTestIndexIO().loadIndex(
                indexMerger.persist(
                    buildIncrementalIndexWithRows(
                        schema,
                        maxRows,
                        rows.subList(i, Math.min(rows.size(), i + ROWS_PER_INDEX_FOR_MERGING))
                    ),
                    new File(tmpDir, StringUtils.format("testIndex-%s", UUID.randomUUID().toString())),
                    indexSpec,
                    null
                )
            )
        );
      }
      final QueryableIndex merged = TestHelper.getTestIndexIO().loadIndex(
          indexMerger.merge(
              Lists.transform(
                  persisted,
                  new Function<QueryableIndex, IndexableAdapter>()
                  {
                    @Override
                    public IndexableAdapter apply(QueryableIndex input)
                    {
                      return new QueryableIndexIndexableAdapter(input);
                    }
                  }
              ),
              true,
              // merging already-aggregated segments requires the combining
              // form of each aggregator
              Iterables.toArray(
                  Iterables.transform(
                      Arrays.asList(schema.getMetrics()),
                      new Function<AggregatorFactory, AggregatorFactory>()
                      {
                        @Override
                        public AggregatorFactory apply(AggregatorFactory input)
                        {
                          return input.getCombiningFactory();
                        }
                      }
                  ),
                  AggregatorFactory.class
              ),
              new File(tmpDir, StringUtils.format("testIndex-%s", UUID.randomUUID())),
              indexSpec
          )
      );
      // the merged index holds its own copy; the intermediates can be closed
      for (QueryableIndex index : persisted) {
        index.close();
      }
      return merged;
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Builds an on-heap incremental index and adds every row, wrapping the
   *  checked size-exceeded exception as unchecked for test convenience. */
  private static IncrementalIndex buildIncrementalIndexWithRows(
      IncrementalIndexSchema schema,
      int maxRows,
      Iterable<InputRow> rows
  )
  {
    Preconditions.checkNotNull(schema, "schema");
    final IncrementalIndex incrementalIndex = new IncrementalIndex.Builder()
        .setIndexSchema(schema)
        .setMaxRowCount(maxRows)
        .buildOnheap();
    for (InputRow row : rows) {
      try {
        incrementalIndex.add(row);
      }
      catch (IndexSizeExceededException e) {
        throw new RuntimeException(e);
      }
    }
    return incrementalIndex;
  }
}
/*  IndexPage.java - starting web page for GramWord
 *  @(#) $Id: 57d01d0860aef0c2f2783647be70c3c381710c86 $
 *  2017-05-29: javadoc 1.8
 *  2016-09-22: IBANFilter removed
 *  2016-09-21: simple handler deprecated
 *  2016-09-20, Dr. Georg Fischer
 */
/*
 * Copyright 2016 Dr. Georg Fischer <punctum at punctum dot kom>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.teherba.gramword.web;

import org.teherba.common.web.BasePage;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.Serializable;
import java.util.Iterator;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItem;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;

/** GramWord main dialog page: shows a short description, a colorized
 *  example sentence, and the upload/options form that feeds the servlet.
 *  @author Dr. Georg Fischer
 */
public class IndexPage implements Serializable {
    public final static String CVSID = "@(#) $Id: 57d01d0860aef0c2f2783647be70c3c381710c86 $";
    public final static long serialVersionUID = 19470629;

    /** log4j logger (category) */
    private Logger log;

    /** No-args Constructor */
    public IndexPage() {
        log = LogManager.getLogger(IndexPage.class.getName());
    } // Constructor

    /** Output the main dialog page for GramWord.
     *  Reads the previously submitted form fields (if any) so the selects
     *  can re-select the user's last choices, then writes the page.
     *  @param request  request with header fields
     *  @param response response with writer
     *  @param basePage reference to common methods and error messages
     */
    public void dialog(HttpServletRequest request, HttpServletResponse response
            , BasePage basePage
            ) {
        try {
            // previously submitted form values; used to pre-select options below
            String language = basePage.getFormField("lang"    );
            String encoding = basePage.getFormField("enc"     );
            String filter   = basePage.getFormField("filter"  );
            String format   = basePage.getFormField("format"  );
            String grammar  = basePage.getFormField("grammar" );
            String infile   = basePage.getFormField("infile"  );
            String strategy = basePage.getFormField("strat"   );
            FileItem fileItem = basePage.getFormFile(0);

            PrintWriter out = basePage.writeHeader(request, response, language);
            out.write("<title>" + basePage.getAppName() + " Main Page</title>\n");
            out.write(" <script src=\"script.js\" type=\"text/javascript\">\n");
            out.write(" </script>\n");
            out.write("</head>\n<body>\n");
            // HTML comment echoing the received parameters - useful when debugging the form
            out.write("<!--lang=\"" + language + "\", enc=\"" + encoding
                    + "\", filter=\"" + filter + "\", format=\"" + format
                    + "\", grammar=\"" + grammar + "\", infile=\"" + infile
                    + "\", strat=\"" + strategy + "\"-->\n");

            // option values (opt*) and their display texts (en*) for the 5 selects
            String[] optEnc = new String []
                    /* 0 */ { "ISO-8859-1"
                    /* 1 */ , "UTF-8"
                    } ;
            String[] enEnc = new String []
                    /* 0 */ { "ISO-8859-1"
                    /* 1 */ , "UTF-8"
                    } ;
            String[] optFilter = new String []
                    /* 0 */ { "queue"
                    /* 1 */ , "bibleref"
                    /* 2 */ , "konto"
                    /* 3 */ , "number"
                    /* 4 */ , "wordtype"
                    } ;
            String[] enFilter = new String []
                    /* 0 */ { "queue"
                    /* 1 */ , "bibleref"
                    /* 2 */ , "konto"
                    /* 3 */ , "number"
                    /* 4 */ , "wordtype"
                    } ;
            String[] optFormat = new String []
                    /* 0 */ { "html"
                    /* 1 */ , "text"
                    /* 2 */ , "dict"
                    } ;
            String[] enFormat = new String []
                    /* 0 */ { "HTML"
                    /* 1 */ , "Text"
                    /* 2 */ , "Dictionary"
                    } ;
            String[] optGrammar = new String []
                    /* 0 */ { "de"
                 // /* 1 */ , "en"   // English grammar not yet supported
                    } ;
            String[] enGrammar = new String []
                    /* 0 */ { "Deutsch"
                 // /* 1 */ , "English"
                    } ;
            String[] optStrat = new String []
                    /* 0 */ { "all"
                    /* 1 */ , "prsplit"
                    /* 2 */ , "sasplit"
                    } ;
            String[] enStrat = new String []
                    /* 0 */ { "all"
                    /* 1 */ , "prsplit"
                    /* 2 */ , "sasplit"
                    } ;
            int index = 0;

            out.write(" <h2>GramWord</h2>\n");
            out.write(" <p><strong>GramWord</strong> is a Java package which uses a relational\n");
            out.write(" (MySql) database\n");
            out.write(" to recognize a limited set of German words.\n");
            out.write(" </p><p>\n");
            out.write(" Sets of common words, names,\n");
            out.write(" roots and endings of verbs, substantives, adjectives and adverbs,\n");
            out.write(" together with their grammatical type and conjugation/declination\n");
            out.write(" are preloaded from dictionary files into database tables.\n");
            out.write(" </p><p>\n");
            out.write(" Several decision algorithms use these tables to determine the\n");
            out.write(" grammatical type of the words in a text. In the HTML output,\n");
            out.write(" the recognized words are shown in different colors.\n");
            out.write(" </p>\n");
            out.write(" <strong>Short Example</strong> (sentence from \"Don Quijote\")\n");
            out.write("<blockquote>\n");
            out.write("<span class=\"Pr\" morph=\"\">Nachdem</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Aj\" morph=\"Qant\">alle</span> <span class=\"Aj\" morph=\"Qant\">diese</span> <span class=\"Sb\" morph=\"Pl\">Vorkehrungen</span> <span class=\"Vb\" morph=\"SPa0\">getroffen</span>, <span class=\"Vb\" morph=\"SIp11\">wollte</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Un\" morph=\"\">nicht</span>\n");
            out.write("<span class=\"Aj\" morph=\"Cmpr\">l&#xe4;nger</span> <span class=\"Vb\" morph=\"SIn0\">warten</span>, <span class=\"Vb\" morph=\"SIn0\">sein</span> <span class=\"Vb\" morph=\"RtWeak\">Vorhaben</span> <span class=\"Pr\" morph=\"Shor\">ins</span> <span class=\"Sb\" morph=\"SgNt\">Werk</span> <span class=\"Pr\" morph=\"Prim\">zu</span> <span class=\"Vb\" morph=\"RtWeak\">setzen</span>; <span class=\"Pn\" morph=\"SgPersNomvNt3\">es</span> dr&#xe4;ngte <span class=\"Pn\" morph=\"SgPersAccv3Ms\">ihn</span>\n");
            out.write("<span class=\"Av\" morph=\"\">dazu</span> <span class=\"Ar\" morph=\"DetmNomvSgMs\">der</span> <span class=\"Sb\" morph=\"SgMs\">Gedanke</span> <span class=\"Pr\" morph=\"Prim\">an</span> <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> <span class=\"Sb\" morph=\"SgFm\">Entbehrung</span>, <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> <span class=\"Sb\" morph=\"SgFm\">Welt</span> <span class=\"Pr\" morph=\"\">durch</span> <span class=\"Vb\" morph=\"SIn0\">sein</span>\n");
            out.write("<span class=\"Vb\" morph=\"RtWeak\">Z&#xf6;gern</span> <span class=\"Vb\" morph=\"SCs13\">erleide</span>, derart <span class=\"Vb\" morph=\"SIp91\">waren</span> <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> Unbilden, <span class=\"Pn\" morph=\"Relt\">denen</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Pr\" morph=\"Prim\">zu</span> <span class=\"Vb\" morph=\"RtWeak\">steuern</span>,\n");
            out.write("<span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> <span class=\"Sb\" morph=\"Pl\">Ungerechtigkeiten</span>, <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Vb\" morph=\"SCs93\">zurechtzubringen</span>, <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> Ungeb&#xfc;hr,\n");
            out.write("<span class=\"Ar\" morph=\"DetmNomvSgMs\">der</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Vb\" morph=\"SCs93\">abzuhelfen</span>, <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> Mi&#xdf;br&#xe4;uche, <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> wiedergutzumachen,\n");
            out.write("<span class=\"Aj\" morph=\"Root\">kurz</span>, <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> <span class=\"Sb\" morph=\"Pl\">Pflichten</span>, <span class=\"Pn\" morph=\"Relt\">denen</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Pr\" morph=\"Prim\">zu</span> <span class=\"Vb\" morph=\"RtWeak\">gen&#xfc;gen</span> <span class=\"Vb\" morph=\"SIp13\">gedachte</span>. <span class=\"Cj\" morph=\"\">Und</span> <span class=\"Un\" morph=\"\">so</span>, <span class=\"Un\" morph=\"\">ohne</span>\n");
            out.write("irgendeinem <span class=\"Pr\" morph=\"Prim\">von</span> <span class=\"Pn\" morph=\"SgPersGenv3Ms\">seiner</span> Absicht <span class=\"Sb\" morph=\"SgFm\">Kunde</span> <span class=\"Pr\" morph=\"Prim\">zu</span> <span class=\"Vb\" morph=\"SIn0\">geben</span> <span class=\"Cj\" morph=\"\">und</span> <span class=\"Un\" morph=\"\">ohne</span> <span class=\"Cj\" morph=\"\">da&#xdf;</span>\n");
            out.write("<span class=\"Pn\" morph=\"UndtNomv\">jemand</span> <span class=\"Pn\" morph=\"SgPersAccv3Ms\">ihn</span> <span class=\"Vb\" morph=\"SIp11\">sah</span>, bewehrte <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Pn\" morph=\"ReflSg3\">sich</span> <span class=\"Ar\" morph=\"UndtGenvSgMs\">eines</span> <span class=\"Nm\" morph=\"PersSurn\">Morgens</span> <span class=\"Pr\" morph=\"Prim\">vor</span> Anbruch <span class=\"Ar\" morph=\"DetmGenvSgMs\">des</span>\n");
            out.write("<span class=\"Sb\" morph=\"SgGe\">Tages</span> - <span class=\"Pn\" morph=\"SgPersNomvNt3\">es</span> <span class=\"Vb\" morph=\"SIp11\">war</span> <span class=\"Ar\" morph=\"UndtGenvSgFm\">einer</span> <span class=\"Ar\" morph=\"DetmNomvSgMs\">der</span> <span class=\"Vb\" morph=\"SIn0\">hei&#xdf;en</span> Julitage - <span class=\"Pr\" morph=\"Prim\">mit</span> <span class=\"Pn\" morph=\"SgPersGenv3Ms\">seiner</span> <span class=\"Aj\" morph=\"Qant\">ganzen</span>\n");
            out.write("<span class=\"Sb\" morph=\"SgFm\">R&#xfc;stung</span>, <span class=\"Vb\" morph=\"SIp11\">stieg</span> <span class=\"Pr\" morph=\"Prim\">auf</span> <span class=\"Ar\" morph=\"DetmDatvPl\">den</span> <span class=\"Nm\" morph=\"FmZool\">Rosinante</span>, <span class=\"Pr\" morph=\"\">nachdem</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Pn\" morph=\"SgPossDatvPl3\">seinen</span>\n");
            out.write("zusammengeflickten Turnierhelm aufgesetzt, fa&#xdf;te <span class=\"Pn\" morph=\"SgPossNomvFm3\">seine</span> <span class=\"Sb\" morph=\"SgFm\">Tartsche</span>\n");
            out.write("<span class=\"Pr\" morph=\"Prim\">in</span> <span class=\"Ar\" morph=\"DetmDatvPl\">den</span> <span class=\"Sb\" morph=\"SgMsBody\">Arm</span>, <span class=\"Vb\" morph=\"SIp11\">nahm</span> <span class=\"Pn\" morph=\"SgPossDatvPl3\">seinen</span> <span class=\"Sb\" morph=\"SgMs\">Speer</span> <span class=\"Cj\" morph=\"\">und</span> <span class=\"Vb\" morph=\"SIp11\">zog</span> <span class=\"Pr\" morph=\"\">durch</span> <span class=\"Ar\" morph=\"DetmNomvSgFm\">die</span> Hinterpforte\n");
            out.write("<span class=\"Pn\" morph=\"SgPossGenvMs3\">seines</span> <span class=\"Sb\" morph=\"SgGe\">Hofes</span> <span class=\"Pr\" morph=\"\">hinaus</span> <span class=\"Pr\" morph=\"Shor\">aufs</span> <span class=\"Sb\" morph=\"SgNt\">Feld</span>, <span class=\"Pr\" morph=\"Prim\">mit</span> <span class=\"Aj\" morph=\"XC\">gewaltiger</span> <span class=\"Sb\" morph=\"SgFm\">Befriedigung</span> <span class=\"Cj\" morph=\"\">und</span>\n");
            out.write("Herzensfreude <span class=\"Av\" morph=\"ModlAnct\">darob</span>, <span class=\"Pr\" morph=\"Prim\">mit</span> <span class=\"Ir\" morph=\"Prim\">wie</span> <span class=\"Nm\" morph=\"PersSurn\">gro&#xdf;er</span> <span class=\"Sb\" morph=\"SgFm\">Leichtigkeit</span> <span class=\"Pn\" morph=\"SgPersNomvMs3\">er</span> <span class=\"Vb\" morph=\"SIn0\">sein</span>\n");
            out.write("<span class=\"Aj\" morph=\"XP\">l&#xf6;bliches</span> <span class=\"Vb\" morph=\"RtWeak\">Vorhaben</span> <span class=\"Vb\" morph=\"SCt93\">auszuf&#xfc;hren</span> <span class=\"Vb\" morph=\"SPa0\">begonnen</span>.\n");
            out.write("</blockquote>\n");

            // upload form: file, encoding, target format, filter, grammar, strategy
            out.write("<form action=\"servlet\" method=\"POST\" enctype=\"multipart/form-data\">\n");
            out.write(" <input type = \"hidden\" name=\"view\" value=\"index2\" />\n");
            out.write(" <table cellpadding=\"4\" border=\"0\">\n");
            out.write(" <tr valign=\"top\">\n");
            out.write(" <td colspan=\"5\"><strong>Source file to be uploaded: </strong>");
            out.write(" <input name=\"infile\" type=\"file\" style=\"font-family: Courier, monospace\" ");
            out.write(" maxsize=\"256\" size=\"80\" value=\"" + infile + "\" />\n");
            out.write(" </td>\n");
            out.write(" </tr>\n");
            out.write(" <tr valign=\"top\">\n");
            out.write(" <td><strong>Source<br />Encoding</strong><br />\n");
            out.write(" <select name=\"enc\" size=\"" + optEnc.length + "\">\n");
            index = 0;
            while (index < optEnc.length) {
                out.write(" <option value=\"" + optEnc[index] + "\""
                        + (optEnc[index].equals(encoding) ? " selected" : "")
                        + ">" + enEnc[index] + "</option>\n");
                index ++;
            } // while index
            out.write(" </select>\n");
            out.write(" </td>\n");
            out.write(" <td><strong>Target<br />Format</strong><br />\n");
            out.write(" <select name=\"format\" size=\"" + optFormat.length + "\">\n");
            index = 0;
            while (index < optFormat.length) {
                out.write(" <option value=\"" + optFormat[index] + "\""
                        + (optFormat[index].equals(format) ? " selected" : "")
                        + ">" + enFormat[index] + "</option>\n");
                index ++;
            } // while index
            out.write(" </select>\n");
            out.write(" </td>\n");
            out.write(" <td><strong>Filter<br />to be used</strong><br />\n");
            out.write(" <select name=\"filter\" size=\"" + optFilter.length + "\">\n");
            index = 0;
            while (index < optFilter.length) {
                out.write(" <option value=\"" + optFilter[index] + "\""
                        + (optFilter[index].equals(filter) ? " selected" : "")
                        + ">" + enFilter[index] + "</option>\n");
                index ++;
            } // while index
            out.write(" </select>\n");
            out.write(" </td>\n");
            out.write(" <td><strong>Source<br />Grammar</strong><br />\n");
            out.write(" <select name=\"grammar\" size=\""+ optGrammar.length + "\">\n");
            index = 0;
            while (index < optGrammar.length) {
                // bugfix: compare against the submitted "grammar" field (was: language),
                // consistent with the enc/format/filter/strat selects above
                out.write(" <option value=\"" + optGrammar[index] + "\""
                        + (optGrammar[index].equals(grammar) ? " selected" : "")
                        + ">" + enGrammar[index] + "</option>\n");
                index ++;
            } // while index
            out.write(" </select>\n");
            out.write(" </td>\n");
            out.write(" <td><strong>Strategy</strong><br />\n");
            out.write(" <select name=\"strat\" size=\"" + optStrat.length + "\">\n");
            index = 0;
            while (index < optStrat.length) {
                out.write(" <option value=\"" + optStrat[index] + "\""
                        + (optStrat[index].equals(strategy) ? " selected" : "")
                        + ">" + enStrat[index] + "</option>\n");
                index ++;
            } // while index
            out.write(" </select>\n");
            out.write(" </td>\n");
            out.write(" <td><input type=\"submit\" value=\"Submit\" /></td>\n");
            out.write(" </tr>\n");
            out.write(" </table>\n");
            out.write("</form>\n");

            basePage.writeAuxiliaryLinks(language, "main");

            // static list of example applications
            out.write("<h4>Applications of <em>QueueTransformer</em></h4>\n");
            out.write("<table border=\"0\">\n");
            out.write(" <tr><td>bibleref:</td><td>Table of Luther's pericopes - <a href=\"bibleref/luther_perikope.htm\">(original)</a> and with\n");
            out.write(" <a href=\"bibleref/luther_perikope.html\">autolinked</a> bible references</td></tr>\n");
            out.write(" <tr><td>&nbsp;</td><td><a href=\"bibleref/wiki_perikope.htm\">Table of pericopes</a> from de.wikipedia -\n");
            out.write(" <a href=\"bibleref/wiki_perikope.html\">autolinked</a></td></tr>\n");
            out.write(" <tr><td>konto:</td><td>Autolinked German bank ids with <a href=\"konto.html\">check/correction</a>\n");
            out.write(" of account numbers nearby</td></tr>\n");
            out.write(" <tr><td>number:</td><td>Parsing of German number words in <a href=\"number.html\">Genesis 5</a></td></tr>\n");
            out.write(" <tr><td>wordtype:</td><td>Don Quixote <a href=\"wordtype/quixote0.html\">(original)</a>, with\n");
            out.write(" <a href=\"queue.html\">green</a> uppercase words, and with <a href=\"wordtype.html\">colored word types</a>.</td></tr>\n");
            out.write("</table>\n");

            basePage.writeTrailer(language, "quest");
        } catch (Exception exc) {
            log.error(exc.getMessage(), exc);
        }
    } // dialog

    //================
    // Main method
    //================

    /** Test driver
     *  @param args language code: "en", "de"
     */
    public static void main(String[] args) {
        IndexPage help = new IndexPage();
        System.out.println("no messages");
    } // main

} // IndexPage
/*
 * $Id$
 */

/*

Copyright (c) 2000-2003 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.

*/

package org.lockss.test;

import java.io.*;
import java.util.*;
import java.net.*;
import java.security.MessageDigest;
import org.lockss.app.*;
import org.lockss.config.Configuration;
import org.lockss.daemon.*;
import org.lockss.util.*;
import org.lockss.plugin.*;
import org.lockss.plugin.base.*;
import org.lockss.state.*;
import org.lockss.test.*;
import java.math.BigInteger;

/**
 * Minimal fully functional plugin capable of serving a little static content.
 * Content is registered via {@code SAU.storeCachedUrl()} and then served back
 * through the standard CachedUrl interface; all unsupported operations throw
 * {@link UnsupportedOperationException}.
 */
public class StaticContentPlugin extends BasePlugin implements PluginTestable {
  static Logger log = Logger.getLogger("StaticContentPlugin");

  // url (String) -> CachedUrl for all content stored in this plugin.
  // NOTE(review): kept as a raw Map to avoid changing the field's declared type.
  Map cuMap = new HashMap();

  public StaticContentPlugin() {
  }

  public String getVersion() {
    throw new UnsupportedOperationException("Not implemented");
  }

  public String getPluginName() {
    return "Static Content";
  }

  public List getSupportedTitles() {
    throw new UnsupportedOperationException("Not implemented");
  }

  public List getLocalAuConfigDescrs() {
    return Collections.EMPTY_LIST;
    // throw new UnsupportedOperationException("Not implemented");
  }

  /** Creates a static-content AU; the supplied config is ignored. */
  protected ArchivalUnit createAu0(Configuration auConfig)
      throws ArchivalUnit.ConfigurationException {
    return new SAU(this);
  }

  public void registerArchivalUnit(ArchivalUnit au) {
    aus.add(au);
  }

  public void unregisterArchivalUnit(ArchivalUnit au) {
    aus.remove(au);
  }

  /** Archival unit serving the statically registered content. */
  public class SAU extends BaseArchivalUnit {

    protected SAU(Plugin myPlugin) {
      super(myPlugin);
    }

    protected String makeName() {
      return "Static Content AU";
    }

    protected String makeStartUrl() {
      throw new UnsupportedOperationException("Not Implemented");
    }

    public CachedUrlSet makeCachedUrlSet(CachedUrlSetSpec cuss) {
      return new SCUS(this, cuss);
    }

    /** Returns the CachedUrl previously stored for this url, or null. */
    public CachedUrl makeCachedUrl(String url) {
      CachedUrl res = (CachedUrl)cuMap.get(url);
      log.debug("makeCachedUrl(" + url + ") = " + res);
      // bugfix: return the value already looked up (and logged) above
      // instead of performing a redundant second map lookup
      return res;
    }

    public org.lockss.plugin.UrlCacher makeUrlCacher(String url) {
      throw new UnsupportedOperationException("Not implemented");
    }

    /** A url should be cached iff content has been stored for it. */
    public boolean shouldBeCached(String url) {
      return cuMap.containsKey(url);
    }

    public Collection getUrlStems() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public CachedUrlSet cachedUrlSetFactory(ArchivalUnit owner,
                                            CachedUrlSetSpec cuss) {
      throw new UnsupportedOperationException("Not implemented");
    }

    public CachedUrl cachedUrlFactory(CachedUrlSet owner, String url) {
      throw new UnsupportedOperationException("Not implemented");
    }

    public UrlCacher urlCacherFactory(CachedUrlSet owner, String url) {
      throw new UnsupportedOperationException("Not implemented");
    }

    public String getManifestPage() {
      throw new UnsupportedOperationException("Not Implemented");
    }

    public FilterRule getFilterRule(String mimeType) {
      throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * Create a CU with content and store it in AU
     * @param owner the CUS owner
     * @param url the url
     * @param type the type
     * @param contents the contents
     */
    public void storeCachedUrl(CachedUrlSet owner, String url,
                               String type, String contents) {
      SCU scu = new SCU(owner, url, type, contents);
      cuMap.put(scu.getUrl(), scu);
    }

    /** Convenience overload with no owning CachedUrlSet. */
    public void storeCachedUrl(String url, String type, String contents) {
      storeCachedUrl(null, url, type, contents);
    }

    public String toString() {
      return "[sau: " + cuMap + "]";
    }

    protected CrawlRule makeRule() {
      throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * loadDefiningConfig
     *
     * @param config Configuration
     */
    protected void loadAuConfigDescrs(Configuration config) {
    }

    public List<PermissionChecker> makePermissionCheckers() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public Collection<String> getStartUrls() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public int getRefetchDepth() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public LoginPageChecker getLoginPageChecker() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public String getCookiePolicy() {
      throw new UnsupportedOperationException("Not implemented");
    }
  }

  /** In-memory CachedUrl holding a String of content plus its properties. */
  public class SCU extends BaseCachedUrl {
    private String contents = null;
    private CIProperties props = new CIProperties();

    public SCU(CachedUrlSet owner, String url) {
      // NOTE(review): the owner argument is intentionally not forwarded;
      // BaseCachedUrl is constructed with a null ArchivalUnit here
      super(null, url);
    }

    /**
     * Create a CachedUrl with content
     * @param owner the CUS owner
     * @param url the url
     * @param type the type
     * @param contents the contents
     */
    public SCU(CachedUrlSet owner, String url, String type, String contents) {
      this(owner, url);
      setContents(contents);
      props.setProperty(CachedUrl.PROPERTY_CONTENT_TYPE, type);
    }

    // stores the content and records its length as the Content-Length property
    private void setContents(String s) {
      contents = s;
      props.setProperty("Content-Length", "" + s.length());
    }

    public String getUrl() {
      return url;
    }

    public boolean hasContent() {
      return contents != null;
    }

    public boolean isLeaf() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public InputStream getUnfilteredInputStream() {
      return new StringInputStream(contents);
    }

    public InputStream openForHashing() {
      return getUnfilteredInputStream();
    }

    protected InputStream getFilteredStream() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public Reader openForReading() {
      throw new UnsupportedOperationException("Not implemented");
    }

    // size in chars of the stored String (0 when no content)
    public long getContentSize() {
      return contents == null ? 0 : contents.length();
    }

    public CIProperties getProperties() {
      return props;
    }
  }

  /** CachedUrlSet stub; only construction and a fixed hash estimate work. */
  class SCUS extends BaseCachedUrlSet {
    public SCUS(ArchivalUnit owner, CachedUrlSetSpec spec) {
      super(owner, spec);
    }

    public void storeActualHashDuration(long elapsed, Exception err) {
      throw new UnsupportedOperationException("Not implemented");
    }

    public Iterator flatSetIterator() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public Iterator contentHashIterator() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public boolean isLeaf() {
      throw new UnsupportedOperationException("Not implemented");
    }

    public CachedUrlSetHasher getContentHasher(MessageDigest digest) {
      throw new UnsupportedOperationException("Not implemented");
    }

    public CachedUrlSetHasher getNameHasher(MessageDigest digest) {
      throw new UnsupportedOperationException("Not implemented");
    }

    // fixed estimate used by tests
    public long estimatedHashDuration() {
      return 1000;
    }
  }
}
/*
 * The MIT License
 *
 * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Red Hat, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package hudson.node_monitors;

import hudson.Extension;
import hudson.model.Computer;
import hudson.remoting.Callable;
import jenkins.security.MasterToSlaveCallable;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.StaplerRequest;

import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Logger;

import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;

/**
 * Monitors the round-trip response time to this agent.
 *
 * @author Kohsuke Kawaguchi
 */
public class ResponseTimeMonitor extends NodeMonitor {
    @Extension
    public static final AbstractNodeMonitorDescriptor<Data> DESCRIPTOR = new AbstractAsyncNodeMonitorDescriptor<Data>() {
        @Override
        protected Callable<Data,IOException> createCallable(Computer c) {
            // Step1 carries the computer's previous Data; the actual timing happens
            // via the Step1 -> Step2 -> Step3 serialization round trip (see below).
            return new Step1(get(c));
        }

        @Override
        protected Map<Computer, Data> monitor() throws InterruptedException {
            Result<Data> base = monitorDetailed();
            Map<Computer, Data> monitoringData = base.getMonitoringData();
            for (Entry<Computer, Data> e : monitoringData.entrySet()) {
                Computer c = e.getKey();
                Data d = e.getValue();
                // computers skipped by the detailed monitor have no data by construction
                if (base.getSkipped().contains(c)) {
                    assert d == null;
                    continue;
                }
                if (d ==null) {
                    // if we failed to monitor, put in the special value that indicates a failure
                    e.setValue(d=new Data(get(c),-1L));
                }
                if(d.hasTooManyTimeouts() && !isIgnored()) {
                    // unlike other monitors whose failure still allow us to communicate with the agent,
                    // the failure in this monitor indicates that we are just unable to make any requests
                    // to this agent. So we should sever the connection, as opposed to marking it temporarily
                    // off line, which still keeps the underlying channel open.
                    c.disconnect(d);
                    LOGGER.warning(Messages.ResponseTimeMonitor_MarkedOffline(c.getName()));
                }
            }
            return monitoringData;
        }

        public String getDisplayName() {
            return Messages.ResponseTimeMonitor_DisplayName();
        }

        @Override
        public NodeMonitor newInstance(StaplerRequest req, JSONObject formData) throws FormException {
            return new ResponseTimeMonitor();
        }
    };

    /**
     * First leg of the round-trip measurement. When this callable is serialized
     * to be sent to the agent, {@link #writeReplace()} substitutes a {@link Step2},
     * which records the send timestamp on the controller side.
     */
    private static final class Step1 extends MasterToSlaveCallable<Data,IOException> {
        private Data cur;

        private Step1(Data cur) {
            this.cur = cur;
        }

        public Data call() {
            // this method must be being invoked locally, which means the roundtrip time is zero and zero forever
            return new Data(cur,0);
        }

        // swap in Step2 at serialization time, capturing the departure timestamp
        private Object writeReplace() {
            return new Step2(cur);
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Second leg: {@code start} is initialized when this object is created on the
     * controller (during serialization of {@link Step1}); {@link #call()} runs on
     * the agent and hands both values to a {@link Step3} for the return trip.
     */
    private static final class Step2 extends MasterToSlaveCallable<Step3,IOException> {
        private final Data cur;
        private final long start = System.currentTimeMillis();

        public Step2(Data cur) {
            this.cur = cur;
        }

        public Step3 call() {
            // this method must be being invoked locally, which means the roundtrip time is zero and zero forever
            return new Step3(cur,start);
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Final leg: when the result is deserialized back on the controller,
     * {@link #readResolve()} computes the elapsed round-trip time and resolves
     * to a new {@link Data} carrying it.
     */
    private static final class Step3 implements Serializable {
        private final Data cur;
        private final long start;

        private Step3(Data cur, long start) {
            this.cur = cur;
            this.start = start;
        }

        private Object readResolve() {
            long end = System.currentTimeMillis();
            return new Data(cur,(end-start));
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Immutable representation of the monitoring data.
     */
    @ExportedBean
    public static final class Data extends MonitorOfflineCause implements Serializable {
        /**
         * Record of the past 5 times. -1 if time out. Otherwise in milliseconds.
         * Old ones first.
         */
        private final long[] past5;

        private Data(Data old, long newDataPoint) {
            if(old==null)
                past5 = new long[] {newDataPoint};
            else {
                // sliding window: keep at most the 4 most recent old samples,
                // then append the new data point at the end
                past5 = new long[Math.min(5,old.past5.length+1)];
                int copyLen = past5.length - 1;
                System.arraycopy(old.past5, old.past5.length-copyLen, this.past5, 0, copyLen);
                past5[past5.length-1] = newDataPoint;
            }
        }

        /**
         * Computes the recurrence of the time out
         */
        private int failureCount() {
            // count consecutive -1 (timeout) entries from the newest end backwards
            int cnt=0;
            for(int i=past5.length-1; i>=0 && past5[i]<0; i--, cnt++)
                ;
            return cnt;
        }

        /**
         * Computes the average response time, by taking the time out into account.
         */
        @Exported
        public long getAverage() {
            long total=0;
            for (long l : past5) {
                if(l<0)     total += TIMEOUT;  // a timeout counts as the full timeout interval
                else        total += l;
            }
            return total/past5.length;
        }

        public boolean hasTooManyTimeouts() {
            return failureCount()>=5;
        }

        /**
         * String rendering of the data
         */
        @Override
        public String toString() {
//            StringBuilder buf = new StringBuilder();
//            for (long l : past5) {
//                if(buf.length()>0)  buf.append(',');
//                buf.append(l);
//            }
//            return buf.toString();
            int fc = failureCount();
            if(fc>0)
                return Messages.ResponseTimeMonitor_TimeOut(fc);
            return getAverage()+"ms";
        }

        @Override
        public Class<? extends NodeMonitor> getTrigger() {
            return ResponseTimeMonitor.class;
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Time out interval in milliseconds.
     */
    private static final long TIMEOUT = 5000;

    private static final Logger LOGGER = Logger.getLogger(ResponseTimeMonitor.class.getName());
}
package liquibase.integration.spring; import liquibase.Contexts; import liquibase.LabelExpression; import liquibase.Liquibase; import liquibase.configuration.ConfigurationProperty; import liquibase.configuration.GlobalConfiguration; import liquibase.configuration.LiquibaseConfiguration; import liquibase.database.Database; import liquibase.database.DatabaseConnection; import liquibase.database.DatabaseFactory; import liquibase.database.OfflineConnection; import liquibase.database.jvm.JdbcConnection; import liquibase.exception.DatabaseException; import liquibase.exception.LiquibaseException; import liquibase.logging.LogFactory; import liquibase.logging.Logger; import liquibase.resource.ClassLoaderResourceAccessor; import liquibase.resource.ResourceAccessor; import liquibase.util.StringUtils; import liquibase.util.file.FilenameUtils; import org.springframework.beans.factory.BeanNameAware; import org.springframework.beans.factory.InitializingBean; import org.springframework.context.ResourceLoaderAware; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.core.io.support.ResourcePatternUtils; import javax.sql.DataSource; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.net.URLConnection; import java.sql.Connection; import java.sql.SQLException; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.jar.Attributes; import java.util.jar.Manifest; /** * A Spring-ified wrapper for Liquibase. * <p/> * Example Configuration: * <p/> * <p/> * This Spring configuration example will cause liquibase to run automatically when the Spring context is initialized. It will load * <code>db-changelog.xml</code> from the classpath and apply it against <code>myDataSource</code>. 
* <p/> * <p/> * * <pre> * &lt;bean id=&quot;myLiquibase&quot; * class=&quot;liquibase.spring.SpringLiquibase&quot; * &gt; * * &lt;property name=&quot;dataSource&quot; ref=&quot;myDataSource&quot; /&gt; * * &lt;property name=&quot;changeLog&quot; value=&quot;classpath:db-changelog.xml&quot; /&gt; * * &lt;/bean&gt; * * </pre> * * @author Rob Schoening */ public class SpringLiquibase implements InitializingBean, BeanNameAware, ResourceLoaderAware { public class SpringResourceOpener extends ClassLoaderResourceAccessor { private String parentFile; public SpringResourceOpener(String parentFile) { this.parentFile = parentFile; } @Override protected void init() { super.init(); try { Resource[] resources = ResourcePatternUtils.getResourcePatternResolver(getResourceLoader()).getResources(""); if (resources.length == 0 || resources.length == 1 && !resources[0].exists()) { //sometimes not able to look up by empty string, try all the liquibase packages Set<String> liquibasePackages = new HashSet<String>(); for (Resource manifest : ResourcePatternUtils.getResourcePatternResolver(getResourceLoader()).getResources("META-INF/MANIFEST.MF")) { if (manifest.exists()) { InputStream inputStream = null; try { inputStream = manifest.getInputStream(); Manifest manifestObj = new Manifest(inputStream); Attributes attributes = manifestObj.getAttributes("Liquibase-Package"); if (attributes != null) { for (Object attr : attributes.values()) { String packages = "\\s*,\\s*"; ; for (String fullPackage : attr.toString().split(packages)) { liquibasePackages.add(fullPackage.split("\\.")[0]); } } } } finally { if (inputStream != null) { inputStream.close(); } } } } if (liquibasePackages.size() == 0) { LogFactory.getInstance().getLog().warning("No Liquibase-Packages entry found in MANIFEST.MF. 
Using fallback of entire 'liquibase' package"); liquibasePackages.add("liquibase"); } for (String foundPackage : liquibasePackages) { resources = ResourcePatternUtils.getResourcePatternResolver(getResourceLoader()).getResources(foundPackage); for (Resource res : resources) { addRootPath(res.getURL()); } } } else { for (Resource res : resources) { addRootPath(res.getURL()); } } } catch (IOException e) { LogFactory.getInstance().getLog().warning("Error initializing SpringLiquibase", e); } } @Override public Set<String> list(String relativeTo, String path, boolean includeFiles, boolean includeDirectories, boolean recursive) throws IOException { if (path == null) { return null; } Set<String> returnSet = new HashSet<String>(); String tempFile = FilenameUtils.concat(FilenameUtils.getFullPath(relativeTo), path); Resource[] resources = ResourcePatternUtils.getResourcePatternResolver(getResourceLoader()).getResources(adjustClasspath(tempFile)); for (Resource res : resources) { Set<String> list = super.list(null, res.getURL().toExternalForm(), includeFiles, includeDirectories, recursive); if (list != null) { returnSet.addAll(list); } } return returnSet; } @Override public Set<InputStream> getResourcesAsStream(String path) throws IOException { if (path == null) { return null; } Set<InputStream> returnSet = new HashSet<InputStream>(); Resource[] resources = ResourcePatternUtils.getResourcePatternResolver(getResourceLoader()).getResources(adjustClasspath(path)); if (resources == null || resources.length == 0) { return null; } for (Resource resource : resources) { LogFactory.getInstance().getLog().debug("Opening "+resource.getURL().toExternalForm()+" as "+path); URLConnection connection = resource.getURL().openConnection(); connection.setUseCaches(false); returnSet.add(connection.getInputStream()); } return returnSet; } public Resource getResource(String file) { return getResourceLoader().getResource(adjustClasspath(file)); } private String adjustClasspath(String file) { if 
// ---------------------------------------------------------------------------
// Tail of liquibase.integration.spring.SpringLiquibase. The class declaration,
// the SpringResourceOpener inner-class header and the head of the method below
// lie BEFORE this chunk and are not visible here.
// ---------------------------------------------------------------------------

// NOTE(review): only the tail of this method is visible; it appears to prepend
// the "classpath:" prefix to a relative path when the parent path already
// carries a prefix but the child does not — confirm against the full file.
(file == null) {
    return null;
}
return isPrefixPresent(parentFile) && !isPrefixPresent(file)
        ? ResourceLoader.CLASSPATH_URL_PREFIX + file
        : file;
}

/**
 * Returns true if the given path already carries an explicit resource prefix
 * ("classpath...", "file:" or "url:"), i.e. no prefix needs to be prepended.
 *
 * @param file the resource path to inspect; may be null
 * @return true if a recognized prefix is present, false otherwise (or if null)
 */
public boolean isPrefixPresent(String file) {
    if (file == null) {
        return false;
    }
    // "classpath" (no colon) intentionally also matches "classpath*:" style prefixes.
    if (file.startsWith("classpath") || file.startsWith("file:") || file.startsWith("url:")) {
        return true;
    }
    return false;
}

@Override
public ClassLoader toClassLoader() {
    return getResourceLoader().getClassLoader();
}
}

// --- Spring-injected state for the outer SpringLiquibase bean ---------------

protected String beanName;

protected ResourceLoader resourceLoader;

protected DataSource dataSource;

protected final Logger log = LogFactory.getLogger(SpringLiquibase.class.getName());

protected String changeLog;

protected String contexts;

protected String labels;

protected String tag;

protected Map<String, String> parameters;

protected String defaultSchema;

protected boolean dropFirst = false;

protected boolean shouldRun = true;

protected File rollbackFile;

/**
 * Ignores classpath prefix during changeset comparison.
 * This is particularly useful if Liquibase is run in different ways.
 *
 * For instance, if Maven plugin is used to run changesets, as in:
 * <code>
 * &lt;configuration&gt;
 * ...
 * &lt;changeLogFile&gt;path/to/changelog&lt;/changeLogFile&gt;
 * &lt;/configuration&gt;
 * </code>
 *
 * And {@link SpringLiquibase} is configured like:
 * <code>
 * SpringLiquibase springLiquibase = new SpringLiquibase();
 * springLiquibase.setChangeLog("classpath:path/to/changelog");
 * </code>
 *
 * or, in equivalent XML configuration:
 * <code>
 * &lt;bean id="springLiquibase" class="liquibase.integration.spring.SpringLiquibase"&gt;
 * &lt;property name="changeLog" value="path/to/changelog" /&gt;
 * &lt;/bean&gt;
 * </code>
 *
 * {@link Liquibase#listUnrunChangeSets(Contexts, LabelExpression)} will
 * always, by default, return changesets, regardless of their
 * execution by Maven.
 * Maven-executed changeset path names are not prepended by
 * "classpath:" whereas the ones parsed via SpringLiquibase are.
 *
 * To avoid this issue, just set ignoreClasspathPrefix to true.
 */
private boolean ignoreClasspathPrefix = true;

public SpringLiquibase() {
    super();
}

public boolean isDropFirst() {
    return dropFirst;
}

public void setDropFirst(boolean dropFirst) {
    this.dropFirst = dropFirst;
}

public void setShouldRun(boolean shouldRun) {
    this.shouldRun = shouldRun;
}

/**
 * Looks up the product name of the database behind the configured DataSource.
 *
 * @return the database product name, or "unknown" if it could not be determined
 * @throws DatabaseException if obtaining the JDBC connection fails
 */
public String getDatabaseProductName() throws DatabaseException {
    Connection connection = null;
    Database database = null;
    String name = "unknown";
    try {
        connection = getDataSource().getConnection();
        database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(connection));
        name = database.getDatabaseProductName();
    } catch (SQLException e) {
        throw new DatabaseException(e);
    } finally {
        if (database != null) {
            // closing the Database wrapper also closes the underlying connection
            database.close();
        } else if (connection != null) {
            // the wrapper was never created: roll back any open transaction and
            // close the raw connection ourselves
            try {
                if (!connection.getAutoCommit()) {
                    connection.rollback();
                }
                connection.close();
            } catch (Exception e) {
                log.warning("problem closing connection", e);
            }
        }
    }
    return name;
}

/**
 * The DataSource that liquibase will use to perform the migration.
 *
 * @return the configured DataSource
 */
public DataSource getDataSource() {
    return dataSource;
}

/**
 * The DataSource that liquibase will use to perform the migration.
 */
public void setDataSource(DataSource dataSource) {
    this.dataSource = dataSource;
}

/**
 * Returns a Resource that is able to resolve to a file or classpath resource.
 *
 * @return the change-log location string
 */
public String getChangeLog() {
    return changeLog;
}

/**
 * Sets a Spring Resource that is able to resolve to a file or classpath resource.
 * An example might be <code>classpath:db-changelog.xml</code>.
 */
public void setChangeLog(String dataModel) {
    this.changeLog = dataModel;
}

public String getContexts() {
    return contexts;
}

public void setContexts(String contexts) {
    this.contexts = contexts;
}

public String getLabels() {
    return labels;
}

public void setLabels(String labels) {
    this.labels = labels;
}

public String getTag() {
    return tag;
}

public void setTag(String tag) {
    this.tag = tag;
}

public String getDefaultSchema() {
    return defaultSchema;
}

public void setDefaultSchema(String defaultSchema) {
    this.defaultSchema = defaultSchema;
}

/**
 * Executed automatically when the bean is initialized.
 * Runs the Liquibase update unless disabled globally (system property) or
 * locally via {@code shouldRun}.
 */
@Override
public void afterPropertiesSet() throws LiquibaseException {
    ConfigurationProperty shouldRunProperty = LiquibaseConfiguration.getInstance().getProperty(GlobalConfiguration.class, GlobalConfiguration.SHOULD_RUN);

    if (!shouldRunProperty.getValue(Boolean.class)) {
        LogFactory.getLogger().info("Liquibase did not run because "+ LiquibaseConfiguration.getInstance().describeValueLookupLogic(shouldRunProperty)+" was set to false");
        return;
    }
    if (!shouldRun) {
        LogFactory.getLogger().info("Liquibase did not run because 'shouldRun' " + "property was set to false on " + getBeanName() + " Liquibase Spring bean.");
        return;
    }

    Connection c = null;
    Liquibase liquibase = null;
    try {
        c = getDataSource().getConnection();
        liquibase = createLiquibase(c);
        generateRollbackFile(liquibase);
        performUpdate(liquibase);
    } catch (SQLException e) {
        throw new DatabaseException(e);
    } finally {
        // Liquibase owns the connection through its Database; closing the
        // Database (if one was created) releases the connection.
        Database database = null;
        if (liquibase != null) {
            database = liquibase.getDatabase();
        }
        if (database != null) {
            database.close();
        }
    }
}

/**
 * Writes the future-rollback SQL to {@code rollbackFile}, if one is configured.
 * Uses the tag-scoped variant when a tag is set.
 *
 * @throws LiquibaseException if SQL generation fails or the file cannot be written
 */
private void generateRollbackFile(Liquibase liquibase) throws LiquibaseException {
    if (rollbackFile != null) {
        FileWriter output = null;
        try {
            output = new FileWriter(rollbackFile);
            if (tag != null) {
                liquibase.futureRollbackSQL(tag, new Contexts(getContexts()), new LabelExpression(getLabels()), output);
            } else {
                liquibase.futureRollbackSQL(new Contexts(getContexts()), new LabelExpression(getLabels()), output);
            }
        } catch (IOException e) {
            throw new LiquibaseException("Unable to generate rollback file.", e);
        } finally {
            try {
                if (output != null) {
                    output.close();
                }
            } catch (IOException e) {
                log.severe("Error closing output", e);
            }
        }
    }
}

/**
 * Runs the actual update, scoped to {@code tag} when one is set.
 */
protected void performUpdate(Liquibase liquibase) throws LiquibaseException {
    if (tag != null) {
        liquibase.update(tag, new Contexts(getContexts()), new LabelExpression(getLabels()));
    } else {
        liquibase.update(new Contexts(getContexts()), new LabelExpression(getLabels()));
    }
}

/**
 * Builds the Liquibase facade for the given connection, applying change-log
 * parameters and the optional drop-first behavior.
 */
protected Liquibase createLiquibase(Connection c) throws LiquibaseException {
    SpringResourceOpener resourceAccessor = createResourceOpener();
    Liquibase liquibase = new Liquibase(getChangeLog(), resourceAccessor, createDatabase(c, resourceAccessor));
    liquibase.setIgnoreClasspathPrefix(isIgnoreClasspathPrefix());
    if (parameters != null) {
        for (Map.Entry<String, String> entry : parameters.entrySet()) {
            liquibase.setChangeLogParameter(entry.getKey(), entry.getValue());
        }
    }

    if (isDropFirst()) {
        liquibase.dropAll();
    }

    return liquibase;
}

/**
 * Subclasses may override this method add change some database settings such as
 * default schema before returning the database object.
 *
 * @param c the JDBC connection; a null connection yields an offline database
 * @return a Database implementation retrieved from the {@link DatabaseFactory}.
 * @throws DatabaseException
 */
protected Database createDatabase(Connection c, ResourceAccessor resourceAccessor) throws DatabaseException {
    DatabaseConnection liquibaseConnection;
    if (c == null) {
        log.warning("Null connection returned by liquibase datasource. Using offline unknown database");
        liquibaseConnection = new OfflineConnection("offline:unknown", resourceAccessor);
    } else {
        liquibaseConnection = new JdbcConnection(c);
    }

    Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(liquibaseConnection);
    if (StringUtils.trimToNull(this.defaultSchema) != null) {
        database.setDefaultSchemaName(this.defaultSchema);
    }
    return database;
}

public void setChangeLogParameters(Map<String, String> parameters) {
    this.parameters = parameters;
}

/**
 * Create a new resourceOpener.
 */
protected SpringResourceOpener createResourceOpener() {
    return new SpringResourceOpener(getChangeLog());
}

/**
 * Spring sets this automatically to the instance's configured bean name.
 */
@Override
public void setBeanName(String name) {
    this.beanName = name;
}

/**
 * Gets the Spring-name of this instance.
 *
 * @return the bean name assigned by the Spring container
 */
public String getBeanName() {
    return beanName;
}

@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
    this.resourceLoader = resourceLoader;
}

public ResourceLoader getResourceLoader() {
    return resourceLoader;
}

public void setRollbackFile(File rollbackFile) {
    this.rollbackFile = rollbackFile;
}

public boolean isIgnoreClasspathPrefix() {
    return ignoreClasspathPrefix;
}

public void setIgnoreClasspathPrefix(boolean ignoreClasspathPrefix) {
    this.ignoreClasspathPrefix = ignoreClasspathPrefix;
}

@Override
public String toString() {
    return getClass().getName() + "(" + this.getResourceLoader().toString() + ")";
}
}
// $ANTLR 3.1.3 Mar 17, 2009 19:23:44 WIG.g 2009-09-02 22:58:08
//--------------------------------------
// UTGB Project
//
// WIGParser.java
// Since: Aug 28, 2009
//
//--------------------------------------
package org.utgenome.format.wig;

import org.antlr.runtime.*;

import java.util.Stack;
import java.util.List;
import java.util.ArrayList;

import org.antlr.runtime.tree.*;

/**
 * GENERATED CODE — produced by ANTLR 3.1.3 from the grammar file WIG.g.
 * Do not edit by hand; change WIG.g and regenerate instead.
 *
 * Parses WIG (wiggle) track description lines of the form
 * {@code name attr1=value1 attr2=value2 ...} into an AST rooted at a
 * Description node.
 */
public class WIGParser extends Parser {

    // Token display names, indexed by token type (positions 0-3 are ANTLR built-ins).
    public static final String[] tokenNames = new String[] {
        "<invalid>", "<EOR>", "<DOWN>", "<UP>", "Description", "Name", "Value", "Attribute", "Eq", "Dot", "Digit", "HexDigit", "UnicodeChar", "EscapeSequence", "StringChar", "Int", "Frac", "Exp", "WhiteSpace", "StringChars", "String", "Integer", "Double", "Number", "QName"
    };

    // Token type constants generated from WIG.g; values must stay in sync with the lexer.
    public static final int Digit = 10;
    public static final int Frac = 16;
    public static final int HexDigit = 11;
    public static final int Eq = 8;
    public static final int Exp = 17;
    public static final int Int = 15;
    public static final int Description = 4;
    public static final int UnicodeChar = 12;
    public static final int StringChar = 14;
    public static final int Name = 5;
    public static final int String = 20;
    public static final int Attribute = 7;
    public static final int Dot = 9;
    public static final int StringChars = 19;
    public static final int EscapeSequence = 13;
    public static final int QName = 24;
    public static final int EOF = -1;
    public static final int Integer = 21;
    public static final int Value = 6;
    public static final int Double = 22;
    public static final int WhiteSpace = 18;
    public static final int Number = 23;

    // delegates
    // delegators

    public WIGParser(TokenStream input) {
        this(input, new RecognizerSharedState());
    }

    public WIGParser(TokenStream input, RecognizerSharedState state) {
        super(input, state);
    }

    // Adaptor used to build AST nodes; defaults to CommonTree construction.
    protected TreeAdaptor adaptor = new CommonTreeAdaptor();

    public void setTreeAdaptor(TreeAdaptor adaptor) {
        this.adaptor = adaptor;
    }

    public TreeAdaptor getTreeAdaptor() {
        return adaptor;
    }

    public String[] getTokenNames() {
        return WIGParser.tokenNames;
    }

    public String getGrammarFileName() {
        return "WIG.g";
    }

    public static class description_return extends ParserRuleReturnScope {
        Object tree;

        public Object getTree() {
            return tree;
        }
    };

    // $ANTLR start "description"
    // WIG.g:101:1: description : descriptionName ( attribute )* -> ^( Description descriptionName ( attribute )* ) ;
    public final WIGParser.description_return description() throws RecognitionException {
        WIGParser.description_return retval = new WIGParser.description_return();
        retval.start = input.LT(1);

        Object root_0 = null;

        WIGParser.descriptionName_return descriptionName1 = null;

        WIGParser.attribute_return attribute2 = null;

        RewriteRuleSubtreeStream stream_attribute = new RewriteRuleSubtreeStream(adaptor, "rule attribute");
        RewriteRuleSubtreeStream stream_descriptionName = new RewriteRuleSubtreeStream(adaptor, "rule descriptionName");
        try {
            // WIG.g:101:12: ( descriptionName ( attribute )* -> ^( Description descriptionName ( attribute )* ) )
            // WIG.g:101:14: descriptionName ( attribute )*
            {
                pushFollow(FOLLOW_descriptionName_in_description424);
                descriptionName1 = descriptionName();

                state._fsp--;

                stream_descriptionName.add(descriptionName1.getTree());
                // WIG.g:101:30: ( attribute )*
                loop1: do {
                    int alt1 = 2;
                    int LA1_0 = input.LA(1);

                    if ((LA1_0 == QName)) {
                        alt1 = 1;
                    }

                    switch (alt1) {
                    case 1:
                        // WIG.g:101:30: attribute
                    {
                        pushFollow(FOLLOW_attribute_in_description426);
                        attribute2 = attribute();

                        state._fsp--;

                        stream_attribute.add(attribute2.getTree());

                    }
                        break;

                    default:
                        break loop1;
                    }
                } while (true);

                // AST REWRITE
                // elements: attribute, descriptionName
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream(adaptor, "rule retval", retval != null ? retval.tree : null);

                root_0 = (Object) adaptor.nil();
                // 101:41: -> ^( Description descriptionName ( attribute )* )
                {
                    // WIG.g:101:44: ^( Description descriptionName ( attribute )* )
                    {
                        Object root_1 = (Object) adaptor.nil();
                        root_1 = (Object) adaptor.becomeRoot((Object) adaptor.create(Description, "Description"), root_1);

                        adaptor.addChild(root_1, stream_descriptionName.nextTree());
                        // WIG.g:101:74: ( attribute )*
                        while (stream_attribute.hasNext()) {
                            adaptor.addChild(root_1, stream_attribute.nextTree());

                        }
                        stream_attribute.reset();

                        adaptor.addChild(root_0, root_1);
                    }

                }

                retval.tree = root_0;
            }

            retval.stop = input.LT(-1);

            retval.tree = (Object) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        } finally {
        }
        return retval;
    }
    // $ANTLR end "description"

    public static class descriptionName_return extends ParserRuleReturnScope {
        Object tree;

        public Object getTree() {
            return tree;
        }
    };

    // $ANTLR start "descriptionName"
    // WIG.g:104:1: descriptionName : QName -> Name[$QName.text] ;
    public final WIGParser.descriptionName_return descriptionName() throws RecognitionException {
        WIGParser.descriptionName_return retval = new WIGParser.descriptionName_return();
        retval.start = input.LT(1);

        Object root_0 = null;

        Token QName3 = null;

        Object QName3_tree = null;
        RewriteRuleTokenStream stream_QName = new RewriteRuleTokenStream(adaptor, "token QName");

        try {
            // WIG.g:104:16: ( QName -> Name[$QName.text] )
            // WIG.g:104:18: QName
            {
                QName3 = (Token) match(input, QName, FOLLOW_QName_in_descriptionName450);
                stream_QName.add(QName3);

                // AST REWRITE
                // elements:
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream(adaptor, "rule retval", retval != null ? retval.tree : null);

                root_0 = (Object) adaptor.nil();
                // 104:24: -> Name[$QName.text]
                {
                    adaptor.addChild(root_0, (Object) adaptor.create(Name, (QName3 != null ? QName3.getText() : null)));

                }

                retval.tree = root_0;
            }

            retval.stop = input.LT(-1);

            retval.tree = (Object) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        } finally {
        }
        return retval;
    }
    // $ANTLR end "descriptionName"

    public static class attribute_return extends ParserRuleReturnScope {
        Object tree;

        public Object getTree() {
            return tree;
        }
    };

    // $ANTLR start "attribute"
    // WIG.g:106:1: attribute : QName Eq attributeValue -> ^( Attribute Name[$QName.text] attributeValue ) ;
    public final WIGParser.attribute_return attribute() throws RecognitionException {
        WIGParser.attribute_return retval = new WIGParser.attribute_return();
        retval.start = input.LT(1);

        Object root_0 = null;

        Token QName4 = null;
        Token Eq5 = null;
        WIGParser.attributeValue_return attributeValue6 = null;

        Object QName4_tree = null;
        Object Eq5_tree = null;
        RewriteRuleTokenStream stream_QName = new RewriteRuleTokenStream(adaptor, "token QName");
        RewriteRuleTokenStream stream_Eq = new RewriteRuleTokenStream(adaptor, "token Eq");
        RewriteRuleSubtreeStream stream_attributeValue = new RewriteRuleSubtreeStream(adaptor, "rule attributeValue");
        try {
            // WIG.g:106:10: ( QName Eq attributeValue -> ^( Attribute Name[$QName.text] attributeValue ) )
            // WIG.g:106:12: QName Eq attributeValue
            {
                QName4 = (Token) match(input, QName, FOLLOW_QName_in_attribute464);
                stream_QName.add(QName4);

                Eq5 = (Token) match(input, Eq, FOLLOW_Eq_in_attribute466);
                stream_Eq.add(Eq5);

                pushFollow(FOLLOW_attributeValue_in_attribute468);
                attributeValue6 = attributeValue();

                state._fsp--;

                stream_attributeValue.add(attributeValue6.getTree());

                // AST REWRITE
                // elements: attributeValue
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream(adaptor, "rule retval", retval != null ? retval.tree : null);

                root_0 = (Object) adaptor.nil();
                // 106:36: -> ^( Attribute Name[$QName.text] attributeValue )
                {
                    // WIG.g:106:39: ^( Attribute Name[$QName.text] attributeValue )
                    {
                        Object root_1 = (Object) adaptor.nil();
                        root_1 = (Object) adaptor.becomeRoot((Object) adaptor.create(Attribute, "Attribute"), root_1);

                        adaptor.addChild(root_1, (Object) adaptor.create(Name, (QName4 != null ? QName4.getText() : null)));
                        adaptor.addChild(root_1, stream_attributeValue.nextTree());

                        adaptor.addChild(root_0, root_1);
                    }

                }

                retval.tree = root_0;
            }

            retval.stop = input.LT(-1);

            retval.tree = (Object) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        } finally {
        }
        return retval;
    }
    // $ANTLR end "attribute"

    public static class attributeValue_return extends ParserRuleReturnScope {
        Object tree;

        public Object getTree() {
            return tree;
        }
    };

    // $ANTLR start "attributeValue"
    // WIG.g:108:1: attributeValue : ( QName | String | Number ) -> Value[$attributeValue.text] ;
    public final WIGParser.attributeValue_return attributeValue() throws RecognitionException {
        WIGParser.attributeValue_return retval = new WIGParser.attributeValue_return();
        retval.start = input.LT(1);

        Object root_0 = null;

        Token QName7 = null;
        Token String8 = null;
        Token Number9 = null;

        Object QName7_tree = null;
        Object String8_tree = null;
        Object Number9_tree = null;
        RewriteRuleTokenStream stream_String = new RewriteRuleTokenStream(adaptor, "token String");
        RewriteRuleTokenStream stream_Number = new RewriteRuleTokenStream(adaptor, "token Number");
        RewriteRuleTokenStream stream_QName = new RewriteRuleTokenStream(adaptor, "token QName");

        try {
            // WIG.g:108:15: ( ( QName | String | Number ) -> Value[$attributeValue.text] )
            // WIG.g:108:17: ( QName | String | Number )
            {
                // WIG.g:108:17: ( QName | String | Number )
                int alt2 = 3;
                switch (input.LA(1)) {
                case QName: {
                    alt2 = 1;
                }
                    break;
                case String: {
                    alt2 = 2;
                }
                    break;
                case Number: {
                    alt2 = 3;
                }
                    break;
                default:
                    NoViableAltException nvae = new NoViableAltException("", 2, 0, input);

                    throw nvae;
                }

                switch (alt2) {
                case 1:
                    // WIG.g:108:18: QName
                {
                    QName7 = (Token) match(input, QName, FOLLOW_QName_in_attributeValue487);
                    stream_QName.add(QName7);

                }
                    break;
                case 2:
                    // WIG.g:108:26: String
                {
                    String8 = (Token) match(input, String, FOLLOW_String_in_attributeValue491);
                    stream_String.add(String8);

                }
                    break;
                case 3:
                    // WIG.g:108:35: Number
                {
                    Number9 = (Token) match(input, Number, FOLLOW_Number_in_attributeValue495);
                    stream_Number.add(Number9);

                }
                    break;

                }

                // AST REWRITE
                // elements:
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval = new RewriteRuleSubtreeStream(adaptor, "rule retval", retval != null ? retval.tree : null);

                root_0 = (Object) adaptor.nil();
                // 108:43: -> Value[$attributeValue.text]
                {
                    adaptor.addChild(root_0, (Object) adaptor.create(Value, input.toString(retval.start, input.LT(-1))));

                }

                retval.tree = root_0;
            }

            retval.stop = input.LT(-1);

            retval.tree = (Object) adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);

        } finally {
        }
        return retval;
    }
    // $ANTLR end "attributeValue"

    // Delegated rules

    // FOLLOW bitsets generated by ANTLR; the long values encode token sets and
    // must not be edited by hand.
    public static final BitSet FOLLOW_descriptionName_in_description424 = new BitSet(new long[] { 0x0000000001000002L });
    public static final BitSet FOLLOW_attribute_in_description426 = new BitSet(new long[] { 0x0000000001000002L });
    public static final BitSet FOLLOW_QName_in_descriptionName450 = new BitSet(new long[] { 0x0000000000000002L });
    public static final BitSet FOLLOW_QName_in_attribute464 = new BitSet(new long[] { 0x0000000000000100L });
    public static final BitSet FOLLOW_Eq_in_attribute466 = new BitSet(new long[] { 0x0000000001900000L });
    public static final BitSet FOLLOW_attributeValue_in_attribute468 = new BitSet(new long[] { 0x0000000000000002L });
    public static final BitSet FOLLOW_QName_in_attributeValue487 = new BitSet(new long[] { 0x0000000000000002L });
    public static final BitSet FOLLOW_String_in_attributeValue491 = new BitSet(new long[] { 0x0000000000000002L });
    public static final BitSet FOLLOW_Number_in_attributeValue495 = new BitSet(new long[] { 0x0000000000000002L });

}
/*
 * Copyright (c) 2001, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package com.sun.tools.javac.jvm;

import java.util.*;

import com.sun.tools.javac.tree.*;
import com.sun.tools.javac.util.*;
import com.sun.tools.javac.util.List;
import com.sun.tools.javac.tree.JCTree.*;
import com.sun.tools.javac.tree.EndPosTable;

/** This class contains the CharacterRangeTable for some method
 *  and the hashtable for mapping trees or lists of trees to their
 *  ending positions.
 *
 *  <p><b>This is NOT part of any supported API.
 *  If you write code that depends on this, you do so at your own risk.
 *  This code and its internal interfaces are subject to change or
 *  deletion without notice.</b>
 */
public class CRTable implements CRTFlags {

    // When true, dumps each table entry to stdout while writing the CRT.
    private final boolean crtDebug = false;

    /** The list of CRTable entries.
     */
    private ListBuffer<CRTEntry> entries = new ListBuffer<CRTEntry>();

    /** The hashtable for source positions.
     */
    private Map<Object,SourceRange> positions = new HashMap<Object,SourceRange>();

    /** The object for ending positions stored in the parser.
     */
    private EndPosTable endPosTable;

    /** The tree of the method this table is intended for.
     *  We should traverse this tree to get source ranges.
     */
    JCTree.JCMethodDecl methodTree;

    /** Constructor */
    public CRTable(JCTree.JCMethodDecl tree, EndPosTable endPosTable) {
        this.methodTree = tree;
        this.endPosTable = endPosTable;
    }

    /** Create a new CRTEntry and add it to the entries.
     *  @param tree     The tree or the list of trees for which
     *                  we are storing the code pointers.
     *  @param flags    The set of flags designating type of the entry.
     *  @param startPc  The starting code position.
     *  @param endPc    The ending code position.
     */
    public void put(Object tree, int flags, int startPc, int endPc) {
        entries.append(new CRTEntry(tree, flags, startPc, endPc));
    }

    /** Compute source positions and write CRT to the databuf.
     *  @param databuf  The buffer to write bytecodes to.
     *  @param lineMap  The line map used to translate character positions
     *                  into line/column form.
     *  @param log      Sink for position-overflow warnings.
     *  @return the number of entries actually written.
     */
    public int writeCRT(ByteBuffer databuf, Position.LineMap lineMap, Log log) {
        int crtEntries = 0;

        // compute source positions for the method
        new SourceComputer().csp(methodTree);

        for (List<CRTEntry> l = entries.toList(); l.nonEmpty(); l = l.tail) {

            CRTEntry entry = l.head;

            // eliminate entries that do not produce bytecodes:
            // for example, empty blocks and statements
            if (entry.startPc == entry.endPc)
                continue;

            SourceRange pos = positions.get(entry.tree);
            Assert.checkNonNull(pos, "CRT: tree source positions are undefined");
            if ((pos.startPos == Position.NOPOS) ||
                (pos.endPos == Position.NOPOS))
                continue;

            if (crtDebug) {
                System.out.println("Tree: " + entry.tree + ", type:" + getTypes(entry.flags));
                System.out.print("Start: pos = " + pos.startPos + ", pc = " + entry.startPc);
            }

            // encode startPos into line/column representation
            int startPos = encodePosition(pos.startPos, lineMap, log);
            if (startPos == Position.NOPOS)
                continue;

            if (crtDebug) {
                System.out.print("End:   pos = " + pos.endPos + ", pc = " + (entry.endPc - 1));
            }

            // encode endPos into line/column representation
            int endPos = encodePosition(pos.endPos, lineMap, log);
            if (endPos == Position.NOPOS)
                continue;

            // write attribute
            databuf.appendChar(entry.startPc);
            // 'endPc - 1' because endPc actually points to start of the next command
            databuf.appendChar(entry.endPc - 1);
            databuf.appendInt(startPos);
            databuf.appendInt(endPos);
            databuf.appendChar(entry.flags);

            crtEntries++;
        }

        return crtEntries;
    }

    /** Return the number of the entries.
     */
    public int length() {
        return entries.length();
    }

    /** Return string describing flags enabled.
     */
    private String getTypes(int flags) {
        String types = "";
        if ((flags & CRT_STATEMENT)       != 0) types += " CRT_STATEMENT";
        if ((flags & CRT_BLOCK)           != 0) types += " CRT_BLOCK";
        if ((flags & CRT_ASSIGNMENT)      != 0) types += " CRT_ASSIGNMENT";
        if ((flags & CRT_FLOW_CONTROLLER) != 0) types += " CRT_FLOW_CONTROLLER";
        if ((flags & CRT_FLOW_TARGET)     != 0) types += " CRT_FLOW_TARGET";
        if ((flags & CRT_INVOKE)          != 0) types += " CRT_INVOKE";
        if ((flags & CRT_CREATE)          != 0) types += " CRT_CREATE";
        if ((flags & CRT_BRANCH_TRUE)     != 0) types += " CRT_BRANCH_TRUE";
        if ((flags & CRT_BRANCH_FALSE)    != 0) types += " CRT_BRANCH_FALSE";
        return types;
    }

    /** Source file positions in CRT are integers in the format:
     *  {@literal line-number << LINESHIFT + column-number }
     */
    private int encodePosition(int pos, Position.LineMap lineMap, Log log) {
        int line = lineMap.getLineNumber(pos);
        int col = lineMap.getColumnNumber(pos);
        int new_pos = Position.encodePosition(line, col);
        if (crtDebug) {
            System.out.println(", line = " + line + ", column = " + col +
                               ", new_pos = " + new_pos);
        }
        if (new_pos == Position.NOPOS)
            log.warning(pos, "position.overflow", line);

        return new_pos;
    }

/* ************************************************************************
 * Traversal methods
 *************************************************************************/

    /**
     *  This class contains methods to compute source positions for trees.
     *  Extends Tree.Visitor to traverse the abstract syntax tree.
     */
    class SourceComputer extends JCTree.Visitor {

        /** The result of the tree traversal methods.
         */
        SourceRange result;

        /** Visitor method: compute source positions for a single node.
         *  Stores the computed range in {@code positions} as a side effect.
         */
        public SourceRange csp(JCTree tree) {
            if (tree == null) return null;
            tree.accept(this);
            if (result != null) {
                positions.put(tree, result);
            }
            return result;
        }

        /** Visitor method: compute source positions for a list of nodes.
         */
        public SourceRange csp(List<? extends JCTree> trees) {
            if ((trees == null) || !(trees.nonEmpty())) return null;
            SourceRange list_sr = new SourceRange();
            for (List<? extends JCTree> l = trees; l.nonEmpty(); l = l.tail) {
                list_sr.mergeWith(csp(l.head));
            }
            positions.put(trees, list_sr);
            return list_sr;
        }

        /** Visitor method: compute source positions for
         *  a list of case blocks of switch statements.
         */
        public SourceRange cspCases(List<JCCase> trees) {
            if ((trees == null) || !(trees.nonEmpty())) return null;
            SourceRange list_sr = new SourceRange();
            for (List<JCCase> l = trees; l.nonEmpty(); l = l.tail) {
                list_sr.mergeWith(csp(l.head));
            }
            positions.put(trees, list_sr);
            return list_sr;
        }

        /** Visitor method: compute source positions for
         *  a list of catch clauses in try statements.
         */
        public SourceRange cspCatchers(List<JCCatch> trees) {
            if ((trees == null) || !(trees.nonEmpty())) return null;
            SourceRange list_sr = new SourceRange();
            for (List<JCCatch> l = trees; l.nonEmpty(); l = l.tail) {
                list_sr.mergeWith(csp(l.head));
            }
            positions.put(trees, list_sr);
            return list_sr;
        }

        public void visitMethodDef(JCMethodDecl tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.body));
            result = sr;
        }

        public void visitVarDef(JCVariableDecl tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            csp(tree.vartype);
            sr.mergeWith(csp(tree.init));
            result = sr;
        }

        public void visitSkip(JCSkip tree) {
            // endPos is the same as startPos for the empty statement
            SourceRange sr = new SourceRange(startPos(tree), startPos(tree));
            result = sr;
        }

        public void visitBlock(JCBlock tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            csp(tree.stats);    // doesn't compare because block's ending position is defined
            result = sr;
        }

        public void visitDoLoop(JCDoWhileLoop tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.body));
            sr.mergeWith(csp(tree.cond));
            result = sr;
        }

        public void visitWhileLoop(JCWhileLoop tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.cond));
            sr.mergeWith(csp(tree.body));
            result = sr;
        }

        public void visitForLoop(JCForLoop tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.init));
            sr.mergeWith(csp(tree.cond));
            sr.mergeWith(csp(tree.step));
            sr.mergeWith(csp(tree.body));
            result = sr;
        }

        public void visitForeachLoop(JCEnhancedForLoop tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.var));
            sr.mergeWith(csp(tree.expr));
            sr.mergeWith(csp(tree.body));
            result = sr;
        }

        public void visitLabelled(JCLabeledStatement tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.body));
            result = sr;
        }

        public void visitSwitch(JCSwitch tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.selector));
            sr.mergeWith(cspCases(tree.cases));
            result = sr;
        }

        public void visitCase(JCCase tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.pat));
            sr.mergeWith(csp(tree.stats));
            result = sr;
        }

        public void visitSynchronized(JCSynchronized tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.lock));
            sr.mergeWith(csp(tree.body));
            result = sr;
        }

        public void visitTry(JCTry tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.resources));
            sr.mergeWith(csp(tree.body));
            sr.mergeWith(cspCatchers(tree.catchers));
            sr.mergeWith(csp(tree.finalizer));
            result = sr;
        }

        public void visitCatch(JCCatch tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.param));
            sr.mergeWith(csp(tree.body));
            result = sr;
        }

        public void visitConditional(JCConditional tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.cond));
            sr.mergeWith(csp(tree.truepart));
            sr.mergeWith(csp(tree.falsepart));
            result = sr;
        }

        public void visitIf(JCIf tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.cond));
            sr.mergeWith(csp(tree.thenpart));
            sr.mergeWith(csp(tree.elsepart));
            result = sr;
        }

        public void visitExec(JCExpressionStatement tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.expr));
            result = sr;
        }

        public void visitBreak(JCBreak tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            result = sr;
        }

        public void visitContinue(JCContinue tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            result = sr;
        }

        public void visitReturn(JCReturn tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.expr));
            result = sr;
        }

        public void visitThrow(JCThrow tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.expr));
            result = sr;
        }

        public void visitAssert(JCAssert tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.cond));
            sr.mergeWith(csp(tree.detail));
            result = sr;
        }

        public void visitApply(JCMethodInvocation tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.meth));
            sr.mergeWith(csp(tree.args));
            result = sr;
        }

        public void visitNewClass(JCNewClass tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.encl));
            sr.mergeWith(csp(tree.clazz));
            sr.mergeWith(csp(tree.args));
            sr.mergeWith(csp(tree.def));
            result = sr;
        }

        public void visitNewArray(JCNewArray tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.elemtype));
            sr.mergeWith(csp(tree.dims));
            sr.mergeWith(csp(tree.elems));
            result = sr;
        }

        public void visitParens(JCParens tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.expr));
            result = sr;
        }

        public void visitAssign(JCAssign tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.lhs));
            sr.mergeWith(csp(tree.rhs));
            result = sr;
        }

        public void visitAssignop(JCAssignOp tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.lhs));
            sr.mergeWith(csp(tree.rhs));
            result = sr;
        }

        public void visitUnary(JCUnary tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.arg));
            result = sr;
        }

        public void visitBinary(JCBinary tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.lhs));
            sr.mergeWith(csp(tree.rhs));
            result = sr;
        }

        public void visitTypeCast(JCTypeCast tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.clazz));
            sr.mergeWith(csp(tree.expr));
            result = sr;
        }

        public void visitTypeTest(JCInstanceOf tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.expr));
            sr.mergeWith(csp(tree.clazz));
            result = sr;
        }

        public void visitIndexed(JCArrayAccess tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.indexed));
            sr.mergeWith(csp(tree.index));
            result = sr;
        }

        public void visitSelect(JCFieldAccess tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.selected));
            result = sr;
        }

        public void visitIdent(JCIdent tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            result = sr;
        }

        public void visitLiteral(JCLiteral tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            result = sr;
        }

        public void visitTypeIdent(JCPrimitiveTypeTree tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            result = sr;
        }

        public void visitTypeArray(JCArrayTypeTree tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.elemtype));
            result = sr;
        }

        public void visitTypeApply(JCTypeApply tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.clazz));
            sr.mergeWith(csp(tree.arguments));
            result = sr;
        }

        @Override
        public void visitLetExpr(LetExpr tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.defs));
            sr.mergeWith(csp(tree.expr));
            result = sr;
        }

        public void visitTypeParameter(JCTypeParameter tree) {
            SourceRange sr = new SourceRange(startPos(tree), endPos(tree));
            sr.mergeWith(csp(tree.bounds));
            result = sr;
        }

        public void visitWildcard(JCWildcard tree) {
            // wildcards carry no range of their own
            result = null;
        }

        public void visitErroneous(JCErroneous tree) {
            result = null;
        }

        public void visitTree(JCTree tree) {
            // fallback: every node kind must be handled explicitly above
            Assert.error();
        }

        /** The start position of given tree.
         */
        public int startPos(JCTree tree) {
            if (tree == null) return Position.NOPOS;
            return TreeInfo.getStartPos(tree);
        }

        /** The end position of given tree, if it has
         *  defined endpos, NOPOS otherwise.
         */
        public int endPos(JCTree tree) {
            if (tree == null) return Position.NOPOS;
            return TreeInfo.getEndPos(tree, endPosTable);
        }
    }

    /** This class contains a CharacterRangeTableEntry.
     */
    static class CRTEntry {

        /** A tree or a list of trees to obtain source positions.
         */
        Object tree;

        /** The flags described in the CharacterRangeTable spec.
         */
        int flags;

        /** The starting code position of this entry.
         */
        int startPc;

        /** The ending code position of this entry.
         */
        int endPc;

        /** Constructor */
        CRTEntry(Object tree, int flags, int startPc, int endPc) {
            this.tree = tree;
            this.flags = flags;
            this.startPc = startPc;
            this.endPc = endPc;
        }
    }


    /** This class contains source positions
     *  for some tree or list of trees.
     */
    static class SourceRange {

        /** The starting source position.
         */
        int startPos;

        /** The ending source position.
         */
        int endPos;

        /** Constructor */
        SourceRange() {
            startPos = Position.NOPOS;
            endPos = Position.NOPOS;
        }

        /** Constructor */
        SourceRange(int startPos, int endPos) {
            this.startPos = startPos;
            this.endPos = endPos;
        }

        /** Compare the starting and the ending positions
         *  of the source range and combines them assigning
         *  the widest range to this.
         *  NOPOS on either side is treated as "unknown" and never wins.
         */
        SourceRange mergeWith(SourceRange sr) {
            if (sr == null) return this;
            if (startPos == Position.NOPOS)
                startPos = sr.startPos;
            else if (sr.startPos != Position.NOPOS)
                startPos = (startPos < sr.startPos ? startPos : sr.startPos);
            if (endPos == Position.NOPOS)
                endPos = sr.endPos;
            else if (sr.endPos != Position.NOPOS)
                endPos = (endPos > sr.endPos ? endPos : sr.endPos);
            return this;
        }
    }

}
/** * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.lang.compiler.model; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.asakusafw.lang.compiler.common.util.EnumUtil; /** * Represents a property name. */ public class PropertyName { /** * Represents an empty name. */ public static final String EMPTY_NAME = "_"; //$NON-NLS-1$ static final Option[] EMPTY_OPTIONS = new Option[0]; static final Pattern TRIM_PATTERN = Pattern.compile("[ _]*(.*?)[ _]*"); //$NON-NLS-1$ private final List<String> words; /** * Creates a new instance. * @param words words in the name */ public PropertyName(List<String> words) { this.words = normalize(words); } /** * Creates a new instance. * @param nameString the property name string * @return the created instance */ public static PropertyName of(String nameString) { return of(nameString, EMPTY_OPTIONS); } /** * Creates a new instance. * @param nameString the property name string * @param options options for building property name * @return the created instance */ public static PropertyName of(String nameString, Option... 
options) { if (nameString.isEmpty()) { throw new IllegalArgumentException("nameString must not be empty"); //$NON-NLS-1$ } String s = trimNameString(nameString); if (s.indexOf('_') >= 0 || s.toUpperCase().equals(s)) { String[] segments = s.split("_"); //$NON-NLS-1$ return build(Arrays.asList(segments), options); } else { List<String> segments = new ArrayList<>(); int start = 0; for (int i = 1, n = s.length(); i < n; i++) { if (Character.isUpperCase(s.charAt(i))) { segments.add(s.substring(start, i)); start = i; } } segments.add(s.substring(start)); return build(segments, options); } } private static PropertyName build(List<String> segments, Option[] options) { Set<Option> opts = EnumUtil.freeze(options); return new PropertyName(apply(opts, segments)); } private static List<String> apply(Set<Option> opts, List<String> segments) { if (segments.isEmpty()) { return segments; } List<String> results = segments; if (opts.contains(Option.KEEP_CASELESS_WORDS) == false) { results = combineCaselessSegments(segments); } return results; } private static List<String> combineCaselessSegments(List<String> segments) { List<String> applied = new ArrayList<>(); applied.add(segments.get(0)); for (int i = 1, n = segments.size(); i < n; i++) { String s = segments.get(i); if (s.isEmpty()) { continue; } if (hasCase(s) == false) { int last = applied.size() - 1; assert last >= 0; applied.set(last, applied.get(last) + s); } else { applied.add(s); } } return applied; } private static boolean hasCase(String s) { assert s.isEmpty() == false; char first = s.charAt(0); int type = Character.getType(first); return type == Character.UPPERCASE_LETTER || type == Character.LOWERCASE_LETTER; } private static String trimNameString(String nameString) { Matcher matcher = TRIM_PATTERN.matcher(nameString); if (matcher.matches()) { return matcher.group(1); } return nameString; } private static List<String> normalize(List<String> segments) { List<String> results = new ArrayList<>(); for (String segment : 
segments) { if (segment.isEmpty() == false) { results.add(segment.toLowerCase(Locale.ENGLISH)); } } return Collections.unmodifiableList(results); } /** * Returns whether this name is empty or not. * @return {@code true} if this name is empty, otherwise {@code false} */ public boolean isEmpty() { return words.isEmpty(); } /** * Returns in this name. * @return the words (lower case) */ public List<String> getWords() { return words; } /** * Returns a property name which the specified is inserted into head of this name. * The method does not modifies this object. * @param word the first word * @return the modified name */ public PropertyName addFirst(String word) { List<String> results = new ArrayList<>(); results.add(word); results.addAll(words); return new PropertyName(results); } /** * Returns a property name which the specified is inserted into tail of this name. * The method does not modifies this object. * @param word the last word * @return the modified name */ public PropertyName addLast(String word) { List<String> results = new ArrayList<>(); results.addAll(words); results.add(word); return new PropertyName(results); } /** * Returns a property name which the first word is removed from this. * The method does not modifies this object. * @return the modified name */ public PropertyName removeFirst() { if (words.isEmpty()) { throw new IllegalStateException(); } return new PropertyName(words.subList(1, words.size())); } /** * Returns a property name which the first word is removed from this. * The method does not modifies this object. * @return the modified name */ public PropertyName removeLast() { if (words.isEmpty()) { throw new IllegalStateException(); } return new PropertyName(words.subList(0, words.size() - 1)); } /** * Returns the name string as {@code snake_case}. * This may returns {@link #EMPTY_NAME} if this name {@link #isEmpty() is empty}. 
* @return the property name */ public String toName() { if (words.isEmpty()) { return EMPTY_NAME; } StringBuilder buf = new StringBuilder(); buf.append(words.get(0)); for (int i = 1, n = words.size(); i < n; i++) { buf.append('_'); buf.append(words.get(i)); } return buf.toString(); } /** * Returns the name string as {@code camelCase}. * This may returns {@link #EMPTY_NAME} if this name {@link #isEmpty() is empty}. * @return the property name */ public String toMemberName() { if (words.isEmpty()) { return EMPTY_NAME; } StringBuilder buf = new StringBuilder(); buf.append(words.get(0)); for (int i = 1, n = words.size(); i < n; i++) { buf.append(capitalize(words.get(i))); } return buf.toString(); } private String capitalize(String segment) { assert segment != null; StringBuilder buf = new StringBuilder(segment.toLowerCase(Locale.ENGLISH)); buf.setCharAt(0, Character.toUpperCase(buf.charAt(0))); return buf.toString(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + words.hashCode(); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } PropertyName other = (PropertyName) obj; if (!words.equals(other.words)) { return false; } return true; } @Override public String toString() { return toName(); } /** * Represents options of {@link PropertyName}. */ public enum Option { /** * do not combine segments which consist of case-less word. */ KEEP_CASELESS_WORDS, } }
package com.example.android.sunshine.app; import android.content.Context; import android.database.Cursor; import android.os.Bundle; import android.support.v4.view.ViewCompat; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import com.bumptech.glide.Glide; import com.example.android.sunshine.app.data.WeatherContract; /** * {@link ForecastAdapter} exposes a list of weather forecasts * from a {@link android.database.Cursor} to a {@link android.widget.ListView}. */ public class ForecastAdapter extends RecyclerView.Adapter<ForecastAdapter.ForecastAdapterViewHolder> { private static final String LOG_TAG = ForecastAdapter.class.getName(); // public static final int VIEW_TYPE_COUNT = 2; private final int VIEW_TYPE_TODAY = 0; private final int VIEW_TYPE_FUTURE_DAY = 1; private boolean mUseTodayLayout; private Cursor mCursor; final private Context mContext; final private ForecastAdapterOnClickHandler mClickHandler; final private View mEmptyView; final private ItemChoiceManager mICM; public class ForecastAdapterViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener { public final ImageView iconView; public final TextView dateView; public final TextView descriptionView; public final TextView highTempView; public final TextView lowTempView; public ForecastAdapterViewHolder(View view) { super(view); this.dateView = (TextView) view.findViewById(R.id.list_item_date_textview); this.iconView = (ImageView) view.findViewById(R.id.list_item_icon); this.descriptionView = (TextView) view.findViewById(R.id.list_item_forecast_textview); this.highTempView = (TextView) view.findViewById(R.id.list_item_high_textview); this.lowTempView = (TextView) view.findViewById(R.id.list_item_low_textview); view.setOnClickListener(this); } @Override public void onClick(View view) { int position = getAdapterPosition(); 
mCursor.moveToPosition(position); int dateColumnIndex = mCursor.getColumnIndex(WeatherContract.WeatherEntry.COLUMN_DATE); mClickHandler.onClick(mCursor.getLong(dateColumnIndex), this); mICM.onClick(this); } } public ForecastAdapter(Context context, ForecastAdapterOnClickHandler dh, View emptyView, int choiceMode) { mContext = context; mClickHandler = dh; mEmptyView = emptyView; mICM = new ItemChoiceManager(this); mICM.setChoiceMode(choiceMode); } public static interface ForecastAdapterOnClickHandler { void onClick(Long date, ForecastAdapterViewHolder vh); } /* This takes advantage of the fact that the viewGroup passed to onCreateViewHolder is the RecyclerView that will be used to contain the view, so that it can get the current ItemSelectionManager from the view. One could implement this pattern without modifying RecyclerView by taking advantage of the view tag to store the ItemChoiceManager. */ @Override public ForecastAdapterViewHolder onCreateViewHolder(ViewGroup viewGroup, int viewType) { if (viewGroup instanceof RecyclerView) { int layoutId = -1; if (viewType == VIEW_TYPE_TODAY) { layoutId = R.layout.list_item_forecast_today; } else if (viewType == VIEW_TYPE_FUTURE_DAY) { layoutId = R.layout.list_item_forecast; } View view = LayoutInflater.from(viewGroup.getContext()).inflate(layoutId, viewGroup, false); view.setFocusable(true); return new ForecastAdapterViewHolder(view); } else { throw new RuntimeException("Not bound to RecyclerView"); } } @Override public void onBindViewHolder(ForecastAdapterViewHolder forecastAdapterViewHolder, int position) { mCursor.moveToPosition(position); int weatherId = mCursor.getInt(ForecastFragment.COL_WEATHER_CONDITION_ID); int defaultImage; boolean useLongToday; // colorful for today, gray for other days if (getItemViewType(position) == VIEW_TYPE_TODAY) { defaultImage = Utility.getArtResourceForWeatherCondition(weatherId); useLongToday = true; } else { defaultImage = Utility.getIconResourceForWeatherCondition(weatherId); 
useLongToday = false; } if (Utility.usingLocalGraphics(mContext)) { forecastAdapterViewHolder.iconView.setImageResource(defaultImage); } else { Glide.with(mContext) .load(Utility.getArtUrlForWeatherCondition(mContext, weatherId)) .error(defaultImage) .crossFade() .into(forecastAdapterViewHolder.iconView); } // this enables better animations. even if we lose state due to a device rotation, // the animator can use this to re-find the original view ViewCompat.setTransitionName(forecastAdapterViewHolder.iconView, "iconView" + position); // // date long dateInMillis = mCursor.getLong(ForecastFragment.COL_WEATHER_DATE); forecastAdapterViewHolder.dateView.setText(Utility.getFriendlyDayString(mContext, dateInMillis, useLongToday)); // // //description String description = Utility.getStringForWeatherCondition(mContext, weatherId); forecastAdapterViewHolder.descriptionView.setText(description); forecastAdapterViewHolder.descriptionView.setContentDescription(mContext.getString( R.string.a11y_forecast, description)); // // // For accessibility, we don't want a content description for the icon field // // because the information is repeated in the description view and the icon // // is not individually selectable // // //high temperature String high = Utility.formatTemperature(mContext, mCursor.getDouble(ForecastFragment.COL_WEATHER_MAX_TEMP)); forecastAdapterViewHolder.highTempView.setText(high); forecastAdapterViewHolder.highTempView.setContentDescription( mContext.getString(R.string.a11y_high_temp, high)); // // //low temperature String low = Utility.formatTemperature(mContext, mCursor.getDouble(ForecastFragment.COL_WEATHER_MIN_TEMP)); forecastAdapterViewHolder.lowTempView.setText(low); forecastAdapterViewHolder.lowTempView.setContentDescription( mContext.getString(R.string.a11y_low_temp, low)); mICM.onBindViewHolder(forecastAdapterViewHolder, position); } public void setmUseTodayLayout(boolean mUseTodayLayout) { this.mUseTodayLayout = mUseTodayLayout; } public void 
onRestoreInstanceState(Bundle savedInstanceState){ mICM.onRestoreInstanceState(savedInstanceState); } public void onSaveInstanceState(Bundle outState) { mICM.onSaveInstanceState(outState); } public int getSelectedItemPosition() { return mICM.getSelectedItemPosition(); } @Override public int getItemViewType(int position) { return (position == 0 && mUseTodayLayout) ? VIEW_TYPE_TODAY : VIEW_TYPE_FUTURE_DAY; } public void swapCursor(Cursor newCursor) { mCursor = newCursor; notifyDataSetChanged(); mEmptyView.setVisibility(getItemCount() == 0 ? View.VISIBLE : View.GONE); } public Cursor getCursor() { return mCursor; } public void selectView(RecyclerView.ViewHolder viewHolder) { if ( viewHolder instanceof ForecastAdapterViewHolder ) { ForecastAdapterViewHolder vfh = (ForecastAdapterViewHolder)viewHolder; vfh.onClick(vfh.itemView); } } @Override public int getItemCount() { return mCursor == null ? 0 : mCursor.getCount(); } }
/**
 */
package substationStandard.LNNodes.LNGroupC.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import substationStandard.Dataclasses.ACT;
import substationStandard.Dataclasses.ING;
import substationStandard.Dataclasses.SPS;
import substationStandard.LNNodes.LNGroupC.CPOW;
import substationStandard.LNNodes.LNGroupC.LNGroupCPackage;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>CPOW</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link substationStandard.LNNodes.LNGroupC.impl.CPOWImpl#getTmExc <em>Tm Exc</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupC.impl.CPOWImpl#getStrPOW <em>Str POW</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupC.impl.CPOWImpl#getOpOpn <em>Op Opn</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupC.impl.CPOWImpl#getOpCls <em>Op Cls</em>}</li>
 *   <li>{@link substationStandard.LNNodes.LNGroupC.impl.CPOWImpl#getMaxDlTmms <em>Max Dl Tmms</em>}</li>
 * </ul>
 *
 * @generated
 */
// NOTE(review): EMF-generated code (all members are @generated). Do not hand-edit
// the bodies; regenerate from the Ecore model so changes are not lost.
public class CPOWImpl extends GroupCImpl implements CPOW {
	/**
	 * The cached value of the '{@link #getTmExc() <em>Tm Exc</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTmExc()
	 * @generated
	 * @ordered
	 */
	protected SPS tmExc;

	/**
	 * The cached value of the '{@link #getStrPOW() <em>Str POW</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStrPOW()
	 * @generated
	 * @ordered
	 */
	protected SPS strPOW;

	/**
	 * The cached value of the '{@link #getOpOpn() <em>Op Opn</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getOpOpn()
	 * @generated
	 * @ordered
	 */
	protected ACT opOpn;

	/**
	 * The cached value of the '{@link #getOpCls() <em>Op Cls</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getOpCls()
	 * @generated
	 * @ordered
	 */
	protected ACT opCls;

	/**
	 * The cached value of the '{@link #getMaxDlTmms() <em>Max Dl Tmms</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMaxDlTmms()
	 * @generated
	 * @ordered
	 */
	protected ING maxDlTmms;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected CPOWImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return LNGroupCPackage.Literals.CPOW;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public SPS getTmExc() {
		// Standard EMF pattern: lazily resolve a cross-document proxy and emit a
		// RESOLVE notification if the resolved object differs from the proxy.
		if (tmExc != null && tmExc.eIsProxy()) {
			InternalEObject oldTmExc = (InternalEObject)tmExc;
			tmExc = (SPS)eResolveProxy(oldTmExc);
			if (tmExc != oldTmExc) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupCPackage.CPOW__TM_EXC, oldTmExc, tmExc));
			}
		}
		return tmExc;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public SPS basicGetTmExc() {
		// Returns the cached reference without proxy resolution or notification.
		return tmExc;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTmExc(SPS newTmExc) {
		SPS oldTmExc = tmExc;
		tmExc = newTmExc;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupCPackage.CPOW__TM_EXC, oldTmExc, tmExc));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public SPS getStrPOW() {
		if (strPOW != null && strPOW.eIsProxy()) {
			InternalEObject oldStrPOW = (InternalEObject)strPOW;
			strPOW = (SPS)eResolveProxy(oldStrPOW);
			if (strPOW != oldStrPOW) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupCPackage.CPOW__STR_POW, oldStrPOW, strPOW));
			}
		}
		return strPOW;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public SPS basicGetStrPOW() {
		return strPOW;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStrPOW(SPS newStrPOW) {
		SPS oldStrPOW = strPOW;
		strPOW = newStrPOW;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupCPackage.CPOW__STR_POW, oldStrPOW, strPOW));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ACT getOpOpn() {
		if (opOpn != null && opOpn.eIsProxy()) {
			InternalEObject oldOpOpn = (InternalEObject)opOpn;
			opOpn = (ACT)eResolveProxy(oldOpOpn);
			if (opOpn != oldOpOpn) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupCPackage.CPOW__OP_OPN, oldOpOpn, opOpn));
			}
		}
		return opOpn;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ACT basicGetOpOpn() {
		return opOpn;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setOpOpn(ACT newOpOpn) {
		ACT oldOpOpn = opOpn;
		opOpn = newOpOpn;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupCPackage.CPOW__OP_OPN, oldOpOpn, opOpn));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ACT getOpCls() {
		if (opCls != null && opCls.eIsProxy()) {
			InternalEObject oldOpCls = (InternalEObject)opCls;
			opCls = (ACT)eResolveProxy(oldOpCls);
			if (opCls != oldOpCls) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupCPackage.CPOW__OP_CLS, oldOpCls, opCls));
			}
		}
		return opCls;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ACT basicGetOpCls() {
		return opCls;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setOpCls(ACT newOpCls) {
		ACT oldOpCls = opCls;
		opCls = newOpCls;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupCPackage.CPOW__OP_CLS, oldOpCls, opCls));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING getMaxDlTmms() {
		if (maxDlTmms != null && maxDlTmms.eIsProxy()) {
			InternalEObject oldMaxDlTmms = (InternalEObject)maxDlTmms;
			maxDlTmms = (ING)eResolveProxy(oldMaxDlTmms);
			if (maxDlTmms != oldMaxDlTmms) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, LNGroupCPackage.CPOW__MAX_DL_TMMS, oldMaxDlTmms, maxDlTmms));
			}
		}
		return maxDlTmms;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ING basicGetMaxDlTmms() {
		return maxDlTmms;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setMaxDlTmms(ING newMaxDlTmms) {
		ING oldMaxDlTmms = maxDlTmms;
		maxDlTmms = newMaxDlTmms;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, LNGroupCPackage.CPOW__MAX_DL_TMMS, oldMaxDlTmms, maxDlTmms));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		// Reflective accessor used by the EMF runtime; dispatches on feature ID.
		switch (featureID) {
			case LNGroupCPackage.CPOW__TM_EXC:
				if (resolve) return getTmExc();
				return basicGetTmExc();
			case LNGroupCPackage.CPOW__STR_POW:
				if (resolve) return getStrPOW();
				return basicGetStrPOW();
			case LNGroupCPackage.CPOW__OP_OPN:
				if (resolve) return getOpOpn();
				return basicGetOpOpn();
			case LNGroupCPackage.CPOW__OP_CLS:
				if (resolve) return getOpCls();
				return basicGetOpCls();
			case LNGroupCPackage.CPOW__MAX_DL_TMMS:
				if (resolve) return getMaxDlTmms();
				return basicGetMaxDlTmms();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case LNGroupCPackage.CPOW__TM_EXC:
				setTmExc((SPS)newValue);
				return;
			case LNGroupCPackage.CPOW__STR_POW:
				setStrPOW((SPS)newValue);
				return;
			case LNGroupCPackage.CPOW__OP_OPN:
				setOpOpn((ACT)newValue);
				return;
			case LNGroupCPackage.CPOW__OP_CLS:
				setOpCls((ACT)newValue);
				return;
			case LNGroupCPackage.CPOW__MAX_DL_TMMS:
				setMaxDlTmms((ING)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case LNGroupCPackage.CPOW__TM_EXC:
				setTmExc((SPS)null);
				return;
			case LNGroupCPackage.CPOW__STR_POW:
				setStrPOW((SPS)null);
				return;
			case LNGroupCPackage.CPOW__OP_OPN:
				setOpOpn((ACT)null);
				return;
			case LNGroupCPackage.CPOW__OP_CLS:
				setOpCls((ACT)null);
				return;
			case LNGroupCPackage.CPOW__MAX_DL_TMMS:
				setMaxDlTmms((ING)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case LNGroupCPackage.CPOW__TM_EXC:
				return tmExc != null;
			case LNGroupCPackage.CPOW__STR_POW:
				return strPOW != null;
			case LNGroupCPackage.CPOW__OP_OPN:
				return opOpn != null;
			case LNGroupCPackage.CPOW__OP_CLS:
				return opCls != null;
			case LNGroupCPackage.CPOW__MAX_DL_TMMS:
				return maxDlTmms != null;
		}
		return super.eIsSet(featureID);
	}

} //CPOWImpl
package com.bignerdranch.android.criminalintent; import java.util.Date; import java.util.UUID; import com.bignerdranch.android.criminalintent.CrimeListFragment.Callbacks; import android.R.string; import android.annotation.TargetApi; import android.app.Activity; import android.content.Intent; import android.content.pm.PackageManager; import android.database.Cursor; import android.graphics.drawable.BitmapDrawable; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.provider.ContactsContract; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.NavUtils; import android.text.Editable; import android.text.TextWatcher; import android.text.format.DateFormat; import android.util.Log; import android.view.LayoutInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.EditText; public class CrimeFragment extends Fragment { private static final String TAG = "CrimeFragment"; public static final String EXTRA_CRIME_ID = "com.bignerdranch.android.criminalintent.crime_id"; private static final String DIALOG_DATE = "date"; private static final String DIALOG_IMAGE = "image"; private static final int REQUEST_DATE = 0; private static final int REQUEST_PHOTO = 1; private static final int REQUEST_CONTACT = 2; private Crime mCrime; private EditText mTitleField; private Button mDateButton; private CheckBox mSolvedCheckBox; private ImageButton mPhotoButton; private Button mSuspectButton; private ImageView mPhotoView; private Callbacks mCallbacks; public interface Callbacks { void onCrimeUpdated(Crime crime); } @Override public void onAttach(Activity activity) { super.onAttach(activity); mCallbacks = 
(Callbacks) activity; } @Override public void onDetach() { super.onDetach(); mCallbacks = null; } public void updateDate() { mDateButton.setText(mCrime.getDate().toString()); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); UUID crimeId = (UUID) getArguments().getSerializable(EXTRA_CRIME_ID); mCrime = CrimeLab.get(getActivity()).getCrime(crimeId); setHasOptionsMenu(true); } @TargetApi(11) @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.fragment_crime, container, false); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { if (NavUtils.getParentActivityName(getActivity()) != null) { getActivity().getActionBar().setDisplayHomeAsUpEnabled(true); } } mTitleField = (EditText) v.findViewById(R.id.crime_title); mTitleField.setText(mCrime.getTitle()); mTitleField.addTextChangedListener(new TextWatcher() { @Override public void onTextChanged(CharSequence arg0, int arg1, int arg2, int arg3) { mCrime.setTitle(arg0.toString()); mCallbacks.onCrimeUpdated(mCrime); getActivity().setTitle(mCrime.getTitle()); } @Override public void beforeTextChanged(CharSequence arg0, int arg1, int arg2, int arg3) { // TODO Auto-generated method stub } @Override public void afterTextChanged(Editable arg0) { // TODO Auto-generated method stub } }); mDateButton = (Button) v.findViewById(R.id.crime_date); updateDate(); mDateButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { FragmentManager fm = getActivity().getSupportFragmentManager(); DatePickerFragment dialog = DatePickerFragment.newInstance(mCrime.getDate()); dialog.setTargetFragment(CrimeFragment.this, REQUEST_DATE); dialog.show(fm, DIALOG_DATE); } }); mSolvedCheckBox = (CheckBox) v.findViewById(R.id.crime_solved); mSolvedCheckBox.setChecked(mCrime.isSolved()); mSolvedCheckBox.setOnCheckedChangeListener(new OnCheckedChangeListener() { public void 
onCheckedChanged(CompoundButton arg0, boolean arg1) { mCrime.setSolved(arg1); mCallbacks.onCrimeUpdated(mCrime); } }); mPhotoButton = (ImageButton) v.findViewById(R.id.crime_imageButton); mPhotoButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent i = new Intent(getActivity(), CrimeCameraActivity.class); startActivityForResult(i, REQUEST_PHOTO); } }); PackageManager pm = getActivity().getPackageManager(); boolean hasACamera = pm.hasSystemFeature(PackageManager.FEATURE_CAMERA) || pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT) || Build.VERSION.SDK_INT < Build.VERSION_CODES.GINGERBREAD ; if (!hasACamera) { mPhotoButton.setEnabled(false); } mPhotoView = (ImageView) v.findViewById(R.id.crime_imageView); mPhotoView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Photo p = mCrime.getPhoto(); if (p == null) { return; } FragmentManager fm = getActivity().getSupportFragmentManager(); String path = getActivity().getFileStreamPath(p.getFilename()).getAbsolutePath(); ImageFragment.newInstance(path).show(fm, DIALOG_IMAGE); } }); Button reportButton = (Button) v.findViewById(R.id.crime_reportButton); reportButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent i = new Intent(Intent.ACTION_SEND); i.setType("text/plain"); i.putExtra(Intent.EXTRA_TEXT, getCrimeReport()); i.putExtra(Intent.EXTRA_SUBJECT, getString(R.string.crime_report_subject)); i = Intent.createChooser(i, getString(R.string.send_report)); startActivity(i); } }); mSuspectButton = (Button) v.findViewById(R.id.crime_suspectButton); mSuspectButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent i = new Intent(Intent.ACTION_PICK, ContactsContract.Contacts.CONTENT_URI); startActivityForResult(i, REQUEST_CONTACT); } }); if (mCrime.getSuspect() != null) { mSuspectButton.setText(mCrime.getSuspect()); } return v; } private void 
showPhoto() { Photo p = mCrime.getPhoto(); BitmapDrawable b = null; if (p != null) { String path = getActivity().getFileStreamPath(p.getFilename()).getAbsolutePath(); b = PictureUtils.getScaledDrawable(getActivity(), path); } mPhotoView.setImageDrawable(b); } private String getCrimeReport() { String solvedString = null; if (mCrime.isSolved()) { solvedString = getString(R.string.crime_report_solved); } else { solvedString = getString(R.string.crime_report_unsolved); } String dateFormat = "EEE, MMM dd"; String dateString = DateFormat.format(dateFormat, mCrime.getDate()).toString(); String suspect = mCrime.getSuspect(); if (suspect == null) { suspect = getString(R.string.crime_report_no_suspect); } else { suspect = getString(R.string.crime_report_suspect, suspect); } String report = getString(R.string.crime_report, mCrime.getTitle(), dateString, solvedString, suspect); return report; } @Override public void onStart() { super.onStart(); showPhoto(); } @Override public void onStop() { super.onStop(); PictureUtils.cleanImageView(mPhotoView); } public static CrimeFragment newInstance(UUID crimeId) { Bundle args = new Bundle(); args.putSerializable(EXTRA_CRIME_ID, crimeId); CrimeFragment fragment = new CrimeFragment(); fragment.setArguments(args); return fragment; } @Override public void onActivityResult(int requestCode, int resultCode, Intent data){ if (resultCode != Activity.RESULT_OK) { return; } if (requestCode == REQUEST_DATE) { Date date = (Date) data.getSerializableExtra(DatePickerFragment.EXTRA_DATE); mCrime.setDate(date); mCallbacks.onCrimeUpdated(mCrime); updateDate(); } else if (requestCode == REQUEST_PHOTO) { String filename = data.getStringExtra(CrimeCameraFragment.EXTRA_PHOTO_FILENAME); if (filename != null) { Photo p = new Photo(filename); mCrime.setPhoto(p); mCallbacks.onCrimeUpdated(mCrime); showPhoto(); } } else if (requestCode == REQUEST_CONTACT) { Uri contactUri = data.getData(); String[] queryFieldStrings = new String[] { 
ContactsContract.Contacts.DISPLAY_NAME }; Cursor c = getActivity().getContentResolver().query(contactUri, queryFieldStrings, null, null, null); if (c.getCount() == 0) { c.close(); return; } c.moveToFirst(); String suspect = c.getString(0); mCrime.setSuspect(suspect); mCallbacks.onCrimeUpdated(mCrime); mSuspectButton.setText(suspect); c.close(); } } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: if (NavUtils.getParentActivityName(getActivity()) != null) { NavUtils.navigateUpFromSameTask(getActivity()); } return true; default: return super.onOptionsItemSelected(item); } } @Override public void onPause() { super.onPause(); CrimeLab.get(getActivity()).saveCrimes(); } }
package com.google.sps.servlets; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.users.User; import com.google.appengine.api.users.UserService; import com.google.sps.data.AnnotatedField; import com.google.sps.data.ChromeOSDevice; import java.io.IOException; import java.io.StringWriter; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.collections4.keyvalue.MultiKey; import org.apache.commons.collections4.map.MultiKeyMap; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /* * Test the aggregation functionality to ensure it reports accurate counts and * throws errors when appropriate. Eventually will need to add tests for * aggregating by multiple fields at once. 
*/
@RunWith(JUnit4.class)
public final class AggregationServletTest {

  private final String TEST_USER_ID = "testUserId";
  private final String TEST_USER_EMAIL = "testEmail";
  private final String TEST_USER_AUTH_DOMAIN = "testAuthDomain";
  private final String LOCATION_ONE = "New Jersey";
  private final String LOCATION_TWO = "California";
  private final String USER_ONE = "James";
  private final String USER_TWO = "Josiah";
  private final String USER_THREE = "Jeremy";
  private final String ASSET_ID_ONE = "12345";

  // Five devices that share one asset id but are spread over two locations and
  // three users, so each aggregation field yields a different grouping.
  private final ChromeOSDevice DEVICE_ONE =
      new ChromeOSDevice(ASSET_ID_ONE, LOCATION_ONE, USER_ONE, "deviceId", "serialNumber");
  private final ChromeOSDevice DEVICE_TWO =
      new ChromeOSDevice(ASSET_ID_ONE, LOCATION_ONE, USER_TWO, "deviceId", "serialNumber");
  private final ChromeOSDevice DEVICE_THREE =
      new ChromeOSDevice(ASSET_ID_ONE, LOCATION_TWO, USER_THREE, "deviceId", "serialNumber");
  private final ChromeOSDevice DEVICE_FOUR =
      new ChromeOSDevice(ASSET_ID_ONE, LOCATION_TWO, USER_ONE, "deviceId", "serialNumber");
  private final ChromeOSDevice DEVICE_FIVE =
      new ChromeOSDevice(ASSET_ID_ONE, LOCATION_ONE, USER_THREE, "deviceId", "serialNumber");

  private final List<ChromeOSDevice> allDevices = new ArrayList<>(
      Arrays.asList(DEVICE_ONE, DEVICE_TWO, DEVICE_THREE, DEVICE_FOUR, DEVICE_FIVE));

  private AggregationServlet servlet = new AggregationServlet();
  private HttpServletRequest request = mock(HttpServletRequest.class);
  private HttpServletResponse response = mock(HttpServletResponse.class);

  @Test
  public void onlyOneUniqueField() {
    // All five devices share the same asset id, so they collapse into one entry.
    MultiKeyMap<String, List<ChromeOSDevice>> expected = new MultiKeyMap<>();
    expected.put(new MultiKey<>(new String[] {ASSET_ID_ONE}), allDevices);

    MultiKeyMap<String, List<ChromeOSDevice>> actual =
        processData(allDevices, AnnotatedField.ASSET_ID);
    Assert.assertEquals(expected, actual);
  }

  @Test
  public void multipleResultEntries() {
    // Grouping by user produces three entries, one per distinct user.
    MultiKeyMap<String, List<ChromeOSDevice>> expected = new MultiKeyMap<>();
    expected.put(new MultiKey<>(new String[] {USER_ONE}), Arrays.asList(DEVICE_ONE, DEVICE_FOUR));
    expected.put(new MultiKey<>(new String[] {USER_TWO}), Arrays.asList(DEVICE_TWO));
    expected.put(
        new MultiKey<>(new String[] {USER_THREE}), Arrays.asList(DEVICE_THREE, DEVICE_FIVE));

    MultiKeyMap<String, List<ChromeOSDevice>> actual =
        processData(allDevices, AnnotatedField.USER);
    Assert.assertEquals(expected, actual);
  }

  @Test
  public void annotatedLocation() {
    // Grouping by location produces one entry per distinct location.
    MultiKeyMap<String, List<ChromeOSDevice>> expected = new MultiKeyMap<>();
    expected.put(
        new MultiKey<>(new String[] {LOCATION_ONE}),
        Arrays.asList(DEVICE_ONE, DEVICE_TWO, DEVICE_FIVE));
    expected.put(
        new MultiKey<>(new String[] {LOCATION_TWO}), Arrays.asList(DEVICE_THREE, DEVICE_FOUR));

    MultiKeyMap<String, List<ChromeOSDevice>> actual =
        processData(allDevices, AnnotatedField.LOCATION);
    Assert.assertEquals(expected, actual);
  }

  @Test
  public void multipleAggregationFields() {
    // Aggregating by (asset id, location) produces composite keys.
    MultiKeyMap<String, List<ChromeOSDevice>> expected = new MultiKeyMap<>();
    expected.put(
        new MultiKey<>(new String[] {ASSET_ID_ONE, LOCATION_ONE}),
        Arrays.asList(DEVICE_ONE, DEVICE_TWO, DEVICE_FIVE));
    expected.put(
        new MultiKey<>(new String[] {ASSET_ID_ONE, LOCATION_TWO}),
        Arrays.asList(DEVICE_THREE, DEVICE_FOUR));

    MultiKeyMap<String, List<ChromeOSDevice>> actual =
        AggregationServlet.processData(
            allDevices,
            new LinkedHashSet<>(
                Arrays.asList(AnnotatedField.ASSET_ID, AnnotatedField.LOCATION)));
    Assert.assertEquals(expected, actual);
  }

  @Test
  public void invalidMultiFieldArgumentReceivesBadRequest() throws IOException {
    // One valid field plus one unknown field ("deviceId") must be rejected whole.
    when(request.getParameter("aggregationField")).thenReturn("annotatedLocation,deviceId");
    setNewResponseWriter(response);
    servlet.doGet(request, response);
    verify(response).setStatus(HttpServletResponse.SC_BAD_REQUEST);
  }

  @Test
  public void invalidArgumentReceivesBadRequest() throws IOException {
    when(request.getParameter("aggregationField")).thenReturn("deviceId");
    setNewResponseWriter(response);
    servlet.doGet(request, response);
    verify(response).setStatus(HttpServletResponse.SC_BAD_REQUEST);
  }

  @Test
  public void nullArgumentReceivesBadRequest() throws IOException {
    // A missing aggregationField parameter is a client error, not a crash.
    when(request.getParameter("aggregationField")).thenReturn(null);
    setNewResponseWriter(response);
    servlet.doGet(request, response);
    verify(response).setStatus(HttpServletResponse.SC_BAD_REQUEST);
  }

  @Test
  public void validArgumentReceivesSuccess() throws IOException {
    when(request.getParameter("aggregationField")).thenReturn("annotatedLocation");
    setNewResponseWriter(response);
    setUpAuthorizedServlet();
    servlet.doGet(request, response);
    verify(response).setStatus(HttpServletResponse.SC_OK);
  }

  @Test
  public void validMultiFieldArgumentReceivesSuccess() throws IOException {
    when(request.getParameter("aggregationField"))
        .thenReturn("annotatedLocation,annotatedAssetId");
    setNewResponseWriter(response);
    setUpAuthorizedServlet();
    servlet.doGet(request, response);
    verify(response).setStatus(HttpServletResponse.SC_OK);
  }

  /**
   * Wires the servlet with a logged-in test user whose account owns
   * {@code allDevices}. Shared by the "success" tests above.
   */
  private void setUpAuthorizedServlet() {
    Util mockedUtil = mock(Util.class);
    User userFake = new User(TEST_USER_EMAIL, TEST_USER_AUTH_DOMAIN, TEST_USER_ID);
    UserService mockedUserService = mock(UserService.class);
    when(mockedUserService.isUserLoggedIn()).thenReturn(true);
    when(mockedUserService.getCurrentUser()).thenReturn(userFake);
    when(mockedUtil.getAllDevices(TEST_USER_ID)).thenReturn(allDevices);
    servlet.setUserService(mockedUserService);
    servlet.setUtilObj(mockedUtil);
  }

  /** Installs a throwaway writer on the mocked response so doGet can write output. */
  private void setNewResponseWriter(HttpServletResponse response) throws IOException {
    StringWriter stringWriter = new StringWriter();
    PrintWriter writer = new PrintWriter(stringWriter);
    when(response.getWriter()).thenReturn(writer);
  }

  /** Used for convenience in tests when only aggregating by one field. */
  private MultiKeyMap<String, List<ChromeOSDevice>> processData(
      List<ChromeOSDevice> devices, AnnotatedField field) {
    LinkedHashSet<AnnotatedField> fields = new LinkedHashSet<>();
    fields.add(field);
    return AggregationServlet.processData(devices, fields);
  }
}
/* * Copyright 2011 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.coprocessor; import java.io.IOException; import java.io.InterruptedIOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.ZooKeeperNodeTracker; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import static org.junit.Assert.*; /** * Tests unhandled exceptions thrown by coprocessors running on master. * Expected result is that the master will abort with an informative * error message describing the set of its loaded coprocessors for crash diagnosis. * (HBASE-4014). 
*/
@Category(MediumTests.class)
public class TestMasterCoprocessorExceptionWithAbort {

  /**
   * ZK node tracker that records whether the /hbase/master node was deleted.
   * Deletion of that node is the observable signal that the master aborted.
   */
  public static class MasterTracker extends ZooKeeperNodeTracker {
    public boolean masterZKNodeWasDeleted = false;

    public MasterTracker(ZooKeeperWatcher zkw, String masterNode, Abortable abortable) {
      super(zkw, masterNode, abortable);
    }

    @Override
    public synchronized void nodeDeleted(String path) {
      if (path.equals("/hbase/master")) {
        masterZKNodeWasDeleted = true;
      }
    }
  }

  /**
   * Issues the createTable call from a separate thread: the call is expected to
   * block (the master aborts instead of answering) and be interrupted by the test.
   */
  public static class CreateTableThread extends Thread {
    HBaseTestingUtility UTIL;
    public CreateTableThread(HBaseTestingUtility UTIL) {
      this.UTIL = UTIL;
    }

    @Override
    public void run() {
      // create a table : master coprocessor will throw an exception and not
      // catch it.
      HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
      htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
      try {
        HBaseAdmin admin = UTIL.getHBaseAdmin();
        admin.createTable(htd);
        fail("BuggyMasterObserver failed to throw an exception.");
      } catch (IOException e) {
        // The test interrupts this thread; the admin call surfaces that as
        // an InterruptedIOException.
        assertEquals("HBaseAdmin threw an interrupted IOException as expected.",
            e.getClass().getName(), "java.io.InterruptedIOException");
      }
    }
  }

  /**
   * Master observer that deliberately throws an uncaught NullPointerException
   * in postCreateTable, which (with hbase.coprocessor.abortonerror=true)
   * must make the master abort.
   */
  public static class BuggyMasterObserver extends BaseMasterObserver {
    private boolean preCreateTableCalled;
    private boolean postCreateTableCalled;
    private boolean startCalled;
    private boolean postStartMasterCalled;

    @Override
    public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> env,
        HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
      // cause a NullPointerException and don't catch it: this will cause the
      // master to abort().
      // (i++ auto-unboxes the null Integer, triggering the NPE.)
      Integer i;
      i = null;
      i = i++;
    }

    public boolean wasCreateTableCalled() {
      return preCreateTableCalled && postCreateTableCalled;
    }

    @Override
    public void postStartMaster(ObserverContext<MasterCoprocessorEnvironment> ctx)
        throws IOException {
      postStartMasterCalled = true;
    }

    public boolean wasStartMasterCalled() {
      return postStartMasterCalled;
    }

    @Override
    public void start(CoprocessorEnvironment env) throws IOException {
      startCalled = true;
    }

    public boolean wasStarted() {
      return startCalled;
    }
  }

  private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static byte[] TEST_TABLE = Bytes.toBytes("observed_table");
  private static byte[] TEST_FAMILY = Bytes.toBytes("fam1");

  @BeforeClass
  public static void setupBeforeClass() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    // Load the buggy observer on the master and make coprocessor errors fatal,
    // so the NPE in postCreateTable aborts the master.
    conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
        BuggyMasterObserver.class.getName());
    conf.set("hbase.coprocessor.abortonerror", "true");
    UTIL.startMiniCluster();
  }

  @AfterClass
  public static void teardownAfterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  @Test(timeout=30000)
  public void testExceptionFromCoprocessorWhenCreatingTable()
      throws IOException {
    MiniHBaseCluster cluster = UTIL.getHBaseCluster();

    HMaster master = cluster.getMaster();
    MasterCoprocessorHost host = master.getCoprocessorHost();
    BuggyMasterObserver cp = (BuggyMasterObserver)host.findCoprocessor(
        BuggyMasterObserver.class.getName());
    assertFalse("No table created yet", cp.wasCreateTableCalled());

    // set a watch on the zookeeper /hbase/master node. If the master dies,
    // the node will be deleted.
    ZooKeeperWatcher zkw = new ZooKeeperWatcher(UTIL.getConfiguration(),
        "unittest", new Abortable() {
      @Override
      public void abort(String why, Throwable e) {
        throw new RuntimeException("Fatal ZK error: " + why, e);
      }
      @Override
      public boolean isAborted() {
        return false;
      }
    });

    MasterTracker masterTracker = new MasterTracker(zkw, "/hbase/master",
        new Abortable() {
      @Override
      public void abort(String why, Throwable e) {
        throw new RuntimeException("Fatal ZK master tracker error, why=", e);
      }
      @Override
      public boolean isAborted() {
        return false;
      }
    });

    masterTracker.start();
    zkw.registerListener(masterTracker);

    // Test (part of the) output that should have been printed by master when it aborts:
    // (namely the part that shows the set of loaded coprocessors).
    // In this test, there is only a single coprocessor (BuggyMasterObserver).
    assertTrue(master.getLoadedCoprocessors().
      equals("[" +
          TestMasterCoprocessorExceptionWithAbort.BuggyMasterObserver.class.getName() +
          "]"));

    CreateTableThread createTableThread = new CreateTableThread(UTIL);

    // Attempting to create a table (using createTableThread above) triggers an NPE in BuggyMasterObserver.
    // Master will then abort and the /hbase/master zk node will be deleted.
    createTableThread.start();

    // Wait up to 30 seconds for master's /hbase/master zk node to go away after master aborts.
    for (int i = 0; i < 30; i++) {
      if (masterTracker.masterZKNodeWasDeleted == true) {
        break;
      }
      try {
        Thread.sleep(1000);
      } catch (InterruptedException e) {
        fail("InterruptedException while waiting for master zk node to " +
            "be deleted.");
      }
    }

    assertTrue("Master aborted on coprocessor exception, as expected.",
        masterTracker.masterZKNodeWasDeleted);

    // The createTable call never returns normally (the master is gone);
    // interrupt the worker thread and give it a second to wind down.
    createTableThread.interrupt();
    try {
      createTableThread.join(1000);
    } catch (InterruptedException e) {
      assertTrue("Ignoring InterruptedException while waiting for " +
          " createTableThread.join().", true);
    }
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.syntax; import static com.google.devtools.build.lib.syntax.compiler.ByteCodeUtils.append; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.syntax.ClassObject.SkylarkClassObject; import com.google.devtools.build.lib.syntax.SkylarkList.MutableList; import com.google.devtools.build.lib.syntax.SkylarkList.Tuple; import com.google.devtools.build.lib.syntax.compiler.ByteCodeMethodCalls; import com.google.devtools.build.lib.syntax.compiler.ByteCodeUtils; import com.google.devtools.build.lib.syntax.compiler.DebugInfo; import com.google.devtools.build.lib.syntax.compiler.DebugInfo.AstAccessors; import com.google.devtools.build.lib.syntax.compiler.Jump; import com.google.devtools.build.lib.syntax.compiler.Jump.PrimitiveComparison; import com.google.devtools.build.lib.syntax.compiler.LabelAdder; import com.google.devtools.build.lib.syntax.compiler.VariableScope; import net.bytebuddy.implementation.bytecode.ByteCodeAppender; import net.bytebuddy.implementation.bytecode.Duplication; import net.bytebuddy.implementation.bytecode.Removal; import net.bytebuddy.implementation.bytecode.StackManipulation; import java.util.ArrayList; import java.util.Collection; import 
java.util.Collections;
import java.util.EnumSet;
import java.util.IllegalFormatException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Syntax node for a binary operator expression.
 */
public final class BinaryOperatorExpression extends Expression {

  private final Expression lhs;
  private final Expression rhs;
  private final Operator operator;

  public BinaryOperatorExpression(Operator operator, Expression lhs, Expression rhs) {
    this.lhs = lhs;
    this.rhs = rhs;
    this.operator = operator;
  }

  /** Returns the left-hand operand expression. */
  public Expression getLhs() {
    return lhs;
  }

  /** Returns the right-hand operand expression. */
  public Expression getRhs() {
    return rhs;
  }

  /**
   * Returns the operator kind for this binary operation.
   */
  public Operator getOperator() {
    return operator;
  }

  @Override
  public String toString() {
    return lhs + " " + operator + " " + rhs;
  }

  /**
   * Implements comparison operators.
   *
   * <p>Publicly accessible for reflection and compiled Skylark code.
   */
  public static int compare(Object lval, Object rval, Location location) throws EvalException {
    try {
      return EvalUtils.SKYLARK_COMPARATOR.compare(lval, rval);
    } catch (EvalUtils.ComparisonException e) {
      throw new EvalException(location, e);
    }
  }

  /**
   * Implements the "in" operator.
   *
   * <p>Publicly accessible for reflection and compiled Skylark code.
   */
  public static boolean in(Object lval, Object rval, Location location) throws EvalException {
    if (rval instanceof SkylarkList) {
      for (Object obj : (SkylarkList) rval) {
        if (obj.equals(lval)) {
          return true;
        }
      }
      return false;
    } else if (rval instanceof Collection<?>) {
      return ((Collection<?>) rval).contains(lval);
    } else if (rval instanceof Map<?, ?>) {
      // For dicts, "in" tests key membership, matching Python.
      return ((Map<?, ?>) rval).containsKey(lval);
    } else if (rval instanceof SkylarkNestedSet) {
      return ((SkylarkNestedSet) rval).expandedSet().contains(lval);
    } else if (rval instanceof String) {
      if (lval instanceof String) {
        // "in" on strings is substring containment.
        return ((String) rval).contains((String) lval);
      } else {
        throw new EvalException(
            location, "in operator only works on strings if the left operand is also a string");
      }
    } else {
      throw new EvalException(
          location, "in operator only works on lists, tuples, sets, dicts and strings");
    }
  }

  @Override
  Object doEval(Environment env) throws EvalException, InterruptedException {
    Object lval = lhs.eval(env);

    // Short-circuit operators: the right operand is only evaluated when needed.
    if (operator == Operator.AND) {
      if (EvalUtils.toBoolean(lval)) {
        return rhs.eval(env);
      } else {
        return lval;
      }
    }

    if (operator == Operator.OR) {
      if (EvalUtils.toBoolean(lval)) {
        return lval;
      } else {
        return rhs.eval(env);
      }
    }

    Object rval = rhs.eval(env);

    switch (operator) {
      case PLUS:
        return plus(lval, rval, env, getLocation());

      case PIPE:
        return pipe(lval, rval, getLocation());

      case MINUS:
        return minus(lval, rval, getLocation());

      case MULT:
        return mult(lval, rval, getLocation());

      case DIVIDE:
        return divide(lval, rval, getLocation());

      case PERCENT:
        return percent(lval, rval, getLocation());

      case EQUALS_EQUALS:
        return lval.equals(rval);

      case NOT_EQUALS:
        return !lval.equals(rval);

      case LESS:
        return compare(lval, rval, getLocation()) < 0;

      case LESS_EQUALS:
        return compare(lval, rval, getLocation()) <= 0;

      case GREATER:
        return compare(lval, rval, getLocation()) > 0;

      case GREATER_EQUALS:
        return compare(lval, rval, getLocation()) >= 0;

      case IN:
        return in(lval, rval, getLocation());

      case NOT_IN:
        return !in(lval, rval, getLocation());

      default:
        throw new AssertionError("Unsupported binary operator: " + operator);
    } // endswitch
  }

  @Override
  public void accept(SyntaxTreeVisitor visitor) {
    visitor.visit(this);
  }

  @Override
  void validate(ValidationEnvironment env) throws EvalException {
    lhs.validate(env);
    rhs.validate(env);
  }

  @Override
  ByteCodeAppender compile(VariableScope scope, DebugInfo debugInfo) throws EvalException {
    AstAccessors debugAccessors = debugInfo.add(this);
    List<ByteCodeAppender> code = new ArrayList<>();
    ByteCodeAppender leftCompiled = lhs.compile(scope, debugInfo);
    ByteCodeAppender rightCompiled = rhs.compile(scope, debugInfo);
    // generate byte code for short-circuiting operators
    if (EnumSet.of(Operator.AND, Operator.OR).contains(operator)) {
      LabelAdder after = new LabelAdder();
      code.add(leftCompiled);
      append(
          code,
          // duplicate the value, one to convert to boolean, one to leave on stack
          // assumes we don't compile Skylark values to long/double
          Duplication.SINGLE,
          EvalUtils.toBoolean,
          // short-circuit and jump behind second operand expression if first is false/true
          Jump.ifIntOperandToZero(
                  operator == Operator.AND
                      ? PrimitiveComparison.EQUAL
                      : PrimitiveComparison.NOT_EQUAL)
              .to(after),
          // remove the duplicated value from above, as only the rhs is still relevant
          Removal.SINGLE);
      code.add(rightCompiled);
      append(code, after);
    } else if (EnumSet.of(
            Operator.LESS, Operator.LESS_EQUALS, Operator.GREATER, Operator.GREATER_EQUALS)
        .contains(operator)) {
      compileComparison(debugAccessors, code, leftCompiled, rightCompiled);
    } else {
      code.add(leftCompiled);
      code.add(rightCompiled);
      switch (operator) {
        case PLUS:
          append(code, callImplementation(scope, debugAccessors, operator));
          break;
        case PIPE:
        case MINUS:
        case MULT:
        case DIVIDE:
        case PERCENT:
          append(code, callImplementation(debugAccessors, operator));
          break;
        case EQUALS_EQUALS:
          append(code, ByteCodeMethodCalls.BCObject.equals, ByteCodeMethodCalls.BCBoolean.valueOf);
          break;
        case NOT_EQUALS:
          append(
              code,
              ByteCodeMethodCalls.BCObject.equals,
              ByteCodeUtils.intLogicalNegation(),
              ByteCodeMethodCalls.BCBoolean.valueOf);
          break;
        case IN:
          append(
              code,
              callImplementation(debugAccessors, operator),
              ByteCodeMethodCalls.BCBoolean.valueOf);
          break;
        case NOT_IN:
          // NOT_IN is compiled as IN followed by logical negation.
          append(
              code,
              callImplementation(debugAccessors, Operator.IN),
              ByteCodeUtils.intLogicalNegation(),
              ByteCodeMethodCalls.BCBoolean.valueOf);
          break;
        default:
          throw new UnsupportedOperationException("Unsupported binary operator: " + operator);
      } // endswitch
    }
    return ByteCodeUtils.compoundAppender(code);
  }

  /**
   * Compiles a comparison operator (&lt;, &lt;=, &gt;, &gt;=) by calling
   * {@link #compare} and converting its int result to a boxed Boolean.
   *
   * @param debugAccessors accessors for this node's debug Location
   * @param code the byte code appender list being built
   * @param leftCompiled compiled left operand
   * @param rightCompiled compiled right operand
   */
  private void compileComparison(
      AstAccessors debugAccessors,
      List<ByteCodeAppender> code,
      ByteCodeAppender leftCompiled,
      ByteCodeAppender rightCompiled) {
    PrimitiveComparison byteCodeOperator = PrimitiveComparison.forOperator(operator);
    code.add(leftCompiled);
    code.add(rightCompiled);
    append(
        code,
        debugAccessors.loadLocation,
        ByteCodeUtils.invoke(
            BinaryOperatorExpression.class, "compare", Object.class, Object.class, Location.class),
        ByteCodeUtils.intToPrimitiveBoolean(byteCodeOperator),
        ByteCodeMethodCalls.BCBoolean.valueOf);
  }

  /**
   * Implements Operator.PLUS.
   *
   * <p>Publicly accessible for reflection and compiled Skylark code.
   */
  public static Object plus(Object lval, Object rval, Environment env, Location location)
      throws EvalException {
    // int + int
    if (lval instanceof Integer && rval instanceof Integer) {
      return ((Integer) lval).intValue() + ((Integer) rval).intValue();
    }

    // string + string
    if (lval instanceof String && rval instanceof String) {
      return (String) lval + (String) rval;
    }

    if (lval instanceof SelectorValue || rval instanceof SelectorValue
        || lval instanceof SelectorList || rval instanceof SelectorList) {
      return SelectorList.concat(location, lval, rval);
    }

    if ((lval instanceof Tuple) && (rval instanceof Tuple)) {
      return Tuple.copyOf(Iterables.concat((Tuple) lval, (Tuple) rval));
    }

    if ((lval instanceof MutableList) && (rval instanceof MutableList)) {
      return MutableList.concat((MutableList) lval, (MutableList) rval, env);
    }

    if (lval instanceof Map<?, ?> && rval instanceof Map<?, ?>) {
      // dict + dict: right-hand entries win on key collisions.
      Map<?, ?> ldict = (Map<?, ?>) lval;
      Map<?, ?> rdict = (Map<?, ?>) rval;
      Map<Object, Object> result = new LinkedHashMap<>(ldict.size() + rdict.size());
      result.putAll(ldict);
      result.putAll(rdict);
      return ImmutableMap.copyOf(result);
    }

    if (lval instanceof SkylarkClassObject && rval instanceof SkylarkClassObject) {
      return SkylarkClassObject.concat(
          (SkylarkClassObject) lval, (SkylarkClassObject) rval, location);
    }

    // TODO(bazel-team): Remove this case. Union of sets should use '|' instead of '+'.
    if (lval instanceof SkylarkNestedSet) {
      return new SkylarkNestedSet((SkylarkNestedSet) lval, rval, location);
    }
    throw typeException(lval, rval, Operator.PLUS, location);
  }

  /**
   * Implements Operator.PIPE.
   *
   * <p>Publicly accessible for reflection and compiled Skylark code.
   */
  public static Object pipe(Object lval, Object rval, Location location) throws EvalException {
    if (lval instanceof SkylarkNestedSet) {
      return new SkylarkNestedSet((SkylarkNestedSet) lval, rval, location);
    }
    throw typeException(lval, rval, Operator.PIPE, location);
  }

  /**
   * Implements Operator.MINUS.
   *
   * <p>Publicly accessible for reflection and compiled Skylark code.
   */
  public static Object minus(Object lval, Object rval, Location location) throws EvalException {
    if (lval instanceof Integer && rval instanceof Integer) {
      return ((Integer) lval).intValue() - ((Integer) rval).intValue();
    }
    throw typeException(lval, rval, Operator.MINUS, location);
  }

  /**
   * Implements Operator.MULT.
   *
   * <p>Publicly accessible for reflection and compiled Skylark code.
   */
  public static Object mult(Object lval, Object rval, Location location) throws EvalException {
    // int * int
    if (lval instanceof Integer && rval instanceof Integer) {
      return ((Integer) lval).intValue() * ((Integer) rval).intValue();
    }

    // string * int
    if (lval instanceof String && rval instanceof Integer) {
      return Strings.repeat((String) lval, ((Integer) rval).intValue());
    }

    // int * string
    if (lval instanceof Integer && rval instanceof String) {
      return Strings.repeat((String) rval, ((Integer) lval).intValue());
    }
    throw typeException(lval, rval, Operator.MULT, location);
  }

  /**
   * Implements Operator.DIVIDE.
   *
   * <p>Publicly accessible for reflection and compiled Skylark code.
   */
  public static Object divide(Object lval, Object rval, Location location) throws EvalException {
    // int / int
    if (lval instanceof Integer && rval instanceof Integer) {
      if (rval.equals(0)) {
        throw new EvalException(location, "integer division by zero");
      }
      // Integer division doesn't give the same result in Java and in Python 2 with
      // negative numbers.
      // Java: -7/3 = -2
      // Python: -7/3 = -3
      // We want to follow Python semantics, so we round toward negative infinity
      // via Math.floorDiv (exact integer floor division, no floating-point detour).
      return Math.floorDiv(((Integer) lval).intValue(), ((Integer) rval).intValue());
    }
    throw typeException(lval, rval, Operator.DIVIDE, location);
  }

  /**
   * Implements Operator.PERCENT.
   *
   * <p>Publicly accessible for reflection and compiled Skylark code.
   */
  public static Object percent(Object lval, Object rval, Location location) throws EvalException {
    // int % int
    if (lval instanceof Integer && rval instanceof Integer) {
      if (rval.equals(0)) {
        throw new EvalException(location, "integer modulo by zero");
      }
      // Python and Java implement division differently, wrt negative numbers.
      // In Python, sign of the result is the sign of the divisor.
      int div = (Integer) rval;
      int result = ((Integer) lval).intValue() % Math.abs(div);
      if (result > 0 && div < 0) {
        result += div; // make the result negative
      } else if (result < 0 && div > 0) {
        result += div; // make the result positive
      }
      return result;
    }

    // string % tuple, string % dict, string % anything-else
    if (lval instanceof String) {
      try {
        String pattern = (String) lval;
        if (rval instanceof List<?>) {
          List<?> rlist = (List<?>) rval;
          if (EvalUtils.isTuple(rlist)) {
            return Printer.formatToString(pattern, rlist);
          }
          /* string % list: fall thru */
        }
        if (rval instanceof Tuple) {
          return Printer.formatToString(pattern, ((Tuple) rval).getList());
        }

        // Any other right operand is treated as a single format argument.
        return Printer.formatToString(pattern, Collections.singletonList(rval));
      } catch (IllegalFormatException e) {
        throw new EvalException(location, e.getMessage());
      }
    }
    throw typeException(lval, rval, Operator.PERCENT, location);
  }

  /**
   * Returns a StackManipulation that calls the given operator's implementation method.
   *
   * <p> The method must be named exactly as the lower case name of the operator and in addition to
   * the operands require an Environment and Location.
   */
  private static StackManipulation callImplementation(
      VariableScope scope, AstAccessors debugAccessors, Operator operator) {
    Class<?>[] parameterTypes =
        new Class<?>[] {Object.class, Object.class, Environment.class, Location.class};
    return new StackManipulation.Compound(
        scope.loadEnvironment(),
        debugAccessors.loadLocation,
        ByteCodeUtils.invoke(
            BinaryOperatorExpression.class, operator.name().toLowerCase(), parameterTypes));
  }

  /**
   * Returns a StackManipulation that calls the given operator's implementation method.
   *
   * <p> The method must be named exactly as the lower case name of the operator and in addition to
   * the operands require a Location.
   */
  private static StackManipulation callImplementation(
      AstAccessors debugAccessors, Operator operator) {
    Class<?>[] parameterTypes = new Class<?>[] {Object.class, Object.class, Location.class};
    return new StackManipulation.Compound(
        debugAccessors.loadLocation,
        ByteCodeUtils.invoke(
            BinaryOperatorExpression.class, operator.name().toLowerCase(), parameterTypes));
  }

  /**
   * Returns an exception signifying incorrect types for the given operator;
   * callers throw the returned exception.
   */
  private static EvalException typeException(
      Object lval, Object rval, Operator operator, Location location) {
    // NB: this message format is identical to that used by CPython 2.7.6 or 3.4.0,
    // though python raises a TypeError.
    // For more details, we'll hopefully have usable stack traces at some point.
    return new EvalException(
        location,
        String.format(
            "unsupported operand type(s) for %s: '%s' and '%s'",
            operator, EvalUtils.getDataTypeName(lval), EvalUtils.getDataTypeName(rval)));
  }
}
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans; import java.util.List; import java.util.Map; import org.pentaho.di.core.Const; import org.pentaho.di.core.NotePadMeta; import org.pentaho.di.core.gui.AreaOwner; import org.pentaho.di.core.gui.BasePainter; import org.pentaho.di.core.gui.GCInterface; import org.pentaho.di.core.gui.Point; import org.pentaho.di.core.gui.Rectangle; import org.pentaho.di.core.gui.ScrollBarInterface; import org.pentaho.di.core.gui.AreaOwner.AreaType; import org.pentaho.di.core.gui.GCInterface.EColor; import org.pentaho.di.core.gui.GCInterface.EFont; import org.pentaho.di.core.gui.GCInterface.EImage; import org.pentaho.di.core.gui.GCInterface.ELineStyle; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.partition.PartitionSchema; import org.pentaho.di.trans.step.BaseStepData.StepExecutionStatus; import org.pentaho.di.trans.step.StepIOMetaInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepPartitioningMeta; import org.pentaho.di.trans.step.StepStatus; import 
org.pentaho.di.trans.step.errorhandling.StreamInterface; import org.pentaho.di.trans.step.errorhandling.StreamInterface.StreamType; public class TransPainter extends BasePainter { private static Class<?> PKG = TransPainter.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$ public static final String STRING_PARTITIONING_CURRENT_STEP = "PartitioningCurrentStep"; // $NON-NLS-1$ public static final String STRING_REMOTE_INPUT_STEPS = "RemoteInputSteps"; // $NON-NLS-1$ public static final String STRING_REMOTE_OUTPUT_STEPS = "RemoteOutputSteps"; // $NON-NLS-1$ public static final String STRING_STEP_ERROR_LOG = "StepErrorLog"; // $NON-NLS-1$ public static final String STRING_HOP_TYPE_COPY = "HopTypeCopy"; // $NON-NLS-1$ public static final String[] magnificationDescriptions = new String[] { " 200% ", " 150% ", " 100% ", " 75% ", " 50% ", " 25% "}; private TransMeta transMeta; private TransHopMeta candidate; private Map<StepMeta, String> stepLogMap; private List<StepMeta> mouseOverSteps; private StepMeta startHopStep; private Point endHopLocation; private StepMeta endHopStep; private StepMeta noInputStep; private StreamType candidateHopType; private boolean startErrorHopStep; private StepMeta showTargetStreamsStep; private Trans trans; private boolean slowStepIndicatorEnabled; public TransPainter(GCInterface gc, TransMeta transMeta, Point area, ScrollBarInterface hori, ScrollBarInterface vert, TransHopMeta candidate, Point drop_candidate, Rectangle selrect, List<AreaOwner> areaOwners, List<StepMeta> mouseOverSteps, int iconsize, int linewidth, int gridsize, int shadowSize, boolean antiAliasing, String noteFontName, int noteFontHeight, Trans trans, boolean slowStepIndicatorEnabled ) { super(gc, transMeta, area, hori, vert, drop_candidate, selrect, areaOwners, iconsize, linewidth, gridsize, shadowSize, antiAliasing, noteFontName, noteFontHeight ); this.transMeta = transMeta; this.candidate = candidate; this.mouseOverSteps = mouseOverSteps; this.trans = trans; 
this.slowStepIndicatorEnabled = slowStepIndicatorEnabled; stepLogMap = null; } public TransPainter(GCInterface gc, TransMeta transMeta, Point area, ScrollBarInterface hori, ScrollBarInterface vert, TransHopMeta candidate, Point drop_candidate, Rectangle selrect, List<AreaOwner> areaOwners, List<StepMeta> mouseOverSteps, int iconsize, int linewidth, int gridsize, int shadowSize, boolean antiAliasing, String noteFontName, int noteFontHeight) { this(gc, transMeta, area, hori, vert, candidate, drop_candidate, selrect, areaOwners, mouseOverSteps, iconsize, linewidth, gridsize, shadowSize, antiAliasing, noteFontName, noteFontHeight, new Trans(transMeta), false); } private static String[] getPeekTitles(){ String[] titles = { BaseMessages.getString(PKG, "PeekMetric.Column.Copynr"), BaseMessages.getString(PKG, "PeekMetric.Column.Read"), BaseMessages.getString(PKG, "PeekMetric.Column.Written"), BaseMessages.getString(PKG, "PeekMetric.Column.Input"), BaseMessages.getString(PKG, "PeekMetric.Column.Output"), BaseMessages.getString(PKG, "PeekMetric.Column.Updated"), BaseMessages.getString(PKG, "PeekMetric.Column.Rejected"), BaseMessages.getString(PKG, "PeekMetric.Column.Errors"), BaseMessages.getString(PKG, "PeekMetric.Column.Active"), BaseMessages.getString(PKG, "PeekMetric.Column.Time"), BaseMessages.getString(PKG, "PeekMetric.Column.Speed"), BaseMessages.getString(PKG, "PeekMetric.Column.PriorityBufferSizes") }; return titles; } public void buildTransformationImage() { Point max = transMeta.getMaximum(); Point thumb = getThumb(area, max); offset = getOffset(thumb, area); // First clear the image in the background color gc.setBackground(EColor.BACKGROUND); gc.fillRectangle(0, 0, area.x, area.y); // If there is a shadow, we draw the transformation first with an alpha setting // if (shadowSize>0) { shadow = true; gc.setTransform(translationX, translationY, shadowSize, magnification); gc.setAlpha(20); drawTrans(thumb); } // Draw the transformation onto the image // shadow = 
false; gc.setTransform(translationX, translationY, 0, magnification); gc.setAlpha(255); drawTrans(thumb); gc.dispose(); } private void drawTrans(Point thumb) { if (!shadow && gridSize>1) { drawGrid(); } if (hori!=null && vert!=null) { hori.setThumb(thumb.x); vert.setThumb(thumb.y); } gc.setFont(EFont.NOTE); // First the notes for (int i = 0; i < transMeta.nrNotes(); i++) { NotePadMeta ni = transMeta.getNote(i); drawNote(ni); } gc.setFont(EFont.GRAPH); gc.setBackground(EColor.BACKGROUND); for (int i = 0; i < transMeta.nrTransHops(); i++) { TransHopMeta hi = transMeta.getTransHop(i); drawHop(hi); } if (candidate != null) { drawHop(candidate, true); } else { if (startHopStep!=null && endHopLocation!=null) { Point fr = startHopStep.getLocation(); Point to = endHopLocation; if (endHopStep==null) { gc.setForeground(EColor.GRAY); } else { gc.setForeground(EColor.BLUE); } Point start = real2screen(fr.x+iconsize/2, fr.y+iconsize/2); Point end = real2screen(to.x, to.y); drawArrow(start.x, start.y, end.x, end.y, theta, calcArrowLength(), 1.2, startHopStep, endHopStep==null ? endHopLocation : endHopStep); } else if (endHopStep!=null && endHopLocation!=null) { Point fr = endHopLocation; Point to = endHopStep.getLocation(); if (startHopStep==null) { gc.setForeground(EColor.GRAY); } else { gc.setForeground(EColor.BLUE); } Point start = real2screen(fr.x, fr.y); Point end = real2screen(to.x+iconsize/2, to.y+iconsize/2); drawArrow(start.x, start.y, end.x, end.y, theta, calcArrowLength(), 1.2, startHopStep==null ? 
endHopLocation : startHopStep, endHopStep); } } // Draw regular step appearance for (int i = 0; i < transMeta.nrSteps(); i++) { StepMeta stepMeta = transMeta.getStep(i); if (stepMeta.isDrawn()) drawStep(stepMeta); } if (slowStepIndicatorEnabled){ // Highlight possible bottlenecks for (int i = 0; i < transMeta.nrSteps(); i++) { StepMeta stepMeta = transMeta.getStep(i); if (stepMeta.isDrawn()) checkDrawSlowStepIndicator(stepMeta); } } // Draw step status indicators (running vs. done) for (int i = 0; i < transMeta.nrSteps(); i++) { StepMeta stepMeta = transMeta.getStep(i); if (stepMeta.isDrawn()) drawStepStatusIndicator(stepMeta); } // Draw performance table for selected step(s) for (int i = 0; i < transMeta.nrSteps(); i++) { StepMeta stepMeta = transMeta.getStep(i); if (stepMeta.isDrawn()) drawStepPerformanceTable(stepMeta); } // Display an icon on the indicated location signaling to the user that the step in question does not accept input // if (noInputStep!=null) { gc.setLineWidth(2); gc.setForeground(EColor.RED); Point n = noInputStep.getLocation(); gc.drawLine(n.x-5, n.y-5, n.x+iconsize+10, n.y+iconsize+10); gc.drawLine(n.x-5, n.y+iconsize+5, n.x+iconsize+5, n.y-5); } if (drop_candidate != null) { gc.setLineStyle(ELineStyle.SOLID); gc.setForeground(EColor.BLACK); Point screen = real2screen(drop_candidate.x, drop_candidate.y); gc.drawRectangle(screen.x, screen.y, iconsize, iconsize); } if (!shadow) { drawRect(selrect); } } private void checkDrawSlowStepIndicator(StepMeta stepMeta) { if (stepMeta == null){ return; } // draw optional performance indicator if (trans != null) { Point pt = stepMeta.getLocation(); if (pt == null) { pt = new Point(50, 50); } Point screen = real2screen(pt.x, pt.y); int x = screen.x; int y = screen.y; List<StepInterface> steps = trans.findBaseSteps(stepMeta.getName()); for (StepInterface step : steps) { if (step.isRunning()) { int inputRows = step.rowsetInputSize(); int outputRows = step.rowsetOutputSize(); // if the step can't keep up 
with its input, mark it by drawing an animation boolean isSlow = inputRows * 0.85 > outputRows; if (isSlow) { gc.setLineWidth(linewidth + 1); if (System.currentTimeMillis() % 2000 > 1000){ gc.setForeground(EColor.BACKGROUND); gc.setLineStyle(ELineStyle.SOLID); gc.drawRectangle(x + 1, y + 1, iconsize -2, iconsize - 2); gc.setForeground(EColor.DARKGRAY); gc.setLineStyle(ELineStyle.DOT); gc.drawRectangle(x + 1, y + 1, iconsize -2, iconsize - 2); } else{ gc.setForeground(EColor.DARKGRAY); gc.setLineStyle(ELineStyle.SOLID); gc.drawRectangle(x + 1, y + 1, iconsize -2, iconsize - 2); gc.setForeground(EColor.BACKGROUND); gc.setLineStyle(ELineStyle.DOT); gc.drawRectangle(x + 1, y + 1, iconsize -2, iconsize - 2); } } } gc.setLineStyle(ELineStyle.SOLID); } } } private void drawStepPerformanceTable(StepMeta stepMeta) { if (stepMeta == null){ return; } // draw optional performance indicator if (trans != null) { Point pt = stepMeta.getLocation(); if (pt == null) { pt = new Point(50, 50); } Point screen = real2screen(pt.x, pt.y); int x = screen.x; int y = screen.y; List<StepInterface> steps = trans.findBaseSteps(stepMeta.getName()); // draw mouse over performance indicator if (trans.isRunning()) { if (stepMeta.isSelected()) { // determine popup dimensions up front int popupX = x; int popupY = y; int popupWidth = 0; int popupHeight = 1; gc.setFont(EFont.SMALL); Point p = gc.textExtent("0000000000"); int colWidth = p.x+MINI_ICON_MARGIN; int rowHeight = p.y+MINI_ICON_MARGIN; int titleWidth = 0; // calculate max title width to get the colum with String[] titles = TransPainter.getPeekTitles(); for(String title : titles){ Point titleExtent = gc.textExtent(title); titleWidth = Math.max(titleExtent.x+MINI_ICON_MARGIN, titleWidth); popupHeight += titleExtent.y + MINI_ICON_MARGIN; } popupWidth = titleWidth + 2*MINI_ICON_MARGIN; // determine total popup width popupWidth += steps.size() * colWidth; // determine popup position popupX = popupX + (iconsize - popupWidth)/2; popupY = popupY - 
popupHeight - MINI_ICON_MARGIN; // draw the frame gc.setForeground(EColor.DARKGRAY); gc.setBackground(EColor.LIGHTGRAY); gc.setLineWidth(1); gc.fillRoundRectangle(popupX, popupY, popupWidth, popupHeight, 7, 7); // draw the title columns // gc.setBackground(EColor.BACKGROUND); // gc.fillRoundRectangle(popupX, popupY, titleWidth+MINI_ICON_MARGIN, popupHeight, 7, 7); gc.setBackground(EColor.LIGHTGRAY); gc.drawRoundRectangle(popupX, popupY, popupWidth, popupHeight, 7, 7); for (int i=0, barY=popupY;i<titles.length;i++){ // fill each line with a slightly different background color if(i%2 == 1){ gc.setBackground(EColor.BACKGROUND); } else{ gc.setBackground(EColor.LIGHTGRAY); } gc.fillRoundRectangle(popupX+1, barY+1, popupWidth-2, rowHeight, 7, 7); barY += rowHeight; } // draw the header column int rowY = popupY+MINI_ICON_MARGIN; int rowX = popupX+MINI_ICON_MARGIN; gc.setForeground(EColor.BLACK); gc.setBackground(EColor.BACKGROUND); for (int i=0;i<titles.length;i++){ if(i%2 == 1){ gc.setBackground(EColor.BACKGROUND); } else{ gc.setBackground(EColor.LIGHTGRAY); } gc.drawText(titles[i], rowX, rowY); rowY += rowHeight; } // draw the values for each copy of the step gc.setBackground(EColor.LIGHTGRAY); rowX += titleWidth; for(StepInterface step: steps){ rowX += colWidth; rowY = popupY+MINI_ICON_MARGIN; StepStatus stepStatus = new StepStatus(step); String[] fields = stepStatus.getPeekFields(); for(int i=0;i<fields.length;i++){ if(i%2 == 1){ gc.setBackground(EColor.BACKGROUND); } else{ gc.setBackground(EColor.LIGHTGRAY); } drawTextRightAligned(fields[i], rowX, rowY); rowY += rowHeight; } } } } } } private void drawStepStatusIndicator(StepMeta stepMeta) { if (stepMeta == null){ return; } // draw status indicator if (trans != null) { Point pt = stepMeta.getLocation(); if (pt == null) { pt = new Point(50, 50); } Point screen = real2screen(pt.x, pt.y); int x = screen.x; int y = screen.y; List<StepInterface> steps = trans.findBaseSteps(stepMeta.getName()); for(StepInterface step: 
steps){ if (step.getStatus().equals(StepExecutionStatus.STATUS_FINISHED)){ gc.drawImage(EImage.TRUE, x+iconsize-7, y-7); } } } } private void drawTextRightAligned(String txt, int x, int y){ int off = gc.textExtent(txt).x; x -= off; gc.drawText(txt, x, y); } private void drawHop(TransHopMeta hi) { drawHop(hi, false); } private void drawHop(TransHopMeta hi, boolean isCandidate) { StepMeta fs = hi.getFromStep(); StepMeta ts = hi.getToStep(); if (fs != null && ts != null) { drawLine(fs, ts, hi, isCandidate); } } private void drawStep(StepMeta stepMeta) { if (stepMeta == null) return; int alpha = gc.getAlpha(); StepIOMetaInterface ioMeta = stepMeta.getStepMetaInterface().getStepIOMeta(); Point pt = stepMeta.getLocation(); if (pt==null) { pt = new Point(50,50); } Point screen = real2screen(pt.x, pt.y); int x = screen.x; int y = screen.y; boolean stepError = false; if (stepLogMap!=null && !stepLogMap.isEmpty()) { String log = stepLogMap.get(stepMeta); if (!Const.isEmpty(log)) { stepError=true; } } // REMOTE STEPS // First draw an extra indicator for remote input steps... // if (!stepMeta.getRemoteInputSteps().isEmpty()) { gc.setLineWidth(1); gc.setForeground(EColor.GRAY); gc.setBackground(EColor.BACKGROUND); gc.setFont(EFont.GRAPH); String nrInput = Integer.toString(stepMeta.getRemoteInputSteps().size()); Point textExtent = gc.textExtent(nrInput); textExtent.x+=2; // add a tiny little bit of a margin textExtent.y+=2; // Draw it an icon above the step icon. // Draw it an icon and a half to the left // Point point = new Point(x-iconsize-iconsize/2, y-iconsize); gc.drawRectangle(point.x, point.y, textExtent.x, textExtent.y); gc.drawText(nrInput, point.x+1, point.y+1); // Now we draw an arrow from the cube to the step... // gc.drawLine(point.x+textExtent.x, point.y+textExtent.y/2, x-iconsize/2, point.y+textExtent.y/2); drawArrow(x-iconsize/2, point.y+textExtent.y/2, x+iconsize/3, y, Math.toRadians(15), 15, 1.8, null, null ); // Add to the list of areas... 
if (!shadow) { areaOwners.add(new AreaOwner(AreaType.REMOTE_INPUT_STEP, point.x, point.y, textExtent.x, textExtent.y, offset, stepMeta, STRING_REMOTE_INPUT_STEPS)); } } // Then draw an extra indicator for remote output steps... // if (!stepMeta.getRemoteOutputSteps().isEmpty()) { gc.setLineWidth(1); gc.setForeground(EColor.GRAY); gc.setBackground(EColor.BACKGROUND); gc.setFont(EFont.GRAPH); String nrOutput = Integer.toString(stepMeta.getRemoteOutputSteps().size()); Point textExtent = gc.textExtent(nrOutput); textExtent.x+=2; // add a tiny little bit of a margin textExtent.y+=2; // Draw it an icon above the step icon. // Draw it an icon and a half to the right // Point point = new Point(x+2*iconsize+iconsize/2-textExtent.x, y-iconsize); gc.drawRectangle(point.x, point.y, textExtent.x, textExtent.y); gc.drawText(nrOutput, point.x+1, point.y+1); // Now we draw an arrow from the cube to the step... // This time, we start at the left side... // gc.drawLine(point.x, point.y+textExtent.y/2, x+iconsize+iconsize/2, point.y+textExtent.y/2); drawArrow(x+2*iconsize/3, y, x+iconsize+iconsize/2, point.y+textExtent.y/2, Math.toRadians(15), 15, 1.8, null, null ); // Add to the list of areas... if (!shadow) { areaOwners.add(new AreaOwner(AreaType.REMOTE_OUTPUT_STEP, point.x, point.y, textExtent.x, textExtent.y, offset, stepMeta, STRING_REMOTE_OUTPUT_STEPS)); } } // PARTITIONING // If this step is partitioned, we're drawing a small symbol indicating this... 
// if (stepMeta.isPartitioned()) { gc.setLineWidth(1); gc.setForeground(EColor.RED); gc.setBackground(EColor.BACKGROUND); gc.setFont(EFont.GRAPH); PartitionSchema partitionSchema = stepMeta.getStepPartitioningMeta().getPartitionSchema(); if (partitionSchema!=null) { String nrInput; if (partitionSchema.isDynamicallyDefined()) { nrInput = "Dx"+partitionSchema.getNumberOfPartitionsPerSlave(); } else { nrInput = "Px"+Integer.toString(partitionSchema.getPartitionIDs().size()); } Point textExtent = gc.textExtent(nrInput); textExtent.x+=2; // add a tiny little bit of a margin textExtent.y+=2; // Draw it a 2 icons above the step icon. // Draw it an icon and a half to the left // Point point = new Point(x-iconsize-iconsize/2, y-iconsize-iconsize); gc.drawRectangle(point.x, point.y, textExtent.x, textExtent.y); gc.drawText(nrInput, point.x+1, point.y+1); // Now we draw an arrow from the cube to the step... // gc.drawLine(point.x+textExtent.x, point.y+textExtent.y/2, x-iconsize/2, point.y+textExtent.y/2); gc.drawLine(x-iconsize/2, point.y+textExtent.y/2, x+iconsize/3, y); // Also draw the name of the partition schema below the box // gc.setForeground(EColor.GRAY); gc.drawText(Const.NVL(partitionSchema.getName(), "<no partition name>"), point.x, point.y+textExtent.y+3, true); // Add to the list of areas... // if (!shadow) { areaOwners.add(new AreaOwner(AreaType.STEP_PARTITIONING, point.x, point.y, textExtent.x, textExtent.y, offset, stepMeta, STRING_PARTITIONING_CURRENT_STEP)); } } } String name = stepMeta.getName(); if (stepMeta.isSelected()) gc.setLineWidth(linewidth + 2); else gc.setLineWidth(linewidth); // Add to the list of areas... 
if (!shadow) { areaOwners.add(new AreaOwner(AreaType.STEP_ICON, x, y, iconsize, iconsize, offset, transMeta, stepMeta)); } gc.drawStepIcon(x, y, stepMeta); gc.setBackground(EColor.BACKGROUND); if (stepError) { gc.setForeground(EColor.RED); } else { gc.setForeground(EColor.BLACK); } gc.drawRectangle(x - 1, y - 1, iconsize + 1, iconsize + 1); Point namePosition = getNamePosition(name, screen, iconsize ); gc.setForeground(EColor.BLACK); gc.setFont(EFont.GRAPH); gc.drawText(name, namePosition.x, namePosition.y, true); boolean partitioned=false; StepPartitioningMeta meta = stepMeta.getStepPartitioningMeta(); if (stepMeta.isPartitioned() && meta!=null) { partitioned=true; } if (stepMeta.getClusterSchema()!=null) { String message = "C"; if (stepMeta.getClusterSchema().isDynamic()) { message+="xN"; } else { message+="x"+stepMeta.getClusterSchema().findNrSlaves(); } gc.setBackground(EColor.BACKGROUND); gc.setForeground(EColor.BLACK); gc.drawText(message, x + 3 + iconsize, y - 8); } if (stepMeta.getCopies() > 1 && !partitioned) { gc.setBackground(EColor.BACKGROUND); gc.setForeground(EColor.BLACK); String copies = "x" + stepMeta.getCopies(); Point textExtent = gc.textExtent(copies); //gc.fillRectangle(x - 11, y - 11, textExtent.x+2, textExtent.y+2); // gc.drawRectangle(x - 11, y - 11, textExtent.x+2, textExtent.y+2); gc.drawText(copies, x - textExtent.x/2, y - textExtent.y + 3, false); } // If there was an error during the run, the map "stepLogMap" is not empty and not null. // if (stepError) { String log = stepLogMap.get(stepMeta); // Show an error lines icon in the lower right corner of the step... 
// int xError = x + iconsize - 5; int yError = y + iconsize - 5; Point ib = gc.getImageBounds(EImage.STEP_ERROR); gc.drawImage(EImage.STEP_ERROR, xError, yError); if (!shadow) { areaOwners.add(new AreaOwner(AreaType.STEP_ERROR_ICON, pt.x + iconsize-5, pt.y + iconsize-5, ib.x, ib.y, offset, log, STRING_STEP_ERROR_LOG)); } } // Optionally drawn the mouse-over information // if (mouseOverSteps.contains(stepMeta)) { EImage[] miniIcons = new EImage[] { EImage.INPUT, EImage.EDIT, EImage.CONTEXT_MENU, EImage.OUTPUT, }; int totalHeight=0; int totalIconsWidth=0; int totalWidth=2*MINI_ICON_MARGIN; for (EImage miniIcon : miniIcons) { Point bounds = gc.getImageBounds(miniIcon); totalWidth+=bounds.x+MINI_ICON_MARGIN; totalIconsWidth+=bounds.x+MINI_ICON_MARGIN; if (bounds.y>totalHeight) totalHeight=bounds.y; } totalHeight+=2*MINI_ICON_MARGIN; gc.setFont(EFont.SMALL); String trimmedName = stepMeta.getName().length()<30 ? stepMeta.getName() : stepMeta.getName().substring(0,30); Point nameExtent = gc.textExtent(trimmedName); nameExtent.y+=2*MINI_ICON_MARGIN; nameExtent.x+=3*MINI_ICON_MARGIN; totalHeight+=nameExtent.y; if (nameExtent.x>totalWidth) totalWidth=nameExtent.x; int areaX = x+iconsize/2-totalWidth/2+MINI_ICON_SKEW; int areaY = y+iconsize+MINI_ICON_DISTANCE; gc.setForeground(EColor.DARKGRAY); gc.setBackground(EColor.LIGHTGRAY); gc.setLineWidth(1); gc.fillRoundRectangle(areaX, areaY, totalWidth, totalHeight, 7, 7); gc.drawRoundRectangle(areaX, areaY, totalWidth, totalHeight, 7, 7); gc.setBackground(EColor.BACKGROUND); gc.fillRoundRectangle(areaX+2, areaY+2, totalWidth-MINI_ICON_MARGIN+1, nameExtent.y-MINI_ICON_MARGIN, 7, 7); gc.setForeground(EColor.BLACK); gc.drawText(trimmedName, areaX+(totalWidth-nameExtent.x)/2+MINI_ICON_MARGIN, areaY+MINI_ICON_MARGIN, true); gc.setForeground(EColor.DARKGRAY); gc.setBackground(EColor.LIGHTGRAY); gc.setFont(EFont.GRAPH); areaOwners.add(new AreaOwner(AreaType.MINI_ICONS_BALLOON, areaX, areaY, totalWidth, totalHeight, offset, stepMeta, 
ioMeta)); gc.fillPolygon(new int[] { areaX+totalWidth/2-MINI_ICON_TRIANGLE_BASE/2+1, areaY+2, areaX+totalWidth/2+MINI_ICON_TRIANGLE_BASE/2, areaY+2, areaX+totalWidth/2-MINI_ICON_SKEW, areaY-MINI_ICON_DISTANCE-5, }); gc.drawPolyline(new int[] { areaX+totalWidth/2-MINI_ICON_TRIANGLE_BASE/2+1, areaY, areaX+totalWidth/2-MINI_ICON_SKEW, areaY-MINI_ICON_DISTANCE-5, areaX+totalWidth/2+MINI_ICON_TRIANGLE_BASE/2, areaY, areaX+totalWidth/2-MINI_ICON_SKEW, areaY-MINI_ICON_DISTANCE-5, }); gc.setBackground(EColor.BACKGROUND); // Put on the icons... // int xIcon = areaX+(totalWidth-totalIconsWidth)/2+MINI_ICON_MARGIN; int yIcon = areaY+5+nameExtent.y; for (int i=0;i<miniIcons.length;i++) { EImage miniIcon = miniIcons[i]; Point bounds = gc.getImageBounds(miniIcon); boolean enabled=false; switch(i) { case 0: // INPUT enabled=ioMeta.isInputAcceptor() || ioMeta.isInputDynamic(); areaOwners.add(new AreaOwner(AreaType.STEP_INPUT_HOP_ICON, xIcon, yIcon, bounds.x, bounds.y, offset, stepMeta, ioMeta)); break; case 1: // EDIT enabled=true; areaOwners.add(new AreaOwner(AreaType.STEP_EDIT_ICON, xIcon, yIcon, bounds.x, bounds.y, offset, stepMeta, ioMeta)); break; case 2: // STEP_MENU enabled=true; areaOwners.add(new AreaOwner(AreaType.STEP_MENU_ICON, xIcon, yIcon, bounds.x, bounds.y, offset, stepMeta, ioMeta)); break; case 3: // OUTPUT enabled=ioMeta.isOutputProducer() || ioMeta.isOutputDynamic(); areaOwners.add(new AreaOwner(AreaType.STEP_OUTPUT_HOP_ICON, xIcon, yIcon, bounds.x, bounds.y, offset, stepMeta, ioMeta)); break; } if (enabled) { gc.setAlpha(255); } else { gc.setAlpha(100); } gc.drawImage(miniIcon, xIcon, yIcon); xIcon+=bounds.x+5; } // OK, see if we need to show a slide-out for target streams... 
// if (showTargetStreamsStep!=null) { ioMeta = showTargetStreamsStep.getStepMetaInterface().getStepIOMeta(); List<StreamInterface> targetStreams = ioMeta.getTargetStreams(); int targetsWidth=0; int targetsHeight=0;; for (int i=0;i<targetStreams.size();i++) { String description = targetStreams.get(i).getDescription(); Point extent = gc.textExtent(description); if (extent.x>targetsWidth) targetsWidth=extent.x; targetsHeight+=extent.y+MINI_ICON_MARGIN; } targetsWidth+=MINI_ICON_MARGIN; gc.setBackground(EColor.LIGHTGRAY); gc.fillRoundRectangle(areaX, areaY+totalHeight+2, targetsWidth, targetsHeight, 7, 7); gc.drawRoundRectangle(areaX, areaY+totalHeight+2, targetsWidth, targetsHeight, 7, 7); int targetY=areaY+totalHeight+MINI_ICON_MARGIN; for (int i=0;i<targetStreams.size();i++) { String description = targetStreams.get(i).getDescription(); Point extent = gc.textExtent(description); gc.drawText(description, areaX+MINI_ICON_MARGIN, targetY, true); if (i<targetStreams.size()-1) { gc.drawLine(areaX+MINI_ICON_MARGIN/2, targetY+extent.y+3, areaX+targetsWidth-MINI_ICON_MARGIN/2, targetY+extent.y+2); } areaOwners.add(new AreaOwner(AreaType.STEP_TARGET_HOP_ICON_OPTION, areaX, targetY, targetsWidth, extent.y+MINI_ICON_MARGIN, offset, stepMeta, targetStreams.get(i))); targetY+=extent.y+MINI_ICON_MARGIN; } gc.setBackground(EColor.BACKGROUND); } } // Restore the previous alpha value // gc.setAlpha(alpha); } public Point getNamePosition(String string, Point screen, int iconsize) { Point textsize = gc.textExtent(string); int xpos = screen.x + (iconsize / 2) - (textsize.x / 2); int ypos = screen.y + iconsize + 5; return new Point(xpos, ypos); } private void drawLine(StepMeta fs, StepMeta ts, TransHopMeta hi, boolean is_candidate) { int line[] = getLine(fs, ts); EColor col; ELineStyle linestyle=ELineStyle.SOLID; int activeLinewidth = linewidth; if (is_candidate) { col = EColor.BLUE; } else { if (hi.isEnabled()) { if (fs.isSendingErrorRowsToStep(ts)) { col = EColor.RED; linestyle = 
ELineStyle.DOT; activeLinewidth = linewidth+1; } else { col = EColor.BLACK; } } else { col = EColor.GRAY; } } if (hi.split) activeLinewidth = linewidth+2; // Check to see if the source step is an info step for the target step. // StepIOMetaInterface ioMeta = ts.getStepMetaInterface().getStepIOMeta(); List<StreamInterface> infoStreams = ioMeta.getInfoStreams(); if (!infoStreams.isEmpty()) { // Check this situation, the source step can't run in multiple copies! // for (StreamInterface stream : infoStreams) { if (fs.getName().equalsIgnoreCase(stream.getStepname())) { // This is the info step over this hop! // if (fs.getCopies()>1) { // This is not a desirable situation, it will always end in error. // As such, it's better not to give feedback on it. // We do this by drawing an error icon over the hop... // col=EColor.RED; } } } } gc.setForeground(col); gc.setLineStyle(linestyle); gc.setLineWidth(activeLinewidth); drawArrow(line, fs, ts); if (hi.split) gc.setLineWidth(linewidth); gc.setForeground(EColor.BLACK); gc.setBackground(EColor.BACKGROUND); gc.setLineStyle(ELineStyle.SOLID); } private int[] getLine(StepMeta fs, StepMeta ts) { Point from = fs.getLocation(); Point to = ts.getLocation(); int x1 = from.x + iconsize / 2; int y1 = from.y + iconsize / 2; int x2 = to.x + iconsize / 2; int y2 = to.y + iconsize / 2; return new int[] { x1, y1, x2, y2 }; } private void drawArrow(int line[], Object startObject, Object endObject) { Point screen_from = real2screen(line[0], line[1]); Point screen_to = real2screen(line[2], line[3]); drawArrow(screen_from.x, screen_from.y, screen_to.x, screen_to.y, theta, calcArrowLength(), -1, startObject, endObject); } private void drawArrow(int x1, int y1, int x2, int y2, double theta, int size, double factor, Object startObject, Object endObject) { int mx, my; int x3; int y3; int x4; int y4; int a, b, dist; double angle; gc.drawLine(x1, y1, x2, y2); // in between 2 points mx = x1 + (x2 - x1) / 2; my = y1 + (y2 - y1) / 2; a = Math.abs(x2 - 
x1); b = Math.abs(y2 - y1); dist = (int) Math.sqrt(a * a + b * b); // determine factor (position of arrow to left side or right side // 0-->100%) if (factor<0) { if (dist >= 2 * iconsize) factor = 1.3; else factor = 1.2; } // in between 2 points mx = (int) (x1 + factor * (x2 - x1) / 2); my = (int) (y1 + factor * (y2 - y1) / 2); // calculate points for arrowhead angle = Math.atan2(y2 - y1, x2 - x1) + Math.PI; x3 = (int) (mx + Math.cos(angle - theta) * size); y3 = (int) (my + Math.sin(angle - theta) * size); x4 = (int) (mx + Math.cos(angle + theta) * size); y4 = (int) (my + Math.sin(angle + theta) * size); gc.switchForegroundBackgroundColors(); gc.fillPolygon(new int[] { mx, my, x3, y3, x4, y4 }); gc.switchForegroundBackgroundColors(); if ( startObject instanceof StepMeta && endObject instanceof StepMeta) { factor = 0.8; StepMeta fs = (StepMeta)startObject; StepMeta ts = (StepMeta)endObject; // in between 2 points mx = (int) (x1 + factor * (x2 - x1) / 2) - 8; my = (int) (y1 + factor * (y2 - y1) / 2) - 8; boolean errorHop = fs.isSendingErrorRowsToStep(ts) || (startErrorHopStep && fs.equals(startHopStep)); boolean targetHop = Const.indexOfString(ts.getName(), fs.getStepMetaInterface().getStepIOMeta().getTargetStepnames())>=0; if (targetHop) { StepIOMetaInterface ioMeta = fs.getStepMetaInterface().getStepIOMeta(); StreamInterface targetStream = ioMeta.findTargetStream(ts); if (targetStream!=null) { EImage hopsIcon = BasePainter.getStreamIconImage(targetStream.getStreamIcon()); Point bounds = gc.getImageBounds(hopsIcon); gc.drawImage(hopsIcon, mx, my); if (!shadow) { areaOwners.add(new AreaOwner(AreaType.STEP_TARGET_HOP_ICON, mx, my, bounds.x, bounds.y, offset, fs, targetStream)); } } } else if (!fs.isDistributes() && !ts.getStepPartitioningMeta().isMethodMirror() && !errorHop) { Point bounds = gc.getImageBounds(EImage.COPY_ROWS); gc.drawImage(EImage.COPY_ROWS, mx, my); if (!shadow) { areaOwners.add(new AreaOwner(AreaType.HOP_COPY_ICON, mx, my, bounds.x, bounds.y, 
offset, fs, STRING_HOP_TYPE_COPY)); } mx+=16; } if (errorHop) { Point bounds = gc.getImageBounds(EImage.COPY_ROWS); gc.drawImage(EImage.ERROR, mx, my); if (!shadow) { areaOwners.add(new AreaOwner(AreaType.HOP_ERROR_ICON, mx, my, bounds.x, bounds.y, offset, fs, ts)); } mx+=16; } StepIOMetaInterface ioMeta = ts.getStepMetaInterface().getStepIOMeta(); String[] infoStepnames = ioMeta.getInfoStepnames(); if ( (candidateHopType==StreamType.INFO && ts.equals(endHopStep) && fs.equals(startHopStep)) || Const.indexOfString(fs.getName(), infoStepnames) >= 0) { Point bounds = gc.getImageBounds(EImage.INFO); gc.drawImage(EImage.INFO, mx, my); if (!shadow) { areaOwners.add(new AreaOwner(AreaType.HOP_INFO_ICON, mx, my, bounds.x, bounds.y, offset, fs, ts)); } mx+=16; } // Check to see if the source step is an info step for the target step. // if (!Const.isEmpty(infoStepnames)) { // Check this situation, the source step can't run in multiple copies! // for (String infoStep : infoStepnames) { if (fs.getName().equalsIgnoreCase(infoStep)) { // This is the info step over this hop! // if (fs.getCopies()>1) { // This is not a desirable situation, it will always end in error. // As such, it's better not to give feedback on it. // We do this by drawing an error icon over the hop... 
// gc.drawImage(EImage.ERROR, mx, my); if (!shadow) { areaOwners.add(new AreaOwner(AreaType.HOP_INFO_STEP_COPIES_ERROR, mx, my, MINI_ICON_SIZE, MINI_ICON_SIZE, offset, fs, ts)); } mx+=16; } } } } } } /** * @return the translationX */ public float getTranslationX() { return translationX; } /** * @param translationX the translationX to set */ public void setTranslationX(float translationX) { this.translationX = translationX; } /** * @return the translationY */ public float getTranslationY() { return translationY; } /** * @param translationY the translationY to set */ public void setTranslationY(float translationY) { this.translationY = translationY; } /** * @return the stepLogMap */ public Map<StepMeta, String> getStepLogMap() { return stepLogMap; } /** * @param stepLogMap the stepLogMap to set */ public void setStepLogMap(Map<StepMeta, String> stepLogMap) { this.stepLogMap = stepLogMap; } /** * @param startHopStep the startHopStep to set */ public void setStartHopStep(StepMeta startHopStep) { this.startHopStep = startHopStep; } /** * @param endHopLocation the endHopLocation to set */ public void setEndHopLocation(Point endHopLocation) { this.endHopLocation = endHopLocation; } /** * @param noInputStep the noInputStep to set */ public void setNoInputStep(StepMeta noInputStep) { this.noInputStep = noInputStep; } /** * @param endHopStep the endHopStep to set */ public void setEndHopStep(StepMeta endHopStep) { this.endHopStep = endHopStep; } public void setCandidateHopType(StreamType candidateHopType) { this.candidateHopType = candidateHopType; } public void setStartErrorHopStep(boolean startErrorHopStep) { this.startErrorHopStep = startErrorHopStep; } /** * @return the showTargetStreamsStep */ public StepMeta getShowTargetStreamsStep() { return showTargetStreamsStep; } /** * @param showTargetStreamsStep the showTargetStreamsStep to set */ public void setShowTargetStreamsStep(StepMeta showTargetStreamsStep) { this.showTargetStreamsStep = showTargetStreamsStep; } }
/* * Copyright 2015, Yahoo Inc. * Copyrights licensed under the Apache 2.0 License. * See the accompanying LICENSE file for terms. */ package com.yahoo.squidb.sqlitebindings; import com.yahoo.squidb.android.SquidCursorWrapper; import com.yahoo.squidb.data.ICursor; import com.yahoo.squidb.data.ISQLiteDatabase; import com.yahoo.squidb.data.ISQLitePreparedStatement; import com.yahoo.squidb.data.SquidTransactionListener; import org.sqlite.database.sqlite.SQLiteDatabase; import org.sqlite.database.sqlite.SQLiteStatement; import org.sqlite.database.sqlite.SQLiteTransactionListener; /** * ISQLiteDatabase implementation that wraps {@link org.sqlite.database.sqlite.SQLiteDatabase} from the Android * SQLite bindings project (https://www.sqlite.org/android/doc/trunk/www/index.wiki) */ public class SQLiteBindingsAdapter implements ISQLiteDatabase { private final SQLiteDatabase db; public SQLiteBindingsAdapter(SQLiteDatabase db) { if (db == null) { throw new NullPointerException("Can't create SQLiteDatabaseAdapter with a null SQLiteDatabase"); } this.db = db; } private static class SQLiteTransactionListenerAdapter implements SQLiteTransactionListener { private final SquidTransactionListener listener; private SQLiteTransactionListenerAdapter(SquidTransactionListener listener) { this.listener = listener; } @Override public void onBegin() { listener.onBegin(); } @Override public void onCommit() { listener.onCommit(); } @Override public void onRollback() { listener.onRollback(); } } @Override public void beginTransaction() { db.beginTransaction(); } @Override public void beginTransactionNonExclusive() { db.beginTransactionNonExclusive(); } @Override public void beginTransactionWithListener(SquidTransactionListener listener) { db.beginTransactionWithListener(new SQLiteTransactionListenerAdapter(listener)); } @Override public void beginTransactionWithListenerNonExclusive(SquidTransactionListener listener) { db.beginTransactionWithListenerNonExclusive(new 
SQLiteTransactionListenerAdapter(listener)); } @Override public void endTransaction() { db.endTransaction(); } @Override public void execSQL(String sql) { db.execSQL(sql); } @Override public void execSQL(String sql, Object[] bindArgs) { db.execSQL(sql, bindArgs); } @Override public boolean inTransaction() { return db.inTransaction(); } @Override public boolean isOpen() { return db.isOpen(); } @Override public int getVersion() { return db.getVersion(); } @Override public void setVersion(int version) { db.setVersion(version); } @Override public ICursor rawQuery(String sql, Object[] bindArgs) { return new SquidCursorWrapper( db.rawQueryWithFactory(new SQLiteBindingsCursorFactory(bindArgs), sql, null, null)); } @Override public String simpleQueryForString(String sql, Object[] bindArgs) { SQLiteStatement statement = null; try { statement = db.compileStatement(sql); SQLiteBindingsCursorFactory.bindArgumentsToProgram(statement, bindArgs); return statement.simpleQueryForString(); } finally { if (statement != null) { statement.close(); } } } @Override public long simpleQueryForLong(String sql, Object[] bindArgs) { SQLiteStatement statement = null; try { statement = db.compileStatement(sql); SQLiteBindingsCursorFactory.bindArgumentsToProgram(statement, bindArgs); return statement.simpleQueryForLong(); } finally { if (statement != null) { statement.close(); } } } @Override public void setTransactionSuccessful() { db.setTransactionSuccessful(); } @Override public String toString() { return db.toString(); } @Override public boolean yieldIfContendedSafely() { return db.yieldIfContendedSafely(); } @Override public boolean yieldIfContendedSafely(long sleepAfterYieldDelay) { return db.yieldIfContendedSafely(sleepAfterYieldDelay); } @Override public void close() { db.close(); } @Override public void disableWriteAheadLogging() { db.disableWriteAheadLogging(); } @Override public boolean enableWriteAheadLogging() { return db.enableWriteAheadLogging(); } @Override public boolean 
isWriteAheadLoggingEnabled() { return db.isWriteAheadLoggingEnabled(); } @Override public long getMaximumSize() { return db.getMaximumSize(); } @Override public long getPageSize() { return db.getPageSize(); } @Override public String getPath() { return db.getPath(); } @Override public boolean isDatabaseIntegrityOk() { return db.isDatabaseIntegrityOk(); } @Override public boolean isDbLockedByCurrentThread() { return db.isDbLockedByCurrentThread(); } @Override public boolean isReadOnly() { return db.isReadOnly(); } @Override public boolean needUpgrade(int newVersion) { return db.needUpgrade(newVersion); } @Override public void setForeignKeyConstraintsEnabled(boolean enable) { db.setForeignKeyConstraintsEnabled(enable); } @Override public void setMaxSqlCacheSize(int cacheSize) { db.setMaxSqlCacheSize(cacheSize); } @Override public void setMaximumSize(long numBytes) { db.setMaximumSize(numBytes); } @Override public void setPageSize(long numBytes) { db.setPageSize(numBytes); } @Override public int executeUpdateDelete(String sql, Object[] bindArgs) { SQLiteStatement statement = null; try { statement = db.compileStatement(sql); SQLiteBindingsCursorFactory.bindArgumentsToProgram(statement, bindArgs); return statement.executeUpdateDelete(); } finally { if (statement != null) { statement.close(); } } } @Override public long executeInsert(String sql, Object[] bindArgs) { SQLiteStatement statement = null; try { statement = db.compileStatement(sql); SQLiteBindingsCursorFactory.bindArgumentsToProgram(statement, bindArgs); return statement.executeInsert(); } finally { if (statement != null) { statement.close(); } } } @Override public void ensureSqlCompiles(String sql) { SQLiteStatement statement = null; try { statement = db.compileStatement(sql); } finally { if (statement != null) { statement.close(); } } } @Override public ISQLitePreparedStatement prepareStatement(String sql) { return new SQLiteBindingsStatementAdapter(db.compileStatement(sql)); } @Override public SQLiteDatabase 
getWrappedObject() { return db; } }
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB)
// Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source
// schema.
// Generated on: 2013.09.01 at 02:14:43 PM MESZ
//
package de.nobio.pfmsim.resource;

import java.util.ArrayList;
import java.util.List;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;

import de.nobio.pfmsim.project.Phase;
import de.nobio.pfmsim.project.Project;

/**
 * A JAXB-mapped resource of the portfolio simulation: a (possibly multi-headed,
 * see {@link #getCount()}) pool of capacity with a set of {@link Skill}s and a
 * lazily-created allocation {@link Plan}.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "resource")
public class Resource {

    /** Internal identifier; not part of the XML mapping. */
    @XmlTransient
    private String id;

    /** Base availability per period (0.0 .. presumably 1.0 — not enforced here). */
    @XmlElement(required = false)
    private Double baseAvailability = 0.0D;

    /** Number of instances of this resource. */
    @XmlAttribute(required = true)
    private Integer count = 0;

    /** Reserved capacity; not part of the XML mapping. */
    @XmlTransient
    private Double reserved = 0.0D;

    /** The skills this resource offers. */
    @XmlElement(name = "skill", nillable = true, required = true)
    private List<Skill> skills;

    /** Allocation plan; created lazily from {@link #baseAvailability}. */
    @XmlTransient
    private Plan plan;

    /**
     * Lazily creates and returns the allocation plan. Centralizes the
     * null-check that was previously duplicated in every plan-reading method.
     *
     * @return the (never null) plan
     */
    private Plan plan() {
        if (plan == null) {
            plan = new Plan(baseAvailability);
        }
        return plan;
    }

    /**
     * Adds a skill to this resource.
     *
     * @param skill the skill to add
     */
    public void addSkill(Skill skill) {
        getSkills().add(skill);
    }

    /**
     * Allocates workload of a project phase onto this resource's plan.
     *
     * @param project the project being planned
     * @param phase the phase to allocate
     * @param phasePeriod the period the phase occupies
     * @param workload the workload to allocate
     * @return the period the workload was allocated to
     */
    public Period allocate(Project project, Phase phase, Period phasePeriod, Long workload) {
        return plan.allocate(project, phase, phasePeriod, workload);
    }

    /**
     * Creates a copy of this resource carrying the same base availability,
     * count and skills.
     * <p>
     * NOTE(review): {@code id}, {@code reserved} and {@code plan} are
     * deliberately not copied (the clone starts with a fresh plan) — confirm
     * this is intended.
     *
     * @return a copy of this resource
     * @throws CloneNotSupportedException declared for signature compatibility
     */
    @Override
    protected Resource clone() throws CloneNotSupportedException {
        Resource r = new Resource();
        // Double/Integer are immutable, so sharing the references is safe;
        // the deprecated new Double(..)/new Integer(..) boxing ctors are gone.
        r.setBaseAvailability(baseAvailability);
        r.setCount(count);
        // Guard against NPE: skills may still be null if nothing was unmarshalled.
        if (skills != null) {
            r.getSkills().addAll(skills);
        }
        return r;
    }

    /**
     * Lets the plan contribute its current allocation.
     * <p>
     * NOTE(review): unlike the getters below, this does NOT lazily create the
     * plan and will NPE when no plan exists yet — preserved as-is; confirm
     * whether that is intentional.
     *
     * @return the plan's contribution
     */
    public Double contribute() {
        return plan.contribute();
    }

    /**
     * @return the current allocation of the (lazily created) plan
     */
    public Double getAllocation() {
        return plan().getAllocation();
    }

    /**
     * @return the base availability per period
     */
    public Double getBaseAvailability() {
        return baseAvailability;
    }

    /**
     * @return the number of instances of this resource
     */
    public Integer getCount() {
        return count;
    }

    /**
     * @return the total free capacity of the (lazily created) plan
     */
    public Double getFreeCapacity() {
        return plan().getFreeCapacity();
    }

    /**
     * @param period the period to inspect
     * @return free capacity within this period
     */
    public Double getFreeCapacity(Period period) {
        return plan().getFreeCapacity(period);
    }

    /**
     * @return the free capacity in the plan's first slot, or the full base
     *         availability when nothing has been planned yet
     */
    public Double getFreeCapacityForNow() {
        if (plan().getFirst() == null) {
            return baseAvailability;
        }
        return baseAvailability - plan().getFirst().getAllocation();
    }

    /**
     * @return the internal identifier
     */
    public String getId() {
        return id;
    }

    /**
     * @return the allocation plan (may be null if never touched via a lazy getter)
     */
    public Plan getPlan() {
        return plan;
    }

    /**
     * @return the (never null) list of skills; created on first access
     */
    public List<Skill> getSkills() {
        if (skills == null) {
            skills = new ArrayList<Skill>();
        }
        return this.skills;
    }

    /**
     * @param availability the base availability to set
     */
    public void setBaseAvailability(Double availability) {
        this.baseAvailability = availability;
    }

    /**
     * @param count the instance count to set
     */
    public void setCount(Integer count) {
        this.count = count;
    }

    /**
     * @param id the identifier to set
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * String form for logging.
     * <p>
     * NOTE(review): calls {@link #getAllocation()}, which lazily creates the
     * plan — a side effect inside toString; preserved as-is.
     *
     * @return a human-readable description of this resource
     */
    @Override
    public String toString() {
        return "\n\tResource [id=" + id + ", baseAvailability=" + baseAvailability + ", count=" + count + ", reserved=" + getAllocation() + ", skills=" + skills + "]";
    }
}
/*
 * //******************************************************************
 * //
 * // Copyright 2015 Intel Corporation.
 * //
 * //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
 * //
 * // Licensed under the Apache License, Version 2.0 (the "License");
 * // you may not use this file except in compliance with the License.
 * // You may obtain a copy of the License at
 * //
 * //      http://www.apache.org/licenses/LICENSE-2.0
 * //
 * // Unless required by applicable law or agreed to in writing, software
 * // distributed under the License is distributed on an "AS IS" BASIS,
 * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * // See the License for the specific language governing permissions and
 * // limitations under the License.
 * //
 * //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
 */
package org.iotivity.base.examples;

import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ScrollView;
import android.widget.TextView;

import org.iotivity.base.ModeType;
import org.iotivity.base.OcConnectivityType;
import org.iotivity.base.OcException;
import org.iotivity.base.OcPlatform;
import org.iotivity.base.OcRepresentation;
import org.iotivity.base.PlatformConfig;
import org.iotivity.base.QualityOfService;
import org.iotivity.base.ServiceType;

import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;

/**
 * This sample demonstrates the device discovery feature.
 * The client queries for the device related information stored by the server.
 */
public class DeviceDiscoveryClient extends Activity implements
        OcPlatform.OnDeviceFoundListener,
        OcPlatform.OnPlatformFoundListener {

    /**
     * Configures the platform and issues the platform- and device-information
     * queries. Runs on a background thread (started from the button handler);
     * results arrive via the listener callbacks below.
     */
    private void startDeviceDiscoveryClient() {
        Context context = this;

        PlatformConfig platformConfig = new PlatformConfig(
                context,
                ServiceType.IN_PROC,
                ModeType.CLIENT,
                "0.0.0.0", // By setting to "0.0.0.0", it binds to all available interfaces
                0,         // Uses randomly available port
                QualityOfService.LOW
        );

        msg("Configuring platform.");
        OcPlatform.Configure(platformConfig);
        sleep(1);

        try {
            msg("Querying for platform information...");
            OcPlatform.getPlatformInfo("", OcPlatform.WELL_KNOWN_PLATFORM_QUERY,
                    EnumSet.of(OcConnectivityType.CT_DEFAULT), this);
        } catch (OcException e) {
            Log.e(TAG, e.toString());
            msg("Failed to query for platform information");
        }
        sleep(2);

        try {
            msg("Querying for device information...");
            OcPlatform.getDeviceInfo("", OcPlatform.WELL_KNOWN_DEVICE_QUERY,
                    EnumSet.of(OcConnectivityType.CT_DEFAULT), this);
        } catch (OcException e) {
            Log.e(TAG, e.toString());
            msg("Failed to query for device information");
        }
        sleep(2);

        enableStartButton();
        printLine();
    }

    /** Maps OIC platform-info representation keys to display labels. */
    private final static Map<String, String> PLATFORM_INFO_KEYS = new HashMap<String, String>() {{
        put("pi", "Platform ID: ");
        put("mnmn", "Manufacturer name: ");
        put("mnml", "Manufacturer url: ");
        put("mnmo", "Manufacturer Model No: ");
        put("mndt", "Manufactured Date: ");
        put("mnpv", "Manufacturer Platform Version: ");
        put("mnos", "Manufacturer OS version: ");
        put("mnhw", "Manufacturer hardware version: ");
        put("mnfv", "Manufacturer firmware version: ");
        put("mnsl", "Manufacturer support url: ");
        put("st", "Manufacturer system time: ");
    }};

    /**
     * Callback for a platform-information response; prints every known key.
     */
    @Override
    public synchronized void onPlatformFound(OcRepresentation ocRepresentation) {
        msg("Platform Information received:");
        try {
            for (String key : PLATFORM_INFO_KEYS.keySet()) {
                msg("\t" + PLATFORM_INFO_KEYS.get(key) + ocRepresentation.getValue(key));
            }
        } catch (OcException e) {
            Log.e(TAG, e.toString());
            msg("Failed to read platform info values.");
        }
        printLine();
    }

    /** Maps OIC device-info representation keys to display labels. */
    private final static Map<String, String> DEVICE_INFO_KEYS = new HashMap<String, String>() {{
        put("di", "Device ID: ");
        put("n", "Device name: ");
        put("lcv", "Spec version url: ");
        put("dmv", "Data Model: ");
    }};

    /**
     * Callback for a device-information response; prints every known key.
     */
    @Override
    public synchronized void onDeviceFound(OcRepresentation ocRepresentation) {
        msg("Device Information received:");
        try {
            for (String key : DEVICE_INFO_KEYS.keySet()) {
                msg("\t" + DEVICE_INFO_KEYS.get(key) + ocRepresentation.getValue(key));
            }
        } catch (OcException e) {
            Log.e(TAG, e.toString());
            msg("Failed to read device info values.");
        }
        printLine();
    }

    //******************************************************************************
    // End of the OIC specific code
    //******************************************************************************

    private final static String TAG = DeviceDiscoveryClient.class.getSimpleName();
    private TextView mConsoleTextView;
    private ScrollView mScrollView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_device_discovery_client);

        mConsoleTextView = (TextView) findViewById(R.id.consoleTextView);
        mConsoleTextView.setMovementMethod(new ScrollingMovementMethod());
        mScrollView = (ScrollView) findViewById(R.id.scrollView);
        mScrollView.fullScroll(View.FOCUS_DOWN);
        final Button button = (Button) findViewById(R.id.button);

        if (null == savedInstanceState) {
            button.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    button.setText("Re-start");
                    button.setEnabled(false);
                    // Discovery blocks (sleeps between queries) — keep it off the UI thread.
                    new Thread(new Runnable() {
                        public void run() {
                            startDeviceDiscoveryClient();
                        }
                    }).start();
                }
            });
        } else {
            String consoleOutput = savedInstanceState.getString("consoleOutputString");
            mConsoleTextView.setText(consoleOutput);
        }
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putString("consoleOutputString", mConsoleTextView.getText().toString());
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        String consoleOutput = savedInstanceState.getString("consoleOutputString");
        mConsoleTextView.setText(consoleOutput);
    }

    /** Re-enables the start button on the UI thread. */
    private void enableStartButton() {
        runOnUiThread(new Runnable() {
            public void run() {
                Button button = (Button) findViewById(R.id.button);
                button.setEnabled(true);
            }
        });
    }

    /**
     * Sleeps for the given number of seconds.
     * <p>
     * Fix: on interruption the interrupt status is restored
     * ({@code Thread.currentThread().interrupt()}) instead of being swallowed,
     * and the redundant {@code printStackTrace()} is dropped in favor of the
     * existing Log call.
     *
     * @param seconds how long to sleep
     */
    private void sleep(int seconds) {
        try {
            Thread.sleep(seconds * 1000L);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            Log.e(TAG, e.toString());
        }
    }

    /** Appends a line to the on-screen console (on the UI thread) and logs it. */
    private void msg(final String text) {
        runOnUiThread(new Runnable() {
            public void run() {
                mConsoleTextView.append("\n");
                mConsoleTextView.append(text);
                mScrollView.fullScroll(View.FOCUS_DOWN);
            }
        });
        Log.i(TAG, text);
    }

    /** Prints a horizontal separator line to the console. */
    private void printLine() {
        msg("------------------------------------------------------------------------");
    }
}
/* Copyright (c) 2014 Boundless and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Distribution License v1.0
 * which accompanies this distribution, and is available at
 * https://www.eclipse.org/org/documents/edl-v10.html
 *
 * Contributors:
 * Victor Olaya (Boundless) - initial implementation
 */
package org.locationtech.geogig.geotools.plumbing;

import static com.google.common.base.Preconditions.checkArgument;

import java.io.IOException;
import java.util.Iterator;

import javax.annotation.Nullable;

import org.geotools.data.DefaultTransaction;
import org.geotools.data.Transaction;
import org.geotools.data.simple.SimpleFeatureStore;
import org.geotools.factory.Hints;
import org.geotools.feature.FeatureCollection;
import org.geotools.feature.FeatureIterator;
import org.geotools.feature.collection.BaseFeatureCollection;
import org.geotools.feature.collection.DelegateFeatureIterator;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.locationtech.geogig.api.AbstractGeoGigOp;
import org.locationtech.geogig.api.NodeRef;
import org.locationtech.geogig.api.ObjectId;
import org.locationtech.geogig.api.ProgressListener;
import org.locationtech.geogig.api.RevFeature;
import org.locationtech.geogig.api.RevFeatureType;
import org.locationtech.geogig.api.RevFeatureTypeImpl;
import org.locationtech.geogig.api.RevObject.TYPE;
import org.locationtech.geogig.api.RevTree;
import org.locationtech.geogig.api.plumbing.FindTreeChild;
import org.locationtech.geogig.api.plumbing.ResolveTreeish;
import org.locationtech.geogig.api.plumbing.diff.DiffEntry;
import org.locationtech.geogig.api.porcelain.DiffOp;
import org.locationtech.geogig.geotools.plumbing.GeoToolsOpException.StatusCode;
import org.locationtech.geogig.storage.ObjectDatabase;
import org.opengis.feature.Feature;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;

import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Predicates;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.UnmodifiableIterator;

/**
 * Internal operation for creating a FeatureCollection from a tree content.
 *
 * Exports the features touched by the diff between {@code oldRef} and {@code newRef} (limited to
 * {@code path}) into a GeoTools {@link SimpleFeatureStore}. Each exported feature carries an extra
 * "geogig_fid" String attribute holding the feature id (see {@link #addFidAttribute}).
 */
public class ExportDiffOp extends AbstractGeoGigOp<SimpleFeatureStore> {

    // Default conversion function: pass the feature through unchanged (null -> absent).
    private static final Function<Feature, Optional<Feature>> IDENTITY = new Function<Feature, Optional<Feature>>() {

        @Override
        @Nullable
        public Optional<Feature> apply(@Nullable Feature feature) {
            return Optional.fromNullable(feature);
        }

    };

    // Tree path (layer name) whose diffs are exported.
    private String path;

    // Deferred provider for the target store; resolved once in getTargetStore().
    private Supplier<SimpleFeatureStore> targetStoreProvider;

    // Optional per-feature conversion/filter; IDENTITY unless overridden.
    private Function<Feature, Optional<Feature>> function = IDENTITY;

    // Whether to wrap the export in a GeoTools transaction.
    private boolean transactional;

    // When true, export the OLD side of each diff entry instead of the new one.
    private boolean old;

    private String newRef;

    private String oldRef;

    /**
     * Executes the export operation using the parameters that have been specified.
     *
     * @return a FeatureCollection with the specified features
     * @throws GeoToolsOpException if the target store cannot be resolved or features cannot be
     *         added
     */
    @Override
    protected SimpleFeatureStore _call() {

        final SimpleFeatureStore targetStore = getTargetStore();

        // The side being exported determines which ref resolves the feature type metadata.
        final String refspec = old ? oldRef : newRef;
        final RevTree rootTree = resolveRootTree(refspec);
        final NodeRef typeTreeRef = resolTypeTreeRef(refspec, path, rootTree);
        final ObjectId defaultMetadataId = typeTreeRef.getMetadataId();

        final ProgressListener progressListener = getProgressListener();

        progressListener.started();
        progressListener.setDescription("Exporting diffs for path '" + path + "'... ");

        // Lazy collection: the diff is computed anew each time features() is called,
        // which happens when the store iterates the collection in addFeatures().
        FeatureCollection<SimpleFeatureType, SimpleFeature> asFeatureCollection = new BaseFeatureCollection<SimpleFeatureType, SimpleFeature>() {

            @Override
            public FeatureIterator<SimpleFeature> features() {

                Iterator<DiffEntry> diffs = command(DiffOp.class).setOldVersion(oldRef)
                        .setNewVersion(newRef).setFilter(path).call();

                final Iterator<SimpleFeature> plainFeatures = getFeatures(diffs, old,
                        stagingDatabase(), defaultMetadataId, progressListener);

                // Apply the user-supplied conversion, then drop absent/null results.
                Iterator<Optional<Feature>> transformed = Iterators.transform(plainFeatures,
                        ExportDiffOp.this.function);

                Iterator<SimpleFeature> filtered = Iterators.filter(Iterators.transform(
                        transformed, new Function<Optional<Feature>, SimpleFeature>() {
                            @Override
                            public SimpleFeature apply(Optional<Feature> input) {
                                return (SimpleFeature) (input.isPresent() ? input.get() : null);
                            }
                        }), Predicates.notNull());

                return new DelegateFeatureIterator<SimpleFeature>(filtered);
            }
        };

        // add the feature collection to the feature store
        final Transaction transaction;
        if (transactional) {
            transaction = new DefaultTransaction("create");
        } else {
            transaction = Transaction.AUTO_COMMIT;
        }
        try {
            targetStore.setTransaction(transaction);
            try {
                targetStore.addFeatures(asFeatureCollection);
                transaction.commit();
            } catch (final Exception e) {
                if (transactional) {
                    transaction.rollback();
                }
                // Re-throw our own exception type untouched; wrap everything else.
                Throwables.propagateIfInstanceOf(e, GeoToolsOpException.class);
                throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
            } finally {
                transaction.close();
            }
        } catch (IOException e) {
            // IOException here can only come from rollback()/close() above.
            throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
        }

        progressListener.complete();

        return targetStore;

    }

    /**
     * Lazily maps each diff entry to a SimpleFeature built from the revision database.
     * Entries whose selected side (old/new) is missing (pure adds/deletes) map to null and are
     * filtered out.
     */
    private static Iterator<SimpleFeature> getFeatures(Iterator<DiffEntry> diffs,
            final boolean old, final ObjectDatabase database, final ObjectId metadataId,
            final ProgressListener progressListener) {

        final SimpleFeatureType featureType = addFidAttribute(database.getFeatureType(metadataId));
        final RevFeatureType revFeatureType = RevFeatureTypeImpl.build(featureType);
        // NOTE(review): this single builder is shared by the lazy function below — assumes the
        // returned iterator is consumed from one thread at a time; confirm with callers.
        final SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(featureType);

        Function<DiffEntry, SimpleFeature> asFeature = new Function<DiffEntry, SimpleFeature>() {

            @Override
            @Nullable
            public SimpleFeature apply(final DiffEntry input) {
                NodeRef nodeRef = old ? input.getOldObject() : input.getNewObject();
                if (nodeRef == null) {
                    return null;
                }

                final RevFeature revFeature = database.getFeature(nodeRef.objectId());
                ImmutableList<Optional<Object>> values = revFeature.getValues();

                // Attribute 0 is the prepended "geogig_fid"; stored values start at index 1.
                for (int i = 0; i < values.size(); i++) {
                    String name = featureType.getDescriptor(i + 1).getLocalName();
                    Object value = values.get(i).orNull();
                    featureBuilder.set(name, value);
                }

                featureBuilder.set("geogig_fid", nodeRef.name());
                Feature feature = featureBuilder.buildFeature(nodeRef.name());
                // Keep the geogig feature id when the store writes the feature.
                feature.getUserData().put(Hints.USE_PROVIDED_FID, true);
                feature.getUserData().put(RevFeature.class, revFeature);
                feature.getUserData().put(RevFeatureType.class, revFeatureType);

                if (feature instanceof SimpleFeature) {
                    return (SimpleFeature) feature;
                }
                return null;
            }

        };

        Iterator<SimpleFeature> asFeatures = Iterators.transform(diffs, asFeature);

        UnmodifiableIterator<SimpleFeature> filterNulls = Iterators.filter(asFeatures,
                Predicates.notNull());

        return filterNulls;
    }

    /**
     * Builds a copy of the given feature type with an extra "geogig_fid" String attribute
     * prepended, used to carry each feature's id into the export.
     */
    private static SimpleFeatureType addFidAttribute(RevFeatureType revFType) {
        SimpleFeatureType featureType = (SimpleFeatureType) revFType.type();
        SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
        builder.add("geogig_fid", String.class);
        for (AttributeDescriptor descriptor : featureType.getAttributeDescriptors()) {
            builder.add(descriptor);
        }
        builder.setName(featureType.getName());
        builder.setCRS(featureType.getCoordinateReferenceSystem());
        featureType = builder.buildFeatureType();
        return featureType;
    }

    /**
     * Resolves the feature type tree for {@code treePath} under the given root tree, failing
     * with a descriptive message when it is missing or not a tree.
     */
    private NodeRef resolTypeTreeRef(final String refspec, final String treePath,
            final RevTree rootTree) {
        Optional<NodeRef> typeTreeRef = command(FindTreeChild.class).setIndex(true)
                .setParent(rootTree).setChildPath(treePath).call();
        checkArgument(typeTreeRef.isPresent(), "Type tree %s does not exist", refspec);
        checkArgument(TYPE.TREE.equals(typeTreeRef.get().getType()),
                "%s did not resolve to a tree", refspec);
        return typeTreeRef.get();
    }

    /**
     * Resolves a refspec to its root tree from the staging database.
     */
    private RevTree resolveRootTree(final String refspec) {
        Optional<ObjectId> rootTreeId = command(ResolveTreeish.class).setTreeish(refspec).call();
        checkArgument(rootTreeId.isPresent(), "Invalid tree spec: %s", refspec);
        RevTree rootTree = stagingDatabase().getTree(rootTreeId.get());
        return rootTree;
    }

    /**
     * Resolves the target store from the configured supplier, wrapping any failure (or a null
     * result) in a GeoToolsOpException.
     */
    private SimpleFeatureStore getTargetStore() {
        SimpleFeatureStore targetStore;
        try {
            targetStore = targetStoreProvider.get();
        } catch (Exception e) {
            throw new GeoToolsOpException(StatusCode.CANNOT_CREATE_FEATURESTORE);
        }
        if (targetStore == null) {
            throw new GeoToolsOpException(StatusCode.CANNOT_CREATE_FEATURESTORE);
        }
        return targetStore;
    }

    /**
     *
     * @param featureStore a supplier that resolves to the feature store to use for exporting
     * @return {@code this}
     */
    public ExportDiffOp setFeatureStore(Supplier<SimpleFeatureStore> featureStore) {
        this.targetStoreProvider = featureStore;
        return this;
    }

    /**
     *
     * @param featureStore the feature store to use for exporting The schema of the feature store
     *        must be equal to the one of the layer whose diffs are to be exported, plus an
     *        additional "geogig_fid" field of type String, which is used to include the id of each
     *        feature.
     *
     * @return {@code this}
     */
    public ExportDiffOp setFeatureStore(SimpleFeatureStore featureStore) {
        this.targetStoreProvider = Suppliers.ofInstance(featureStore);
        return this;
    }

    /**
     * @param path the path to export
     * @return {@code this}
     */
    public ExportDiffOp setPath(String path) {
        this.path = path;
        return this;
    }

    /**
     * Sets the function to use for creating a valid Feature that has the FeatureType of the output
     * FeatureStore, based on the actual FeatureType of the Features to export.
     *
     * The Export operation assumes that the feature returned by this function are valid to be added
     * to the current FeatureSource, and, therefore, performs no checking of FeatureType matching.
     * It is up to the user performing the export to ensure that the function actually generates
     * valid features for the current FeatureStore.
     *
     * If no function is explicitly set, an identity function is used, and Features are not
     * converted.
     *
     * This function can be used as a filter as well. If the returned object is Optional.absent, no
     * feature will be added
     *
     * @param function the conversion/filter function; null restores the identity function
     * @return {@code this}
     */
    public ExportDiffOp setFeatureTypeConversionFunction(
            Function<Feature, Optional<Feature>> function) {
        this.function = function == null ? IDENTITY : function;
        return this;
    }

    /**
     * @param transactional whether to use a geotools transaction for the operation, defaults to
     *        {@code true}
     * @return {@code this}
     */
    public ExportDiffOp setTransactional(boolean transactional) {
        this.transactional = transactional;
        return this;
    }

    /**
     * @param newRef the new-side ref of the diff
     * @return {@code this}
     */
    public ExportDiffOp setNewRef(String newRef) {
        this.newRef = newRef;
        return this;
    }

    /**
     * @param oldRef the old-side ref of the diff
     * @return {@code this}
     */
    public ExportDiffOp setOldRef(String oldRef) {
        this.oldRef = oldRef;
        return this;
    }

    /**
     * @param old when true, export the old side of each diff entry instead of the new one
     * @return {@code this}
     */
    public ExportDiffOp setUseOld(boolean old) {
        this.old = old;
        return this;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.datastructures;

import org.apache.ignite.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
import org.apache.ignite.testframework.*;
import org.apache.ignite.testframework.junits.common.*;

import java.util.concurrent.*;

/**
 * Verifies that Ignite data structures (sequence, atomic long, set, latch, queue) behave
 * identically whether they are created/used from a client node or a server node. Each test is
 * run twice with creator/accessor roles swapped. Subclasses choose the discovery mode via
 * {@link #clientDiscovery()}.
 */
public abstract class IgniteClientDataStructuresAbstractTest extends GridCommonAbstractTest {
    /** Shared IP finder so all grids in the test discover each other. */
    protected static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** Total node count; the last node (index NODE_CNT - 1) is started in client mode. */
    private static final int NODE_CNT = 4;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        // The last grid is the client; when client discovery is disabled, force the
        // discovery SPI into server mode so the client still joins via server discovery.
        if (gridName.equals(getTestGridName(NODE_CNT - 1))) {
            cfg.setClientMode(true);

            if (!clientDiscovery())
                ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setForceServerMode(true);
        }

        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        startGrids(NODE_CNT);
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /**
     * @return {@code True} if use client discovery.
     */
    protected abstract boolean clientDiscovery();

    /**
     * @throws Exception If failed.
     */
    public void testSequence() throws Exception {
        Ignite clientNode = clientIgnite();

        Ignite srvNode = serverNode();

        // Exercise both directions: client creates / server reads, then vice versa.
        testSequence(clientNode, srvNode);
        testSequence(srvNode, clientNode);
    }

    /**
     * Creates a sequence on {@code creator}, checks visibility from {@code other}, then verifies
     * the sequence is gone on both nodes after close().
     *
     * @param creator Creator node.
     * @param other Other node.
     * @throws Exception If failed.
     */
    private void testSequence(Ignite creator, Ignite other) throws Exception {
        // create=false must return null while the structure does not exist yet.
        assertNull(creator.atomicSequence("seq1", 1L, false));
        assertNull(other.atomicSequence("seq1", 1L, false));

        try (IgniteAtomicSequence seq = creator.atomicSequence("seq1", 1L, true)) {
            assertNotNull(seq);

            assertEquals(1L, seq.get());

            // getAndAdd returns the value before the addition.
            assertEquals(1L, seq.getAndAdd(1));

            assertEquals(2L, seq.get());

            IgniteAtomicSequence seq0 = other.atomicSequence("seq1", 1L, false);

            assertNotNull(seq0);
        }

        // try-with-resources closed (removed) the sequence cluster-wide.
        assertNull(creator.atomicSequence("seq1", 1L, false));
        assertNull(other.atomicSequence("seq1", 1L, false));
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicLong() throws Exception {
        Ignite clientNode = clientIgnite();

        Ignite srvNode = serverNode();

        testAtomicLong(clientNode, srvNode);
        testAtomicLong(srvNode, clientNode);
    }

    /**
     * Creates an atomic long on {@code creator} and verifies updates made on either node are
     * visible on the other; the structure must disappear on both nodes after close().
     *
     * @param creator Creator node.
     * @param other Other node.
     * @throws Exception If failed.
     */
    private void testAtomicLong(Ignite creator, Ignite other) throws Exception {
        assertNull(creator.atomicLong("long1", 1L, false));
        assertNull(other.atomicLong("long1", 1L, false));

        try (IgniteAtomicLong cntr = creator.atomicLong("long1", 1L, true)) {
            assertNotNull(cntr);

            assertEquals(1L, cntr.get());

            assertEquals(1L, cntr.getAndAdd(1));

            assertEquals(2L, cntr.get());

            IgniteAtomicLong cntr0 = other.atomicLong("long1", 1L, false);

            assertNotNull(cntr0);

            assertEquals(2L, cntr0.get());

            assertEquals(3L, cntr0.incrementAndGet());

            // Update via the other node's handle must be visible through the creator's handle.
            assertEquals(3L, cntr.get());
        }

        assertNull(creator.atomicLong("long1", 1L, false));
        assertNull(other.atomicLong("long1", 1L, false));
    }

    /**
     * @throws Exception If failed.
     */
    public void testSet() throws Exception {
        Ignite clientNode = clientIgnite();

        Ignite srvNode = serverNode();

        testSet(clientNode, srvNode);
        testSet(srvNode, clientNode);
    }

    /**
     * Creates a set on {@code creator} and verifies add/contains/remove are consistent when
     * observed from {@code other}.
     *
     * @param creator Creator node.
     * @param other Other node.
     * @throws Exception If failed.
     */
    private void testSet(Ignite creator, Ignite other) throws Exception {
        // Null collection configuration means "do not create".
        assertNull(creator.set("set1", null));
        assertNull(other.set("set1", null));

        CollectionConfiguration colCfg = new CollectionConfiguration();

        try (IgniteSet<Integer> set = creator.set("set1", colCfg)) {
            assertNotNull(set);

            assertEquals(0, set.size());

            assertFalse(set.contains(1));

            assertTrue(set.add(1));

            assertTrue(set.contains(1));

            IgniteSet<Integer> set0 = other.set("set1", null);

            assertTrue(set0.contains(1));

            assertEquals(1, set0.size());

            assertTrue(set0.remove(1));

            // Removal via the other node's handle is visible via the creator's handle.
            assertFalse(set.contains(1));
        }

        assertNull(creator.set("set1", null));
        assertNull(other.set("set1", null));
    }

    /**
     * @throws Exception If failed.
     */
    public void testLatch() throws Exception {
        Ignite clientNode = clientIgnite();

        Ignite srvNode = serverNode();

        testLatch(clientNode, srvNode);
        testLatch(srvNode, clientNode);
    }

    /**
     * Creates a count-down latch on {@code creator}; a background task on {@code other} counts
     * it down after a delay while the creator awaits it.
     *
     * @param creator Creator node.
     * @param other Other node.
     * @throws Exception If failed.
     */
    private void testLatch(Ignite creator, final Ignite other) throws Exception {
        assertNull(creator.countDownLatch("latch1", 1, true, false));
        assertNull(other.countDownLatch("latch1", 1, true, false));

        try (IgniteCountDownLatch latch = creator.countDownLatch("latch1", 1, true, true)) {
            assertNotNull(latch);

            assertEquals(1, latch.count());

            // Count the latch down from the other node after a short delay, so the
            // await() below genuinely blocks across nodes.
            IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Object>() {
                @Override public Object call() throws Exception {
                    U.sleep(1000);

                    IgniteCountDownLatch latch0 = other.countDownLatch("latch1", 1, true, false);

                    assertEquals(1, latch0.count());

                    log.info("Count down latch.");

                    latch0.countDown();

                    assertEquals(0, latch0.count());

                    return null;
                }
            });

            log.info("Await latch.");

            assertTrue(latch.await(5000, TimeUnit.MILLISECONDS));

            log.info("Finished wait.");

            // Propagate any assertion failure from the async task.
            fut.get();
        }

        assertNull(creator.countDownLatch("latch1", 1, true, false));
        assertNull(other.countDownLatch("latch1", 1, true, false));
    }

    /**
     * @throws Exception If failed.
     */
    public void testQueue() throws Exception {
        Ignite clientNode = clientIgnite();

        Ignite srvNode = serverNode();

        testQueue(clientNode, srvNode);
        testQueue(srvNode, clientNode);
    }

    /**
     * Creates a queue on {@code creator}; a background task on {@code other} adds an element
     * after a delay while the creator blocks in take().
     *
     * @param creator Creator node.
     * @param other Other node.
     * @throws Exception If failed.
     */
    private void testQueue(Ignite creator, final Ignite other) throws Exception {
        assertNull(creator.queue("q1", 0, null));
        assertNull(other.queue("q1", 0, null));

        try (IgniteQueue<Integer> queue = creator.queue("q1", 0, new CollectionConfiguration())) {
            assertNotNull(queue);

            queue.add(1);

            assertEquals(1, queue.poll().intValue());

            IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Object>() {
                @Override public Object call() throws Exception {
                    U.sleep(1000);

                    IgniteQueue<Integer> queue0 = other.queue("q1", 0, null);

                    assertEquals(0, queue0.size());

                    log.info("Add in queue.");

                    queue0.add(2);

                    return null;
                }
            });

            log.info("Try take.");

            // Blocks until the async task above adds the element from the other node.
            assertEquals(2, queue.take().intValue());

            log.info("Finished take.");

            fut.get();
        }

        assertNull(creator.queue("q1", 0, null));
        assertNull(other.queue("q1", 0, null));
    }

    /**
     * @return Client node.
     */
    private Ignite clientIgnite() {
        Ignite ignite = ignite(NODE_CNT - 1);

        assertTrue(ignite.configuration().isClientMode());

        // Discovery SPI client mode must agree with the subclass's clientDiscovery() choice.
        assertEquals(clientDiscovery(), ignite.configuration().getDiscoverySpi().isClientMode());

        return ignite;
    }

    /**
     * @return Server node.
     */
    private Ignite serverNode() {
        Ignite ignite = ignite(0);

        assertFalse(ignite.configuration().isClientMode());

        return ignite;
    }
}
/**
 * 
 */
package edu.ucdenver.ccp.nlp.pipelines.evaluation.craft.conceptmapper;

/*
 * #%L
 * Colorado Computational Pharmacology's NLP pipelines
 * 			module
 * %%
 * Copyright (C) 2014 - 2017 Regents of the University of Colorado
 * %%
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 * 
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 * 
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 
 * 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors
 *    may be used to endorse or promote products derived from this software without
 *    specific prior written permission.
 * 
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * #L%
 */

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;

import org.apache.uima.UIMAException;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.resource.metadata.TypeSystemDescription;
import org.uimafit.factory.TypeSystemDescriptionFactory;

import edu.ucdenver.ccp.common.collections.CollectionsUtil;
import edu.ucdenver.ccp.common.file.CharacterEncoding;
import edu.ucdenver.ccp.common.file.FileUtil.CleanDirectory;
import edu.ucdenver.ccp.common.file.FileWriterUtil;
import edu.ucdenver.ccp.common.file.FileWriterUtil.FileSuffixEnforcement;
import edu.ucdenver.ccp.common.file.FileWriterUtil.WriteMode;
import edu.ucdenver.ccp.datasource.fileparsers.obo.OntologyUtil.SynonymType;
import edu.ucdenver.ccp.nlp.pipelines.conceptmapper.ConceptMapperDictionaryFileFactory.DictionaryNamespace;
import edu.ucdenver.ccp.nlp.pipelines.conceptmapper.ConceptMapperPipelineCmdOpts;
import edu.ucdenver.ccp.nlp.pipelines.conceptmapper.ConceptMapperPipelineCmdOpts.DictionaryParameterOperation;
import edu.ucdenver.ccp.nlp.pipelines.conceptmapper.ConceptMapperPipelineFactory;
import edu.ucdenver.ccp.nlp.pipelines.evaluation.craft.CraftEvaluationPipeline;
import edu.ucdenver.ccp.nlp.uima.annotators.comparison.AnnotationComparator_AE.MentionComparatorType;
import edu.ucdenver.ccp.nlp.uima.annotators.comparison.AnnotationComparator_AE.SpanComparatorType;
import edu.ucdenver.ccp.nlp.uima.annotators.converter.OboNamespaceRemover_AE;
import edu.ucdenver.ccp.nlp.uima.annotators.filter.SlotRemovalFilter_AE.SlotRemovalOption;
import edu.ucdenver.ccp.nlp.uima.annotators.sentence_detection.ExplicitSentenceCasInserter;
import edu.ucdenver.ccp.nlp.uima.annotators.sentence_detection.OpenNlpSentenceDetectorAE;
import edu.ucdenver.ccp.nlp.uima.collections.craft.CraftConceptType;
import edu.ucdenver.ccp.nlp.uima.collections.craft.CraftRelease;
import edu.ucdenver.ccp.nlp.uima.util.TypeSystemUtil;
import edu.ucdenver.ccp.nlp.wrapper.conceptmapper.ConceptMapperPermutationFactory;
import edu.ucdenver.ccp.nlp.wrapper.conceptmapper.dictionary.obo.DictionaryEntryModifier;
import edu.ucdenver.ccp.nlp.wrapper.conceptmapper.dictionary.obo.OboToDictionary.IncludeExt;

/**
 * Evaluates the Concept Mapper using dictionaries built from the original OBO
 * files used during the annotation of CRAFT. A "default" configuration is used
 * for the Concept Mapper as specified in the nlp-pipelines-core project.
 * 
 * @author Colorado Computational Pharmacology, UC Denver;
 *         ccpsupport@ucdenver.edu
 * 
 */
public class CraftConceptMapperEvaluator {

	/**
	 * Indicates that the CRAFT initial public "release" set of 67 documents
	 * should be used for this evaluation
	 */
	private static final CraftRelease CRAFT_VERSION = CraftRelease.MAIN;

	/**
	 * The type system that contains the SentenceAnnotation class that is
	 * produced by the sentence detector being used
	 * {@link #getSentenceDetectorDescription(TypeSystemDescription)} and
	 * processed by the ConceptMapper
	 */
	private static final String SENTENCE_DETECTOR_TYPE_SYSTEM_STR = "edu.ucdenver.ccp.nlp.uima.annotators.TypeSystem";

	/**
	 * Builds a Concept Mapper pipeline for the given dictionary namespace and parameter
	 * permutation, runs it against CRAFT, and appends the parameter values used to the end of
	 * the evaluation results file.
	 * 
	 * @param dictNamespace namespace used to select/build the Concept Mapper dictionary
	 * @param craftConceptTypes CRAFT concept types to evaluate against
	 * @param dictionaryDirectory directory containing (or to contain) the dictionary file
	 * @param evalResultsFile destination for evaluation results; parameter values are appended
	 * @param paramValuesIndex index into the Concept Mapper parameter permutations
	 * @param cleanDictFile if true, the dictionary file is rebuilt from scratch
	 * @param postProcessingComponentDescription optional post-processing AE, may be null
	 * @param dictEntryModifier optional hook for modifying dictionary entries
	 * @param includeExt controls inclusion of extension classes in the dictionary
	 * @throws IOException if dictionary construction or results writing fails
	 */
	public static void evaluateCmPipelineAgainstCraft(DictionaryNamespace dictNamespace,
			Set<CraftConceptType> craftConceptTypes, File dictionaryDirectory, File evalResultsFile,
			int paramValuesIndex, boolean cleanDictFile,
			AnalysisEngineDescription postProcessingComponentDescription, DictionaryEntryModifier dictEntryModifier,
			IncludeExt includeExt) throws IOException {
		SynonymType synonymType = ConceptMapperPermutationFactory.getSynonymType(paramValuesIndex);
		ConceptMapperPipelineCmdOpts cmdOptions = getCmdOpts(dictNamespace, dictionaryDirectory, cleanDictFile,
				synonymType, Integer.toString(paramValuesIndex), dictEntryModifier, includeExt);
		TypeSystemDescription tsd = createConceptMapperTypeSystem();
		try {
			List<AnalysisEngineDescription> cmPipelineDescs = ConceptMapperPipelineFactory.getPipelineAeDescriptions(
					tsd, cmdOptions, DictionaryParameterOperation.USE, dictNamespace, CleanDirectory.NO,
					paramValuesIndex, dictEntryModifier, postProcessingComponentDescription, includeExt);
			runConceptMapperEvaluationAgainstCraft(craftConceptTypes, cmPipelineDescs, tsd, evalResultsFile);
			appendParameterValuesToEvalResultsFile(paramValuesIndex, evalResultsFile);
		} catch (UIMAException e) {
			// IOException is declared by this method and now propagates directly (the previous
			// version wrapped it in a RuntimeException, contradicting the throws clause).
			// UIMAException is not part of the contract, so it is surfaced as unchecked.
			throw new RuntimeException(e);
		}
	}

	/**
	 * Appends the Concept Mapper parameter values used to the end of the eval
	 * results file
	 * 
	 * @param paramValuesIndex index of the parameter permutation that was used
	 * @param evalResultsFile file to append to (UTF-8, append mode)
	 * @throws IOException if the write fails
	 */
	private static void appendParameterValuesToEvalResultsFile(int paramValuesIndex, File evalResultsFile)
			throws IOException {
		List<String> paramLines = CollectionsUtil
				.createList(ConceptMapperPermutationFactory.PARAM_COMBINATIONS.get(paramValuesIndex).toString());
		FileWriterUtil.printLines(paramLines, evalResultsFile, CharacterEncoding.UTF_8, WriteMode.APPEND,
				FileSuffixEnforcement.OFF);
	}

	/**
	 * @param dictNamespace namespace used to select/build the dictionary
	 * @param dictionaryDirectory directory where the dictionary file lives
	 * @param cleanDictFile if true, the dictionary file is rebuilt from scratch
	 * @param synonymType synonym handling for dictionary construction
	 * @param dictId identifier appended to the dictionary file name
	 * @param dictEntryModifier optional hook for modifying dictionary entries
	 * @param includeExt controls inclusion of extension classes in the dictionary
	 * @return a {@link ConceptMapperPipelineCmdOpts} with the Concept Mapper
	 *         dictionary and span class both specified
	 * @throws IOException if dictionary construction fails
	 */
	private static ConceptMapperPipelineCmdOpts getCmdOpts(DictionaryNamespace dictNamespace, File dictionaryDirectory,
			boolean cleanDictFile, SynonymType synonymType, String dictId, DictionaryEntryModifier dictEntryModifier,
			IncludeExt includeExt) throws IOException {
		ConceptMapperPipelineCmdOpts cmdOptions = new ConceptMapperPipelineCmdOpts();
		File cmDictFile = CraftOntologiesDictionaryFactory.createDictionaryFile(dictNamespace, dictionaryDirectory,
				cleanDictFile, synonymType, dictId, dictEntryModifier, includeExt);
		cmdOptions.setDictionaryFile(cmDictFile);
		// The ConceptMapper operates over sentences produced by the sentence detector.
		cmdOptions.setSpanClass(ExplicitSentenceCasInserter.SENTENCE_ANNOTATION_CLASS);
		return cmdOptions;
	}

	/**
	 * Assembles and runs the CRAFT evaluation pipeline: sentence detection, the Concept Mapper
	 * components, and OBO-namespace removal, with strict span / identical mention comparison.
	 * 
	 * @param craftConceptTypes CRAFT concept types to evaluate against
	 * @param conceptMapperDescriptions the Concept Mapper AE descriptions to evaluate
	 * @param tsd the type system to run with
	 * @param evalResultsFile optional output file for evaluation results, may be null
	 * @throws UIMAException if pipeline construction or execution fails
	 * @throws IOException if result writing fails
	 */
	private static void runConceptMapperEvaluationAgainstCraft(Set<CraftConceptType> craftConceptTypes,
			List<AnalysisEngineDescription> conceptMapperDescriptions, TypeSystemDescription tsd, File evalResultsFile)
			throws UIMAException, IOException {
		CraftEvaluationPipeline evalPipeline = new CraftEvaluationPipeline(CRAFT_VERSION, craftConceptTypes, tsd,
				SpanComparatorType.STRICT, MentionComparatorType.IDENTICAL);

		if (evalResultsFile != null) {
			evalPipeline.setEvalResultsOutputFile(evalResultsFile);
		}

		AnalysisEngineDescription sentenceDetectorDesc = getSentenceDetectorDescription(tsd);
		AnalysisEngineDescription namespaceRemoverDesc = getNamespaceRemoverDescription(tsd);
		evalPipeline.addPipelineComponent(sentenceDetectorDesc);
		evalPipeline.addPipelineComponents(conceptMapperDescriptions);
		evalPipeline.addPipelineComponent(namespaceRemoverDesc);
		evalPipeline.run(SlotRemovalOption.REMOVE_ALL);
	}

	/**
	 * @param tsd the type system to use
	 * @return a sentence detector {@link AnalysisEngineDescription}
	 * @throws ResourceInitializationException if the AE cannot be created
	 */
	private static AnalysisEngineDescription getSentenceDetectorDescription(TypeSystemDescription tsd)
			throws ResourceInitializationException {
		// Line breaks are treated as sentence boundaries for the CRAFT corpus.
		boolean treatLineBreaksAsSentenceBoundaries = true;
		return OpenNlpSentenceDetectorAE.createAnalysisEngineDescription(tsd, ExplicitSentenceCasInserter.class,
				treatLineBreaksAsSentenceBoundaries);
	}

	/**
	 * @param tsd the type system (unused by the underlying factory method)
	 * @return an AE that strips OBO namespace prefixes from annotation class names
	 * @throws ResourceInitializationException if the AE cannot be created
	 */
	private static AnalysisEngineDescription getNamespaceRemoverDescription(TypeSystemDescription tsd)
			throws ResourceInitializationException {
		return OboNamespaceRemover_AE.getDescription();
	}

	/**
	 * @return a {@link TypeSystemDescription} combining the CCP type system, the sentence
	 *         detector type system, and the Concept Mapper type systems
	 */
	private static TypeSystemDescription createConceptMapperTypeSystem() {
		Collection<String> typeSystemStrs = new ArrayList<String>();
		typeSystemStrs.add(TypeSystemUtil.CCP_TYPE_SYSTEM);
		typeSystemStrs.add(SENTENCE_DETECTOR_TYPE_SYSTEM_STR);
		typeSystemStrs.addAll(ConceptMapperPipelineFactory.CONCEPTMAPPER_TYPE_SYSTEM_STRS);
		TypeSystemDescription tsd = TypeSystemDescriptionFactory
				.createTypeSystemDescription(typeSystemStrs.toArray(new String[typeSystemStrs.size()]));
		return tsd;
	}

}
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.structuralsearch;

import com.intellij.dupLocator.iterators.ArrayBackedNodeIterator;
import com.intellij.dupLocator.iterators.NodeIterator;
import com.intellij.lang.Language;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ContentIterator;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.structuralsearch.impl.matcher.*;
import com.intellij.structuralsearch.impl.matcher.compiler.PatternCompiler;
import com.intellij.structuralsearch.impl.matcher.handlers.MatchingHandler;
import com.intellij.structuralsearch.impl.matcher.handlers.TopLevelMatchingHandler;
import com.intellij.structuralsearch.impl.matcher.iterators.SingleNodeIterator;
import com.intellij.structuralsearch.impl.matcher.iterators.SsrFilteringNodeIterator;
import com.intellij.structuralsearch.impl.matcher.strategies.MatchingStrategy;
import com.intellij.structuralsearch.plugin.ui.Configuration;
import com.intellij.structuralsearch.plugin.ui.ConfigurationManager;
import com.intellij.structuralsearch.plugin.util.CollectingMatchResultSink;
import com.intellij.structuralsearch.plugin.util.DuplicateFilteringResultSink;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.PairProcessor;
import com.intellij.util.SmartList;
import com.intellij.util.indexing.FileBasedIndex;
import org.jetbrains.annotations.NotNull;

import java.lang.ref.SoftReference;
import java.util.*;

/**
 * This class makes program structure tree matching:
 * it compiles a structural-search pattern, schedules the files/elements in scope as tasks,
 * and delegates per-node matching to a {@link GlobalMatchingVisitor}. Results are delivered
 * to a {@link MatchResultSink} (wrapped in a {@link DuplicateFilteringResultSink}).
 */
public class Matcher {
  static final Logger LOG = Logger.getInstance("#com.intellij.structuralsearch.impl.matcher.MatcherImpl");

  // Guards against a named configuration (transitively) referencing itself in buildMatcher().
  @SuppressWarnings("SSBasedInspection")
  private static final ThreadLocal<Set<String>> ourRecursionGuard = ThreadLocal.withInitial(() -> new HashSet<>());

  // project being worked on
  final Project project;
  final DumbService myDumbService;

  // context of matching
  final MatchContext matchContext;
  // when true, findMatches() runs synchronously over a LocalSearchScope (see testFindMatches)
  private boolean isTesting;

  // visitor to delegate the real work
  private final GlobalMatchingVisitor visitor = new GlobalMatchingVisitor();
  // progress indicator obtained from the sink; NOTE(review): may be null if the sink
  // provides none — findMatches() dereferences it for GlobalSearchScope searches
  ProgressIndicator progress;
  private final TaskScheduler scheduler = new TaskScheduler();

  // counters used to report scan progress as a fraction (see MatchOneFile.run)
  int totalFilesToScan;
  int scannedFilesCount;

  public Matcher(Project project) {
    this(project, null);
  }

  public Matcher(final Project project, final MatchOptions matchOptions) {
    this.project = project;
    matchContext = new MatchContext();
    matchContext.setMatcher(visitor);

    if (matchOptions != null) {
      matchContext.setOptions(matchOptions);
      // compile eagerly so the pattern is ready and stamped with the options hash
      cacheCompiledPattern(matchOptions, PatternCompiler.compilePattern(project, matchOptions));
    }
    myDumbService = DumbService.getInstance(project);
  }

  /**
   * Builds a Matcher either from an inline quoted search criteria string, or from a stored
   * configuration looked up by name. Recursive configuration references are detected via a
   * thread-local name set and reported as {@link MalformedPatternException}.
   */
  public static Matcher buildMatcher(Project project, FileType fileType, String constraint) {
    if (StringUtil.isQuotedString(constraint)) {
      // keep old configurations working, also useful for testing
      final MatchOptions myMatchOptions = new MatchOptions();
      myMatchOptions.setFileType(fileType);
      myMatchOptions.fillSearchCriteria(StringUtil.unquoteString(constraint));
      return new Matcher(project, myMatchOptions);
    }
    else {
      final Set<String> set = ourRecursionGuard.get();
      if (!set.add(constraint)) {
        throw new MalformedPatternException("Pattern recursively references itself");
      }
      try {
        final Configuration configuration = ConfigurationManager.getInstance(project).findConfigurationByName(constraint);
        if (configuration == null) {
          throw new MalformedPatternException("Configuration '" + constraint + "' not found");
        }
        return new Matcher(project, configuration.getMatchOptions());
      } finally {
        set.remove(constraint);
        if (set.isEmpty()) {
          // we're finished with this thread local
          ourRecursionGuard.remove();
        }
      }
    }
  }

  // Holder for the most recently validated pattern/options pair, so a subsequent
  // findMatches() with the same options can skip recompilation (see prepareMatching).
  static class LastMatchData {
    CompiledPattern lastPattern;
    MatchOptions lastOptions;
  }

  // Soft reference: the cached pattern may be reclaimed under memory pressure.
  private static SoftReference<LastMatchData> lastMatchData;
  private static final Object lastMatchDataLock = new Object();

  /**
   * Compiles the pattern for the given options, caches it for later reuse, and asks the
   * language profile to check it, throwing if the pattern is not valid.
   */
  public static void validate(Project project, MatchOptions options) {
    final CompiledPattern pattern = PatternCompiler.compilePattern(project, options);
    synchronized (lastMatchDataLock) {
      final LastMatchData data = new LastMatchData();
      data.lastPattern = pattern;
      data.lastOptions = options;
      lastMatchData = new SoftReference<>(data);
    }

    final StructuralSearchProfile profile = StructuralSearchUtil.getProfileByFileType(options.getFileType());
    assert profile != null;
    profile.checkSearchPattern(pattern);
  }

  /**
   * Cheap pre-check: walks pattern nodes and candidate nodes in lockstep and asks each
   * handler whether it could match at all. Both iterators are reset before returning.
   *
   * @return true if a full match attempt is worthwhile (also when the pattern is empty)
   */
  public static boolean checkIfShouldAttemptToMatch(MatchContext context, NodeIterator matchedNodes) {
    final CompiledPattern pattern = context.getPattern();
    final NodeIterator patternNodes = pattern.getNodes();
    try {
      while (true) {
        final PsiElement patternNode = patternNodes.current();
        if (patternNode == null) {
          // ran out of pattern nodes first => everything so far could match
          return true;
        }
        final PsiElement matchedNode = matchedNodes.current();
        if (matchedNode == null) {
          // fewer candidate nodes than pattern nodes => cannot match
          return false;
        }
        final MatchingHandler matchingHandler = pattern.getHandler(patternNode);
        if (matchingHandler == null || !matchingHandler.canMatch(patternNode, matchedNode, context)) {
          return false;
        }
        matchedNodes.advance();
        patternNodes.advance();
      }
    } finally {
      patternNodes.reset();
      matchedNodes.reset();
    }
  }

  /**
   * Runs matching over the given nodes, reporting each result to {@code processor} paired
   * with {@code configuration}. Non-recursive; the scope is reset afterwards.
   */
  public void processMatchesInElement(MatchContext context, Configuration configuration,
                                      NodeIterator matchedNodes,
                                      PairProcessor<MatchResult, Configuration> processor) {
    try {
      configureOptions(context, configuration, matchedNodes.current(), processor);
      context.setShouldRecursivelyMatch(false);
      visitor.matchContext(matchedNodes);
    }
    finally {
      matchedNodes.reset();
      context.getOptions().setScope(null);
    }
  }

  /**
   * @return true if the single given element matches this matcher's current pattern
   *         (non-recursive match against just that node)
   */
  public boolean matchNode(@NotNull PsiElement element) {
    final CollectingMatchResultSink sink = new CollectingMatchResultSink();
    final MatchOptions options = matchContext.getOptions();
    final CompiledPattern compiledPattern = prepareMatching(sink, options);
    if (compiledPattern == null) {
      return false;
    }
    matchContext.setShouldRecursivelyMatch(false);
    visitor.matchContext(new SingleNodeIterator(element));
    return !sink.getMatches().isEmpty();
  }

  public void clearContext() {
    matchContext.clear();
  }

  /**
   * Rebuilds this matcher's context from the given context/configuration, installing a sink
   * that forwards each match (with its configuration) to {@code processor}, and scoping the
   * search to {@code psiFile} only.
   */
  private void configureOptions(MatchContext context,
                                final Configuration configuration,
                                PsiElement psiFile,
                                final PairProcessor<MatchResult, Configuration> processor) {
    if (psiFile == null) return;

    LocalSearchScope scope = new LocalSearchScope(psiFile);

    matchContext.clear();
    matchContext.setMatcher(visitor);

    MatchOptions options = context.getOptions();
    matchContext.setOptions(options);
    matchContext.setPattern(context.getPattern());
    matchContext.setShouldRecursivelyMatch(context.shouldRecursivelyMatch());
    visitor.setMatchContext(matchContext);

    matchContext.setSink(
      new DuplicateFilteringResultSink(
        new DefaultMatchResultSink() {
          @Override
          public void newMatch(MatchResult result) {
            processor.process(result, configuration);
          }
        }
      )
    );
    options.setScope(scope);
  }

  /**
   * Compiles each configuration's pattern (inside a read action) and stores a ready
   * MatchContext per configuration into {@code out}; already-present entries are skipped.
   * Configurations whose pattern fails to compile are silently omitted.
   */
  public void precompileOptions(List<Configuration> configurations, Map<Configuration, MatchContext> out) {
    for (final Configuration configuration : configurations) {
      if (out.containsKey(configuration)) {
        continue;
      }
      final MatchContext matchContext = new MatchContext();
      matchContext.setMatcher(visitor);
      final MatchOptions matchOptions = configuration.getMatchOptions();
      matchContext.setOptions(matchOptions);
      ReadAction.run(() -> {
        try {
          final CompiledPattern compiledPattern = PatternCompiler.compilePattern(project, matchOptions);
          matchContext.setPattern(compiledPattern);
          out.put(configuration, matchContext);
        }
        catch (StructuralSearchException ignored) {}
      });
    }
  }

  /**
   * Finds the matches of given pattern starting from given tree element.
   */
  public void findMatches(MatchResultSink sink, MatchOptions options) throws MalformedPatternException, UnsupportedPatternException {
    CompiledPattern compiledPattern = prepareMatching(sink, options);
    if (compiledPattern == null) {
      return;
    }

    matchContext.getSink().setMatchingProcess(scheduler);
    scheduler.init();
    progress = matchContext.getSink().getProgressIndicator();

    if (isTesting) {
      // testing mode: match synchronously over the LocalSearchScope elements
      final PsiElement[] elements = ((LocalSearchScope)options.getScope()).getScope();

      PsiElement parent = elements[0].getParent();
      if (matchContext.getPattern().getStrategy().continueMatching(parent != null ? parent : elements[0])) {
        visitor.matchContext(new SsrFilteringNodeIterator(new ArrayBackedNodeIterator(elements)));
      }
      else {
        // strategy rejected the parent; descend into each element manually
        final LanguageFileType fileType = (LanguageFileType)matchContext.getOptions().getFileType();
        final Language language = fileType.getLanguage();
        for (PsiElement element : elements) {
          match(element, language);
        }
      }

      matchContext.getSink().matchingFinished();
      return;
    }
    if (!findMatches(options, compiledPattern)) {
      return;
    }

    if (scheduler.getTaskQueueEndAction() == null) {
      scheduler.setTaskQueueEndAction(
        () -> matchContext.getSink().matchingFinished()
      );
    }

    scheduler.executeNext();
  }

  /**
   * Enqueues one matching task per file (GlobalSearchScope, via the file index) or per
   * element (LocalSearchScope) onto the scheduler. Always returns true in the current code.
   */
  private boolean findMatches(MatchOptions options, CompiledPattern compiledPattern) {
    // a pattern-derived (optimized) scope takes precedence over the user-specified one
    SearchScope searchScope = compiledPattern.getScope();
    final boolean ourOptimizedScope = searchScope != null;
    if (!ourOptimizedScope) searchScope = options.getScope();

    if (searchScope instanceof GlobalSearchScope) {
      final GlobalSearchScope scope = (GlobalSearchScope)searchScope;

      final ContentIterator ci = fileOrDir -> {
        if (!fileOrDir.isDirectory() && scope.contains(fileOrDir) && fileOrDir.getFileType() != FileTypes.UNKNOWN) {
          ++totalFilesToScan;
          scheduler.addOneTask(new MatchOneVirtualFile(fileOrDir));
        }
        return true;
      };

      ReadAction.run(() -> FileBasedIndex.getInstance().iterateIndexableFiles(ci, project, progress));
      // NOTE(review): progress is dereferenced without a null check here, while
      // MatchOneFile.run() guards it — confirm the sink always supplies an indicator
      progress.setText2("");
    }
    else {
      final PsiElement[] elementsToScan = ((LocalSearchScope)searchScope).getScope();
      totalFilesToScan = elementsToScan.length;

      for (int i = 0; i < elementsToScan.length; ++i) {
        final PsiElement psiElement = elementsToScan[i];

        if (psiElement == null) continue;
        scheduler.addOneTask(new MatchOnePsiFile(psiElement));
        if (ourOptimizedScope) elementsToScan[i] = null; // to prevent long PsiElement reference
      }
    }
    return true;
  }

  /**
   * Resets the context for a new search and obtains the compiled pattern: reuses the
   * context's own pattern when the options hash still matches, else consumes the pattern
   * cached by {@link #validate}, else compiles from scratch in a read action.
   */
  private CompiledPattern prepareMatching(final MatchResultSink sink, final MatchOptions options) {
    CompiledPattern savedPattern = null;
    if (matchContext.getOptions() == options && matchContext.getPattern() != null &&
        matchContext.getOptions().hashCode() == matchContext.getPattern().getOptionsHashStamp()) {
      savedPattern = matchContext.getPattern();
    }
    matchContext.clear();
    matchContext.setSink(new DuplicateFilteringResultSink(sink));
    matchContext.setOptions(options);
    matchContext.setMatcher(visitor);
    visitor.setMatchContext(matchContext);
    CompiledPattern compiledPattern = savedPattern;

    if (compiledPattern == null) {
      synchronized (lastMatchDataLock) {
        final LastMatchData data = com.intellij.reference.SoftReference.dereference(lastMatchData);
        // identity comparison on purpose: only the exact options instance validate() saw
        if (data != null && options == data.lastOptions) {
          compiledPattern = data.lastPattern;
        }
        lastMatchData = null;
      }

      if (compiledPattern == null) {
        compiledPattern = ReadAction.compute(() -> PatternCompiler.compilePattern(project, options));
      }
    }
    cacheCompiledPattern(options, compiledPattern);
    return compiledPattern;
  }

  // Installs the pattern on the context and stamps it with the options' hash so
  // prepareMatching can detect unchanged options later.
  private void cacheCompiledPattern(final MatchOptions options, final CompiledPattern compiledPattern) {
    matchContext.setPattern(compiledPattern);
    compiledPattern.setOptionsHashStamp(options.hashCode());
  }

  /**
   * Finds the matches of given pattern starting from given tree element.
   * @param source string for search
   * @return list of matches found
   * @throws MalformedPatternException
   * @throws UnsupportedPatternException
   */
  public List<MatchResult> testFindMatches(String source,
                                           MatchOptions options,
                                           boolean fileContext,
                                           FileType sourceFileType,
                                           String sourceExtension,
                                           boolean physicalSourceFile)
    throws MalformedPatternException, UnsupportedPatternException {

    CollectingMatchResultSink sink = new CollectingMatchResultSink();

    try {
      // parse the source text into a temporary PSI tree to search in
      PsiElement[] elements = MatcherImplUtil.createSourceTreeFromText(source,
                                                                       fileContext ? PatternTreeContext.File : PatternTreeContext.Block,
                                                                       sourceFileType,
                                                                       sourceExtension,
                                                                       project, physicalSourceFile);

      options.setScope(new LocalSearchScope(elements));
      testFindMatches(sink, options);
    }
    catch (IncorrectOperationException e) {
      MalformedPatternException exception = new MalformedPatternException();
      exception.initCause(e);
      throw exception;
    }
    finally {
      options.setScope(null);
    }

    return sink.getMatches();
  }

  public List<MatchResult> testFindMatches(String source, MatchOptions options, boolean fileContext)
    throws MalformedPatternException, UnsupportedPatternException {
    return testFindMatches(source, options, fileContext, options.getFileType(), null, false);
  }

  /**
   * Finds the matches of given pattern starting from given tree element.
   * @param sink match result destination
   * @throws MalformedPatternException
   * @throws UnsupportedPatternException
   */
  public void testFindMatches(MatchResultSink sink, MatchOptions options)
    throws MalformedPatternException, UnsupportedPatternException {
    isTesting = true;
    try {
      findMatches(sink, options);
    }
    finally {
      isTesting = false;
    }
  }

  /**
   * Simple pausable/stoppable task queue. Tasks are executed from the END of the list
   * (LIFO order). Also brackets execution with PsiManager batch-files-processing mode.
   */
  class TaskScheduler implements MatchingProcess {
    private ArrayList<Runnable> tasks = new ArrayList<>();
    private boolean ended;
    private Runnable taskQueueEndAction;

    private boolean suspended;

    @Override
    public void stop() {
      ended = true;
    }

    @Override
    public void pause() {
      suspended = true;
    }

    @Override
    public void resume() {
      if (!suspended) return;
      suspended = false;
      executeNext();
    }

    @Override
    public boolean isSuspended() {
      return suspended;
    }

    @Override
    public boolean isEnded() {
      return ended;
    }

    void setTaskQueueEndAction(Runnable taskQueueEndAction) {
      this.taskQueueEndAction = taskQueueEndAction;
    }

    Runnable getTaskQueueEndAction() {
      return taskQueueEndAction;
    }

    void addOneTask(Runnable runnable) {
      // NOTE(review): clearSchedule() sets tasks to null; adding a task after the
      // scheduler ended would NPE — confirm callers never do this
      tasks.add(runnable);
    }

    void executeNext() {
      while (!suspended && !ended) {
        if (tasks.isEmpty()) {
          ended = true;
          break;
        }

        final Runnable task = tasks.remove(tasks.size() - 1);
        try {
          task.run();
        }
        catch (ProcessCanceledException | StructuralSearchException e) {
          ended = true;
          clearSchedule();
          throw e;
        }
        catch (Throwable th) {
          // keep scanning remaining files even if one task blows up
          LOG.error(th);
        }
      }

      if (ended) clearSchedule();
    }

    void init() {
      ended = false;
      suspended = false;
      PsiManager.getInstance(project).startBatchFilesProcessingMode();
    }

    private void clearSchedule() {
      if (tasks != null) {
        taskQueueEndAction.run();
        if (!project.isDisposed()) {
          PsiManager.getInstance(project).finishBatchFilesProcessingMode();
        }
        // null also serves as the "already cleared" flag for this method
        tasks = null;
      }
    }
  }

  /**
   * Initiates the matching process for given element
   * @param element the current search tree element
   */
  void match(PsiElement element, final Language language) {
    final MatchingStrategy strategy = matchContext.getPattern().getStrategy();
    final Language elementLanguage = element.getLanguage();
    if (strategy.continueMatching(element) && elementLanguage.isKindOf(language)) {
      visitor.matchContext(new SingleNodeIterator(element));
      return;
    }
    // element itself not eligible: recurse into children...
    for (PsiElement el = element.getFirstChild(); el != null; el = el.getNextSibling()) {
      match(el, language);
    }
    // ...and into any injected PSI (e.g. a language fragment inside a string literal)
    if (element instanceof PsiLanguageInjectionHost) {
      InjectedLanguageManager.getInstance(project).enumerate(element, (injectedPsi, places) -> match(injectedPsi, language));
    }
  }

  /**
   * Tests if given element is matched by given pattern starting from target variable.
   * Walks UP from {@code element} to find the node corresponding to the pattern's target,
   * then matches downward from there.
   * @throws MalformedPatternException
   * @throws UnsupportedPatternException
   */
  @NotNull
  public List<MatchResult> matchByDownUp(PsiElement element) throws MalformedPatternException, UnsupportedPatternException {
    final CollectingMatchResultSink sink = new CollectingMatchResultSink();
    final MatchOptions options = matchContext.getOptions();
    final CompiledPattern compiledPattern = prepareMatching(sink, options);
    matchContext.setShouldRecursivelyMatch(false);

    PsiElement targetNode = compiledPattern.getTargetNode();
    PsiElement elementToStartMatching = null;

    if (targetNode == null) {
      // no explicit target: the pattern must consist of a single node; climb until the
      // element's class matches that node's class
      targetNode = compiledPattern.getNodes().current();
      if (targetNode != null) {
        compiledPattern.getNodes().advance();
        assert !compiledPattern.getNodes().hasNext();
        compiledPattern.getNodes().rewind();

        element = element.getParent();
        if (element == null) {
          return Collections.emptyList();
        }
        while (element.getClass() != targetNode.getClass()) {
          element = element.getParent();
          if (element == null) return Collections.emptyList();
        }

        elementToStartMatching = element;
      }
    }
    else {
      final StructuralSearchProfile profile = StructuralSearchUtil.getProfileByPsiElement(element);
      if (profile == null) return Collections.emptyList();
      targetNode = profile.extendMatchedByDownUp(targetNode);

      MatchingHandler handler = null;

      // climb element and pattern target in lockstep while their classes keep matching
      // (or the pattern node is a typed variable whose handler accepts the element)
      while (element.getClass() == targetNode.getClass() ||
             compiledPattern.isTypedVar(targetNode) && compiledPattern.getHandler(targetNode).canMatch(targetNode, element, matchContext)) {
        handler = compiledPattern.getHandler(targetNode);
        handler.setPinnedElement(element);
        elementToStartMatching = element;
        if (handler instanceof TopLevelMatchingHandler) break;
        element = element.getParent();
        targetNode = targetNode.getParent();

        if (options.isLooseMatching()) {
          element = profile.updateCurrentNode(element);
          targetNode = profile.updateCurrentNode(targetNode);
        }
      }

      // the climb must have reached the pattern's top level for a valid starting point
      if (!(handler instanceof TopLevelMatchingHandler)) return Collections.emptyList();
    }

    assert targetNode != null : "Could not match down up when no target node";

    visitor.matchContext(new SingleNodeIterator(elementToStartMatching));
    matchContext.getSink().matchingFinished();
    return sink.getMatches();
  }

  // Task wrapping a single PSI element from a LocalSearchScope.
  private class MatchOnePsiFile extends MatchOneFile {
    private PsiElement file;

    MatchOnePsiFile(PsiElement file) {
      this.file = file;
    }

    @NotNull
    @Override
    protected List<PsiElement> getPsiElementsToProcess() {
      final PsiElement file = this.file;
      // release the field so the PsiElement is not retained after processing
      this.file = null;
      return new SmartList<>(file);
    }
  }

  // Task wrapping a VirtualFile found via the file index; PSI is resolved lazily in a
  // read action, yielding one root per language in the file's view provider.
  private class MatchOneVirtualFile extends MatchOneFile {
    private final VirtualFile myFile;

    public MatchOneVirtualFile(VirtualFile file) {
      myFile = file;
    }

    @NotNull
    @Override
    protected List<PsiElement> getPsiElementsToProcess() {
      return ReadAction.compute(() -> {
        if (!myFile.isValid()) {
          // file may be been deleted since search started
          return Collections.emptyList();
        }
        final PsiFile file = PsiManager.getInstance(project).findFile(myFile);
        if (file == null) {
          return Collections.emptyList();
        }

        final FileViewProvider viewProvider = file.getViewProvider();
        final List<PsiElement> elementsToProcess = new SmartList<>();

        for (Language lang : viewProvider.getLanguages()) {
          elementsToProcess.add(viewProvider.getPsi(lang));
        }

        return elementsToProcess;
      });
    }
  }

  // Base task: reports progress, then matches every PSI root of one file against the
  // pattern inside a smart-mode read action.
  private abstract class MatchOneFile implements Runnable {
    @Override
    public void run() {
      final List<PsiElement> files = getPsiElementsToProcess();

      if (progress != null) {
        progress.setFraction((double)scannedFilesCount / totalFilesToScan);
      }

      ++scannedFilesCount;

      if (files.isEmpty()) return;

      final LanguageFileType fileType = (LanguageFileType)matchContext.getOptions().getFileType();
      final Language patternLanguage = fileType.getLanguage();
      for (final PsiElement file : files) {
        if (file instanceof PsiFile) {
          matchContext.getSink().processFile((PsiFile)file);
        }

        myDumbService.runReadActionInSmartMode(() -> {
          if (!file.isValid()) return;
          final StructuralSearchProfile profile = StructuralSearchUtil.getProfileByLanguage(file.getLanguage());
          if (profile == null) {
            return;
          }
          match(profile.extendMatchOnePsiFile(file), patternLanguage);
        });
      }
    }

    @NotNull
    protected abstract List<PsiElement> getPsiElementsToProcess();
  }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.08.07 at 06:17:52 PM CEST
//

package org.w3._1998.math.mathml;

import javax.xml.bind.annotation.*;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * JAXB binding for the MathML {@code mtext.type} complex type.
 *
 * <p>The type allows mixed content: plain text interleaved with
 * {@code mglyph} and {@code malignmark} children, plus the common
 * MathML presentation attributes ({@code mathvariant}, {@code mathsize},
 * {@code mathcolor}, {@code mathbackground}) and the generic
 * {@code class}/{@code style}/{@code id}/{@code xref}/xlink-{@code href}
 * attributes. The corresponding schema fragment is:
 *
 * <pre>
 * &lt;complexType name="mtext.type"&gt;
 *   &lt;complexContent&gt;
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
 *       &lt;group ref="{http://www.w3.org/1998/Math/MathML}Glyph-alignmark.class" maxOccurs="unbounded" minOccurs="0"/&gt;
 *       &lt;attGroup ref="{http://www.w3.org/1998/Math/MathML}mtext.attlist"/&gt;
 *     &lt;/restriction&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "mtext.type", propOrder = { "content" })
@XmlRootElement(name = "mtext")
public class Mtext {

    @XmlElementRefs({
        @XmlElementRef(name = "malignmark", namespace = "http://www.w3.org/1998/Math/MathML", type = Malignmark.class, required = false),
        @XmlElementRef(name = "mglyph", namespace = "http://www.w3.org/1998/Math/MathML", type = Mglyph.class, required = false)
    })
    @XmlMixed
    protected List<Object> content;
    @XmlAttribute(name = "class")
    @XmlSchemaType(name = "NMTOKENS")
    protected List<String> clazzs;
    @XmlAttribute(name = "style")
    protected String style;
    @XmlAttribute(name = "xref")
    @XmlIDREF
    @XmlSchemaType(name = "IDREF")
    protected Object xref;
    @XmlAttribute(name = "id")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    @XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anyURI")
    protected String href;
    @XmlAttribute(name = "mathvariant")
    protected String mathvariant;
    @XmlAttribute(name = "mathsize")
    protected String mathsize;
    @XmlAttribute(name = "mathcolor")
    protected String mathcolor;
    @XmlAttribute(name = "mathbackground")
    protected String mathbackground;
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<>();

    /**
     * Returns the live, lazily-created mixed-content list of this element.
     *
     * <p>Modifications to the returned list are reflected in this JAXB object,
     * which is why no {@code setContent} method exists. Permitted element
     * types are {@link Mglyph}, {@link Malignmark} and {@link String}
     * (character data).
     *
     * @return the live content list, never {@code null}
     */
    public List<Object> getContent() {
        if (content == null) {
            content = new ArrayList<>();
        }
        return content;
    }

    /**
     * Returns the live, lazily-created list backing the {@code class}
     * attribute (a list of NMTOKEN strings).
     *
     * <p>Modifications to the returned list are reflected in this JAXB object,
     * which is why no setter exists for this property.
     *
     * @return the live list of class tokens, never {@code null}
     */
    public List<String> getClazzs() {
        if (clazzs == null) {
            clazzs = new ArrayList<>();
        }
        return clazzs;
    }

    /**
     * @return the {@code style} attribute, or {@code null} if unset
     */
    public String getStyle() {
        return style;
    }

    /**
     * @param value the new {@code style} attribute value (may be {@code null})
     */
    public void setStyle(String value) {
        style = value;
    }

    /**
     * @return the object referenced by the {@code xref} IDREF attribute,
     *         or {@code null} if unset
     */
    public Object getXref() {
        return xref;
    }

    /**
     * @param value the object to reference via the {@code xref} IDREF
     *        attribute (may be {@code null})
     */
    public void setXref(Object value) {
        xref = value;
    }

    /**
     * @return the {@code id} attribute, or {@code null} if unset
     */
    public String getId() {
        return id;
    }

    /**
     * @param value the new {@code id} attribute value (may be {@code null})
     */
    public void setId(String value) {
        id = value;
    }

    /**
     * @return the xlink {@code href} attribute, or {@code null} if unset
     */
    public String getHref() {
        return href;
    }

    /**
     * @param value the new xlink {@code href} attribute value (may be {@code null})
     */
    public void setHref(String value) {
        href = value;
    }

    /**
     * @return the {@code mathvariant} attribute, or {@code null} if unset
     */
    public String getMathvariant() {
        return mathvariant;
    }

    /**
     * @param value the new {@code mathvariant} attribute value (may be {@code null})
     */
    public void setMathvariant(String value) {
        mathvariant = value;
    }

    /**
     * @return the {@code mathsize} attribute, or {@code null} if unset
     */
    public String getMathsize() {
        return mathsize;
    }

    /**
     * @param value the new {@code mathsize} attribute value (may be {@code null})
     */
    public void setMathsize(String value) {
        mathsize = value;
    }

    /**
     * @return the {@code mathcolor} attribute, or {@code null} if unset
     */
    public String getMathcolor() {
        return mathcolor;
    }

    /**
     * @param value the new {@code mathcolor} attribute value (may be {@code null})
     */
    public void setMathcolor(String value) {
        mathcolor = value;
    }

    /**
     * @return the {@code mathbackground} attribute, or {@code null} if unset
     */
    public String getMathbackground() {
        return mathbackground;
    }

    /**
     * @param value the new {@code mathbackground} attribute value (may be {@code null})
     */
    public void setMathbackground(String value) {
        mathbackground = value;
    }

    /**
     * Returns the live map of wildcard attributes not bound to any typed
     * property of this class, keyed by attribute {@link QName} with string
     * values. New attributes may be added by mutating the map directly,
     * which is why no setter exists.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.netomi.jdiffutils.algorithm;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import org.netomi.jdiffutils.algorithm.MyersDiffAlgorithm;

/**
 * Tests for {@link MyersDiffAlgorithm}.
 *
 * @author Jordane Sarda
 * @author Luc Maisonobe
 */
public class MyersDiffAlgorithmTest {

    private List<String> before;
    private List<String> after;
    private int[] length;

    /** Verifies that the edit script length matches the known minimal distance for each fixture pair. */
    @Test
    public void testLength() {
        final MyersDiffAlgorithm<Character> comparator = new MyersDiffAlgorithm<Character>();
        for (int i = 0; i < before.size(); ++i) {
            Assert.assertEquals(length[i],
                    comparator.getEditScript(sequence(before.get(i)), sequence(after.get(i)), null).getModifications());
        }
    }

    /** Verifies that replaying each edit script transforms the "before" string into the "after" string. */
    @Test
    public void testExecution() {
        final ExecutionVisitor<Character> ev = new ExecutionVisitor<Character>();
        for (int i = 0; i < before.size(); ++i) {
            ev.setList(sequence(before.get(i)));
            new MyersDiffAlgorithm<Character>().getEditScript(sequence(before.get(i)), sequence(after.get(i)), null).visit(ev);
            Assert.assertEquals(after.get(i), ev.getString());
        }
    }

    /**
     * Verifies minimality: after applying {@code nbCom} random insertions/deletions to a sentence,
     * the computed edit script never needs more modifications than were actually performed.
     */
    @Test
    public void testMinimal() {
        // Plain string literals; wrapping them in `new String(...)` only created needless copies.
        final String[] shadokAlph = new String[] { "GA", "BU", "ZO", "MEU" };
        final List<String> sentenceBefore = new ArrayList<String>();
        final List<String> sentenceAfter = new ArrayList<String>();
        sentenceBefore.addAll(Arrays.asList(
                shadokAlph[0], shadokAlph[2], shadokAlph[3], shadokAlph[1], shadokAlph[0],
                shadokAlph[0], shadokAlph[2], shadokAlph[1], shadokAlph[3], shadokAlph[0],
                shadokAlph[2], shadokAlph[1], shadokAlph[3], shadokAlph[2], shadokAlph[2],
                shadokAlph[0], shadokAlph[1], shadokAlph[3], shadokAlph[0], shadokAlph[3]));

        // Fixed seed keeps the test deterministic.
        final Random random = new Random(4564634237452342L);

        for (int nbCom = 0; nbCom <= 40; nbCom += 5) {
            sentenceAfter.clear();
            sentenceAfter.addAll(sentenceBefore);
            for (int i = 0; i < nbCom; i++) {
                if (random.nextInt(2) == 0) {
                    sentenceAfter.add(random.nextInt(sentenceAfter.size() + 1), shadokAlph[random.nextInt(4)]);
                } else {
                    sentenceAfter.remove(random.nextInt(sentenceAfter.size()));
                }
            }

            final MyersDiffAlgorithm<String> comparator = new MyersDiffAlgorithm<String>();
            Assert.assertTrue(comparator.getEditScript(sentenceBefore, sentenceAfter, null).getModifications() <= nbCom);
        }
    }

    /** Exhaustively diffs every pair of sentences up to a fixed length and checks the replayed result. */
    @Test
    public void testShadok() {
        final int lgMax = 5;
        final String[] shadokAlph = new String[] { "GA", "BU", "ZO", "MEU" };
        List<List<String>> shadokSentences = new ArrayList<List<String>>();

        // Build all sentences of length < lgMax over the 4-word alphabet.
        for (int lg = 0; lg < lgMax; ++lg) {
            final List<List<String>> newTab = new ArrayList<List<String>>();
            newTab.add(new ArrayList<String>());
            for (final String element : shadokAlph) {
                for (final List<String> sentence : shadokSentences) {
                    final List<String> newSentence = new ArrayList<String>(sentence);
                    newSentence.add(element);
                    newTab.add(newSentence);
                }
            }
            shadokSentences = newTab;
        }

        final ExecutionVisitor<String> ev = new ExecutionVisitor<String>();
        for (int i = 0; i < shadokSentences.size(); ++i) {
            for (int j = 0; j < shadokSentences.size(); ++j) {
                ev.setList(shadokSentences.get(i));
                new MyersDiffAlgorithm<String>().getEditScript(shadokSentences.get(i), shadokSentences.get(j), null).visit(ev);

                final StringBuilder concat = new StringBuilder();
                for (final String s : shadokSentences.get(j)) {
                    concat.append(s);
                }
                Assert.assertEquals(concat.toString(), ev.getString());
            }
        }
    }

    /** Converts a string into the list of its characters. */
    private List<Character> sequence(final String string) {
        final List<Character> list = new ArrayList<Character>();
        for (int i = 0; i < string.length(); ++i) {
            // Character.valueOf uses the boxing cache; the Character(char) constructor is deprecated.
            list.add(Character.valueOf(string.charAt(i)));
        }
        return list;
    }

    /**
     * Replays an edit script against a working copy of the original list,
     * so the result can be compared with the expected target sequence.
     * Static: it never touches the enclosing test instance.
     */
    private static class ExecutionVisitor<T> implements CommandVisitor<T> {

        private List<T> v;
        private int index;

        /** Resets the working copy to the given list and rewinds the cursor. */
        public void setList(final List<T> array) {
            v = new ArrayList<T>(array);
            index = 0;
        }

        public void visitInsertCommand(final T object) {
            v.add(index++, object);
        }

        public void visitKeepCommand(final T object) {
            ++index;
        }

        public void visitDeleteCommand(final T object) {
            v.remove(index);
        }

        /** @return the concatenation of the working copy's elements. */
        public String getString() {
            // StringBuilder: no synchronization needed, StringBuffer was overkill.
            final StringBuilder buffer = new StringBuilder();
            for (final T c : v) {
                buffer.append(c);
            }
            return buffer.toString();
        }

        @Override
        public void startVisit() {
        }

        @Override
        public void finishVisit() {
        }
    }

    @Before
    public void setUp() {
        // Arrays.asList varargs instead of an explicit String[] wrapper.
        before = Arrays.asList(
                "bottle", "nematode knowledge", "", "aa", "prefixed string",
                "ABCABBA", "glop glop", "coq", "spider-man");

        after = Arrays.asList(
                "noodle", "empty bottle", "", "C", "prefix",
                "CBABAC", "pas glop pas glop", "ane", "klingon");

        length = new int[] { 6, 16, 0, 3, 9, 5, 8, 6, 13 };
    }

    @After
    public void tearDown() {
        before = null;
        after = null;
        length = null;
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.metadata; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import 
java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Predicate; import java.util.stream.Collectors; public class IndexNameExpressionResolver extends AbstractComponent { private final List<ExpressionResolver> expressionResolvers; private final DateMathExpressionResolver dateMathExpressionResolver; private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(IndexNameExpressionResolver.class)); public IndexNameExpressionResolver(Settings settings) { super(settings); expressionResolvers = Arrays.asList( dateMathExpressionResolver = new DateMathExpressionResolver(settings), new WildcardExpressionResolver() ); } /** * Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, String...)}, but the index expressions and options * are encapsulated in the specified request. */ public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { Context context = new Context(state, request.indicesOptions()); return concreteIndexNames(context, request.indices()); } /** * Same as {@link #concreteIndices(ClusterState, IndicesOptions, String...)}, but the index expressions and options * are encapsulated in the specified request. */ public Index[] concreteIndices(ClusterState state, IndicesRequest request) { Context context = new Context(state, request.indicesOptions()); return concreteIndices(context, request.indices()); } /** * Translates the provided index expression into actual concrete indices, properly deduplicated. * * @param state the cluster state containing all the data to resolve to expressions to concrete indices * @param options defines how the aliases or indices need to be resolved to concrete indices * @param indexExpressions expressions that can be resolved to alias or index names. 
* @return the resolved concrete indices based on the cluster state, indices options and index expressions * @throws IndexNotFoundException if one of the index expressions is pointing to a missing index or alias and the * provided indices options in the context don't allow such a case, or if the final result of the indices resolution * contains no indices and the indices options in the context don't allow such a case. * @throws IllegalArgumentException if one of the aliases resolve to multiple indices and the provided * indices options in the context don't allow such a case. */ public String[] concreteIndexNames(ClusterState state, IndicesOptions options, String... indexExpressions) { Context context = new Context(state, options); return concreteIndexNames(context, indexExpressions); } /** * Translates the provided index expression into actual concrete indices, properly deduplicated. * * @param state the cluster state containing all the data to resolve to expressions to concrete indices * @param options defines how the aliases or indices need to be resolved to concrete indices * @param indexExpressions expressions that can be resolved to alias or index names. * @return the resolved concrete indices based on the cluster state, indices options and index expressions * @throws IndexNotFoundException if one of the index expressions is pointing to a missing index or alias and the * provided indices options in the context don't allow such a case, or if the final result of the indices resolution * contains no indices and the indices options in the context don't allow such a case. * @throws IllegalArgumentException if one of the aliases resolve to multiple indices and the provided * indices options in the context don't allow such a case. */ public Index[] concreteIndices(ClusterState state, IndicesOptions options, String... 
indexExpressions) { Context context = new Context(state, options); return concreteIndices(context, indexExpressions); } /** * Translates the provided index expression into actual concrete indices, properly deduplicated. * * @param state the cluster state containing all the data to resolve to expressions to concrete indices * @param options defines how the aliases or indices need to be resolved to concrete indices * @param startTime The start of the request where concrete indices is being invoked for * @param indexExpressions expressions that can be resolved to alias or index names. * @return the resolved concrete indices based on the cluster state, indices options and index expressions * provided indices options in the context don't allow such a case, or if the final result of the indices resolution * contains no indices and the indices options in the context don't allow such a case. * @throws IllegalArgumentException if one of the aliases resolve to multiple indices and the provided * indices options in the context don't allow such a case. */ public Index[] concreteIndices(ClusterState state, IndicesOptions options, long startTime, String... indexExpressions) { Context context = new Context(state, options, startTime); return concreteIndices(context, indexExpressions); } String[] concreteIndexNames(Context context, String... indexExpressions) { Index[] indexes = concreteIndices(context, indexExpressions); String[] names = new String[indexes.length]; for (int i = 0; i < indexes.length; i++) { names[i] = indexes[i].getName(); } return names; } Index[] concreteIndices(Context context, String... 
indexExpressions) { if (indexExpressions == null || indexExpressions.length == 0) { indexExpressions = new String[]{MetaData.ALL}; } MetaData metaData = context.getState().metaData(); IndicesOptions options = context.getOptions(); boolean failClosed = options.forbidClosedIndices() && options.ignoreUnavailable() == false; boolean failNoIndices = options.ignoreUnavailable() == false; // If only one index is specified then whether we fail a request if an index is missing depends on the allow_no_indices // option. At some point we should change this, because there shouldn't be a reason why whether a single index // or multiple indices are specified yield different behaviour. if (indexExpressions.length == 1) { failNoIndices = options.allowNoIndices() == false; } List<String> expressions = Arrays.asList(indexExpressions); for (ExpressionResolver expressionResolver : expressionResolvers) { expressions = expressionResolver.resolve(context, expressions); } if (expressions.isEmpty()) { if (!options.allowNoIndices()) { IndexNotFoundException infe = new IndexNotFoundException((String)null); infe.setResources("index_expression", indexExpressions); throw infe; } else { return Index.EMPTY_ARRAY; } } final Set<Index> concreteIndices = new HashSet<>(expressions.size()); for (String expression : expressions) { AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(expression); if (aliasOrIndex == null) { if (failNoIndices) { IndexNotFoundException infe = new IndexNotFoundException(expression); infe.setResources("index_expression", expression); throw infe; } else { continue; } } Collection<IndexMetaData> resolvedIndices = aliasOrIndex.getIndices(); if (resolvedIndices.size() > 1 && !options.allowAliasesToMultipleIndices()) { String[] indexNames = new String[resolvedIndices.size()]; int i = 0; for (IndexMetaData indexMetaData : resolvedIndices) { indexNames[i++] = indexMetaData.getIndex().getName(); } throw new IllegalArgumentException("Alias [" + expression + "] has more 
than one indices associated with it [" + Arrays.toString(indexNames) + "], can't execute a single index op"); } for (IndexMetaData index : resolvedIndices) { if (index.getState() == IndexMetaData.State.CLOSE) { if (failClosed) { throw new IndexClosedException(index.getIndex()); } else { if (options.forbidClosedIndices() == false) { concreteIndices.add(index.getIndex()); } } } else if (index.getState() == IndexMetaData.State.OPEN) { concreteIndices.add(index.getIndex()); } else { throw new IllegalStateException("index state [" + index.getState() + "] not supported"); } } } if (options.allowNoIndices() == false && concreteIndices.isEmpty()) { IndexNotFoundException infe = new IndexNotFoundException((String)null); infe.setResources("index_expression", indexExpressions); throw infe; } return concreteIndices.toArray(new Index[concreteIndices.size()]); } /** * Utility method that allows to resolve an index expression to its corresponding single concrete index. * Callers should make sure they provide proper {@link org.elasticsearch.action.support.IndicesOptions} * that require a single index as a result. The indices resolution must in fact return a single index when * using this method, an {@link IllegalArgumentException} gets thrown otherwise. * * @param state the cluster state containing all the data to resolve to expression to a concrete index * @param request The request that defines how the an alias or an index need to be resolved to a concrete index * and the expression that can be resolved to an alias or an index name. * @throws IllegalArgumentException if the index resolution lead to more than one index * @return the concrete index obtained as a result of the index resolution */ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { String indexExpression = request.indices() != null && request.indices().length > 0 ? 
request.indices()[0] : null; Index[] indices = concreteIndices(state, request.indicesOptions(), indexExpression); if (indices.length != 1) { throw new IllegalArgumentException("unable to return a single index as the index and options provided got resolved to multiple indices"); } return indices[0]; } /** * @return whether the specified alias or index exists. If the alias or index contains datemath then that is resolved too. */ public boolean hasIndexOrAlias(String aliasOrIndex, ClusterState state) { Context context = new Context(state, IndicesOptions.lenientExpandOpen()); String resolvedAliasOrIndex = dateMathExpressionResolver.resolveExpression(aliasOrIndex, context); return state.metaData().getAliasAndIndexLookup().containsKey(resolvedAliasOrIndex); } /** * @return If the specified string is data math expression then this method returns the resolved expression. */ public String resolveDateMathExpression(String dateExpression) { // The data math expression resolver doesn't rely on cluster state or indices options, because // it just resolves the date math to an actual date. return dateMathExpressionResolver.resolveExpression(dateExpression, new Context(null, null)); } /** * Iterates through the list of indices and selects the effective list of filtering aliases for the * given index. * <p>Only aliases with filters are returned. If the indices list contains a non-filtering reference to * the index itself - null is returned. Returns <tt>null</tt> if no filtering is required. */ public String[] filteringAliases(ClusterState state, String index, String... expressions) { return indexAliases(state, index, AliasMetaData::filteringRequired, false, expressions); } /** * Iterates through the list of indices and selects the effective list of required aliases for the * given index. * <p>Only aliases where the given predicate tests successfully are returned. If the indices list contains a non-required reference to * the index itself - null is returned. 
Returns <tt>null</tt> if no filtering is required. */ public String[] indexAliases(ClusterState state, String index, Predicate<AliasMetaData> requiredAlias, boolean skipIdentity, String... expressions) { // expand the aliases wildcard List<String> resolvedExpressions = expressions != null ? Arrays.asList(expressions) : Collections.emptyList(); Context context = new Context(state, IndicesOptions.lenientExpandOpen(), true); for (ExpressionResolver expressionResolver : expressionResolvers) { resolvedExpressions = expressionResolver.resolve(context, resolvedExpressions); } if (isAllIndices(resolvedExpressions)) { return null; } final IndexMetaData indexMetaData = state.metaData().getIndices().get(index); if (indexMetaData == null) { // Shouldn't happen throw new IndexNotFoundException(index); } // optimize for the most common single index/alias scenario if (resolvedExpressions.size() == 1) { String alias = resolvedExpressions.get(0); AliasMetaData aliasMetaData = indexMetaData.getAliases().get(alias); if (aliasMetaData == null || requiredAlias.test(aliasMetaData) == false) { return null; } return new String[]{alias}; } List<String> aliases = null; for (String alias : resolvedExpressions) { if (alias.equals(index)) { if (skipIdentity) { continue; } else { return null; } } AliasMetaData aliasMetaData = indexMetaData.getAliases().get(alias); // Check that this is an alias for the current index // Otherwise - skip it if (aliasMetaData != null) { if (requiredAlias.test(aliasMetaData)) { // If required - add it to the list of aliases if (aliases == null) { aliases = new ArrayList<>(); } aliases.add(alias); } else { // If not, we have a non required alias for this index - no futher checking needed return null; } } } if (aliases == null) { return null; } return aliases.toArray(new String[aliases.size()]); } /** * Resolves the search routing if in the expression aliases are used. 
If expressions point to concrete indices
     * or aliases with no routing defined the specified routing is used.
     *
     * @param state       the current cluster state
     * @param routing     the request-level routing value(s), comma separated; may be null
     * @param expressions index/alias expressions to resolve
     * @return routing values grouped by concrete index
     */
    public Map<String, Set<String>> resolveSearchRouting(ClusterState state, @Nullable String routing, String... expressions) {
        List<String> resolvedExpressions = expressions != null ? Arrays.asList(expressions) : Collections.<String>emptyList();
        Context context = new Context(state, IndicesOptions.lenientExpandOpen());
        for (ExpressionResolver expressionResolver : expressionResolvers) {
            resolvedExpressions = expressionResolver.resolve(context, resolvedExpressions);
        }

        // "all indices": every concrete index simply gets the request-level routing (if any)
        if (isAllIndices(resolvedExpressions)) {
            return resolveSearchRoutingAllIndices(state.metaData(), routing);
        }

        Map<String, Set<String>> routings = null;
        Set<String> paramRouting = null;
        // List of indices that don't require any routing
        Set<String> norouting = new HashSet<>();
        if (routing != null) {
            paramRouting = Strings.splitStringByCommaToSet(routing);
        }

        for (String expression : resolvedExpressions) {
            AliasOrIndex aliasOrIndex = state.metaData().getAliasAndIndexLookup().get(expression);
            if (aliasOrIndex != null && aliasOrIndex.isAlias()) {
                AliasOrIndex.Alias alias = (AliasOrIndex.Alias) aliasOrIndex;
                for (Tuple<String, AliasMetaData> item : alias.getConcreteIndexAndAliasMetaDatas()) {
                    String concreteIndex = item.v1();
                    AliasMetaData aliasMetaData = item.v2();
                    // an index already marked as "no routing required" wins over any alias routing
                    if (!norouting.contains(concreteIndex)) {
                        if (!aliasMetaData.searchRoutingValues().isEmpty()) {
                            // Routing alias
                            if (routings == null) {
                                routings = new HashMap<>();
                            }
                            Set<String> r = routings.get(concreteIndex);
                            if (r == null) {
                                r = new HashSet<>();
                                routings.put(concreteIndex, r);
                            }
                            r.addAll(aliasMetaData.searchRoutingValues());
                            if (paramRouting != null) {
                                // intersect the alias routing with the request-level routing
                                r.retainAll(paramRouting);
                            }
                            if (r.isEmpty()) {
                                routings.remove(concreteIndex);
                            }
                        } else {
                            // Non-routing alias
                            // NOTE(review): this inner contains() check looks redundant — the outer
                            // branch already established the index is absent and nothing added it since.
                            if (!norouting.contains(concreteIndex)) {
                                norouting.add(concreteIndex);
                                if (paramRouting != null) {
                                    Set<String> r = new HashSet<>(paramRouting);
                                    if (routings == null) {
                                        routings = new HashMap<>();
                                    }
                                    routings.put(concreteIndex, r);
                                } else {
                                    if (routings != null) {
                                        routings.remove(concreteIndex);
                                    }
                                }
                            }
                        }
                    }
                }
            } else {
                // Index
                if (!norouting.contains(expression)) {
                    norouting.add(expression);
                    if (paramRouting != null) {
                        // a concrete index only carries the request-level routing
                        Set<String> r = new HashSet<>(paramRouting);
                        if (routings == null) {
                            routings = new HashMap<>();
                        }
                        routings.put(expression, r);
                    } else {
                        if (routings != null) {
                            routings.remove(expression);
                        }
                    }
                }
            }
        }
        if (routings == null || routings.isEmpty()) {
            return null;
        }
        return routings;
    }

    /**
     * Sets the same routing for all indices
     */
    private Map<String, Set<String>> resolveSearchRoutingAllIndices(MetaData metaData, String routing) {
        if (routing != null) {
            Set<String> r = Strings.splitStringByCommaToSet(routing);
            Map<String, Set<String>> routings = new HashMap<>();
            String[] concreteIndices = metaData.getConcreteAllIndices();
            for (String index : concreteIndices) {
                routings.put(index, r);
            }
            return routings;
        }
        return null;
    }

    /**
     * Identifies whether the array containing index names given as argument refers to all indices
     * The empty or null array identifies all indices
     *
     * @param aliasesOrIndices the array containing index names
     * @return true if the provided array maps to all indices, false otherwise
     */
    public static boolean isAllIndices(List<String> aliasesOrIndices) {
        return aliasesOrIndices == null || aliasesOrIndices.isEmpty() || isExplicitAllPattern(aliasesOrIndices);
    }

    /**
     * Identifies whether the array containing index names given as argument explicitly refers to all indices
     * The empty or null array doesn't explicitly map to all indices
     *
     * @param aliasesOrIndices the array containing index names
     * @return true if the provided array explicitly maps to all indices, false otherwise
     */
    static boolean isExplicitAllPattern(List<String> aliasesOrIndices) {
        return aliasesOrIndices != null && aliasesOrIndices.size() == 1 && MetaData.ALL.equals(aliasesOrIndices.get(0));
    }

    /**
     *
Identifies whether the first argument (an array containing index names) is a pattern that matches all indices
     *
     * @param metaData         the cluster metadata, used for the total concrete-index count
     * @param indicesOrAliases the array containing index names
     * @param concreteIndices  array containing the concrete indices that the first argument refers to
     * @return true if the first argument is a pattern that maps to all available indices, false otherwise
     */
    boolean isPatternMatchingAllIndices(MetaData metaData, String[] indicesOrAliases, String[] concreteIndices) {
        // if we end up matching on all indices, check, if its a wildcard parameter, or a "-something" structure
        if (concreteIndices.length == metaData.getConcreteAllIndices().length && indicesOrAliases.length > 0) {

            //we might have something like /-test1,+test1 that would identify all indices
            //or something like /-test1 with test1 index missing and IndicesOptions.lenient()
            if (indicesOrAliases[0].charAt(0) == '-') {
                return true;
            }

            //otherwise we check if there's any simple regex
            for (String indexOrAlias : indicesOrAliases) {
                if (Regex.isSimpleMatchPattern(indexOrAlias)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Per-resolution state shared by all {@link ExpressionResolver}s: the cluster state, the
     * options governing wildcard expansion, the resolution start time (consumed by date math
     * expressions) and whether aliases should be preserved instead of expanded.
     */
    static final class Context {

        private final ClusterState state;
        private final IndicesOptions options;
        // captured once so every date-math expression of one request resolves against the same instant
        private final long startTime;
        private final boolean preserveAliases;

        Context(ClusterState state, IndicesOptions options) {
            this(state, options, System.currentTimeMillis());
        }

        Context(ClusterState state, IndicesOptions options, boolean preserveAliases) {
            this(state, options, System.currentTimeMillis(), preserveAliases);
        }

        Context(ClusterState state, IndicesOptions options, long startTime) {
            this(state, options, startTime, false);
        }

        Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases) {
            this.state = state;
            this.options = options;
            this.startTime = startTime;
            this.preserveAliases = preserveAliases;
        }

        public ClusterState getState() {
            return state;
        }

        public IndicesOptions getOptions() {
            return options;
        }

        public long getStartTime() {
            return startTime;
        }

        /**
         * This is used to prevent resolving aliases to concrete indices but this also means
         * that we might return aliases that point to a closed index. This is currently only used
         * by {@link #filteringAliases(ClusterState, String, String...)} since it's the only one that needs aliases
         */
        boolean isPreserveAliases() {
            return preserveAliases;
        }
    }

    private interface ExpressionResolver {

        /**
         * Resolves the list of expressions into other expressions if possible (possible concrete indices and aliases, but
         * that isn't required). The provided implementations can also be left untouched.
         *
         * @return a new list with expressions based on the provided expressions
         */
        List<String> resolve(Context context, List<String> expressions);
    }

    /**
     * Resolves alias/index name expressions with wildcards into the corresponding concrete indices/aliases
     */
    static final class WildcardExpressionResolver implements ExpressionResolver {

        @Override
        public List<String> resolve(Context context, List<String> expressions) {
            IndicesOptions options = context.getOptions();
            MetaData metaData = context.getState().metaData();
            // wildcard expansion disabled entirely -> hand the expressions through untouched
            if (options.expandWildcardsClosed() == false && options.expandWildcardsOpen() == false) {
                return expressions;
            }
            // no expressions, "_all" or a bare "*" -> every index permitted by the options
            if (isEmptyOrTrivialWildcard(expressions)) {
                return resolveEmptyOrTrivialWildcard(options, metaData, true);
            }
            Set<String> result = innerResolve(context, expressions, options, metaData);
            if (result == null) {
                // nothing required expansion; return the input unchanged
                return expressions;
            }
            if (result.isEmpty() && !options.allowNoIndices()) {
                IndexNotFoundException infe = new IndexNotFoundException((String)null);
                infe.setResources("index_or_alias", expressions.toArray(new String[0]));
                throw infe;
            }
            return new ArrayList<>(result);
        }

        /**
         * Expands wildcard / '+' / '-' expressions left to right. Returns {@code null}
         * when no expression needed expansion, otherwise the accumulated set of names.
         */
        private Set<String> innerResolve(Context context, List<String> expressions, IndicesOptions options, MetaData metaData) {
            Set<String> result = null;
            boolean wildcardSeen = false;
            boolean plusSeen = false;
            for (int i = 0; i < expressions.size(); i++) {
                String expression = expressions.get(i);
                // fast path: the expression names an existing index/alias verbatim
                if (aliasOrIndexExists(metaData, expression)) {
                    if (result != null) {
                        result.add(expression);
                    }
                    continue;
                }
                if (Strings.isEmpty(expression)) {
                    throw infe(expression);
                }
                boolean add = true;
                if (expression.charAt(0) == '+') {
                    // if its the first, add empty result set
                    plusSeen = true;
                    if (i == 0) {
                        result = new HashSet<>();
                    }
                    expression = expression.substring(1);
                } else if (expression.charAt(0) == '-') {
                    // if there is a negation without a wildcard being previously seen, add it verbatim,
                    // otherwise return the expression
                    if (wildcardSeen) {
                        add = false;
                        expression = expression.substring(1);
                    } else {
                        add = true;
                    }
                }
                if (result == null) {
                    // add all the previous ones...
                    result = new HashSet<>(expressions.subList(0, i));
                }
                if (!Regex.isSimpleMatchPattern(expression)) {
                    // plain (non-wildcard) name that doesn't exist in the cluster
                    if (!unavailableIgnoredOrExists(options, metaData, expression)) {
                        throw infe(expression);
                    }
                    if (add) {
                        result.add(expression);
                    } else {
                        result.remove(expression);
                    }
                    continue;
                }
                final IndexMetaData.State excludeState = excludeState(options);
                final Map<String, AliasOrIndex> matches = matches(metaData, expression);
                Set<String> expand = expand(context, excludeState, matches);
                if (add) {
                    result.addAll(expand);
                } else {
                    result.removeAll(expand);
                }

                if (!noIndicesAllowedOrMatches(options, matches)) {
                    throw infe(expression);
                }

                if (Regex.isSimpleMatchPattern(expression)) {
                    wildcardSeen = true;
                }
            }
            if (plusSeen) {
                DEPRECATION_LOGGER.deprecated("support for '+' as part of index expressions is deprecated");
            }
            return result;
        }

        private boolean noIndicesAllowedOrMatches(IndicesOptions options, Map<String, AliasOrIndex> matches) {
            return options.allowNoIndices() || !matches.isEmpty();
        }

        private boolean unavailableIgnoredOrExists(IndicesOptions options, MetaData metaData, String expression) {
            return options.ignoreUnavailable() || aliasOrIndexExists(metaData, expression);
        }

        private boolean aliasOrIndexExists(MetaData metaData, String expression) {
            return metaData.getAliasAndIndexLookup().containsKey(expression);
        }

        private static IndexNotFoundException infe(String expression) {
            IndexNotFoundException infe = new IndexNotFoundException(expression);
            infe.setResources("index_or_alias", expression);
            return infe;
        }

        /**
         * Maps the wildcard options to the index state that must be excluded from expansion,
         * or {@code null} when both open and closed indices are included.
         */
        private static IndexMetaData.State excludeState(IndicesOptions options) {
            final IndexMetaData.State excludeState;
            if (options.expandWildcardsOpen() && options.expandWildcardsClosed()) {
                excludeState = null;
            } else if (options.expandWildcardsOpen() && options.expandWildcardsClosed() == false) {
                excludeState = IndexMetaData.State.CLOSE;
            } else if (options.expandWildcardsClosed() && options.expandWildcardsOpen() == false) {
                excludeState = IndexMetaData.State.OPEN;
            } else {
                assert false : "this shouldn't get called if wildcards expand to none";
                excludeState = null;
            }
            return excludeState;
        }

        private static Map<String, AliasOrIndex> matches(MetaData metaData, String expression) {
            if (Regex.isMatchAllPattern(expression)) {
                // Can only happen if the expressions was initially: '-*'
                return metaData.getAliasAndIndexLookup();
            } else if (expression.indexOf("*") == expression.length() - 1) {
                // "prefix*" -> cheap sorted-map range scan instead of a regex test per entry
                return suffixWildcard(metaData, expression);
            } else {
                return otherWildcard(metaData, expression);
            }
        }

        private static Map<String, AliasOrIndex> suffixWildcard(MetaData metaData, String expression) {
            assert expression.length() >= 2 : "expression [" + expression + "] should have at least a length of 2";
            String fromPrefix = expression.substring(0, expression.length() - 1);
            char[] toPrefixCharArr = fromPrefix.toCharArray();
            // bump the last char to obtain the exclusive upper bound of the prefix range
            toPrefixCharArr[toPrefixCharArr.length - 1]++;
            String toPrefix = new String(toPrefixCharArr);
            return metaData.getAliasAndIndexLookup().subMap(fromPrefix, toPrefix);
        }

        private static Map<String, AliasOrIndex> otherWildcard(MetaData metaData, String expression) {
            final String pattern = expression;
            return metaData.getAliasAndIndexLookup()
                .entrySet()
                .stream()
                .filter(e -> Regex.simpleMatch(pattern, e.getKey()))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        }

        private static Set<String> expand(Context context, IndexMetaData.State excludeState, Map<String, AliasOrIndex> matches) {
            Set<String> expand = new HashSet<>();
            for (Map.Entry<String, AliasOrIndex> entry : matches.entrySet()) {
                AliasOrIndex aliasOrIndex = entry.getValue();
                if (context.isPreserveAliases() && aliasOrIndex.isAlias()) {
                    // keep the alias name itself rather than its backing indices
                    expand.add(entry.getKey());
                } else {
                    for (IndexMetaData meta : aliasOrIndex.getIndices()) {
                        if (excludeState == null || meta.getState() != excludeState) {
                            expand.add(meta.getIndex().getName());
                        }
                    }
                }
            }
            return expand;
        }

        private boolean isEmptyOrTrivialWildcard(List<String> expressions) {
            return expressions.isEmpty() || (expressions.size() == 1
                && (MetaData.ALL.equals(expressions.get(0)) || Regex.isMatchAllPattern(expressions.get(0))));
        }

        private List<String> resolveEmptyOrTrivialWildcard(IndicesOptions options, MetaData metaData, boolean assertEmpty) {
            if (options.expandWildcardsOpen() && options.expandWildcardsClosed()) {
                return Arrays.asList(metaData.getConcreteAllIndices());
            } else if (options.expandWildcardsOpen()) {
                return Arrays.asList(metaData.getConcreteAllOpenIndices());
            } else if (options.expandWildcardsClosed()) {
                return Arrays.asList(metaData.getConcreteAllClosedIndices());
            } else {
                assert assertEmpty : "Shouldn't end up here";
                return Collections.emptyList();
            }
        }
    }

    /**
     * Resolves date-math expressions of the form {@code <static{date-math{format|tz}}>}
     * into concrete index names, evaluated against the resolution start time.
     */
    static final class DateMathExpressionResolver implements ExpressionResolver {

        private static final String EXPRESSION_LEFT_BOUND = "<";
        private static final String EXPRESSION_RIGHT_BOUND = ">";
        private static final char LEFT_BOUND = '{';
        private static final char RIGHT_BOUND = '}';
        private static final char ESCAPE_CHAR = '\\';
        private static final char TIME_ZONE_BOUND = '|';

        private final DateTimeZone defaultTimeZone;
        private final String defaultDateFormatterPattern;
        private final DateTimeFormatter defaultDateFormatter;

        DateMathExpressionResolver(Settings settings) {
            // defaults used when a placeholder carries no explicit {format|tz} section
            String defaultTimeZoneId = settings.get("date_math_expression_resolver.default_time_zone", "UTC");
            this.defaultTimeZone = DateTimeZone.forID(defaultTimeZoneId);
            defaultDateFormatterPattern = settings.get("date_math_expression_resolver.default_date_format", "YYYY.MM.dd");
            this.defaultDateFormatter = DateTimeFormat.forPattern(defaultDateFormatterPattern);
        }

        @Override
        public List<String> resolve(final Context context, List<String> expressions) {
            List<String> result = new ArrayList<>(expressions.size());
            for (String expression : expressions) {
                result.add(resolveExpression(expression, context));
            }
            return result;
        }

        @SuppressWarnings("fallthrough")
        String resolveExpression(String expression, final Context context) {
            // only expressions wrapped in <...> carry date math
            if (expression.startsWith(EXPRESSION_LEFT_BOUND) == false || expression.endsWith(EXPRESSION_RIGHT_BOUND) == false) {
                return expression;
            }

            // hand-rolled scanner over the characters between '<' and '>'
            boolean escape = false;          // next char is escaped by a preceding backslash
            boolean inDateFormat = false;    // inside the optional {format|tz} section of a placeholder
            boolean inPlaceHolder = false;   // inside a {date-math} placeholder
            final StringBuilder beforePlaceHolderSb = new StringBuilder();
            StringBuilder inPlaceHolderSb = new StringBuilder();
            final char[] text = expression.toCharArray();
            final int from = 1;
            final int length = text.length - 1;
            // NOTE(review): error messages below use new String(text, from, length), whose count
            // includes the trailing '>' — presumably meant to strip both bounds; confirm intent.
            for (int i = from; i < length; i++) {
                boolean escapedChar = escape;
                if (escape) {
                    escape = false;
                }

                char c = text[i];
                if (c == ESCAPE_CHAR) {
                    if (escapedChar) {
                        // double backslash -> literal backslash
                        beforePlaceHolderSb.append(c);
                        escape = false;
                    } else {
                        escape = true;
                    }
                    continue;
                }
                if (inPlaceHolder) {
                    switch (c) {
                        case LEFT_BOUND:
                            if (inDateFormat && escapedChar) {
                                inPlaceHolderSb.append(c);
                            } else if (!inDateFormat) {
                                inDateFormat = true;
                                inPlaceHolderSb.append(c);
                            } else {
                                throw new ElasticsearchParseException("invalid dynamic name expression [{}]. invalid character in placeholder at position [{}]", new String(text, from, length), i);
                            }
                            break;

                        case RIGHT_BOUND:
                            if (inDateFormat && escapedChar) {
                                inPlaceHolderSb.append(c);
                            } else if (inDateFormat) {
                                inDateFormat = false;
                                inPlaceHolderSb.append(c);
                            } else {
                                // end of placeholder: split into math expression plus optional format/tz, then evaluate
                                String inPlaceHolderString = inPlaceHolderSb.toString();
                                int dateTimeFormatLeftBoundIndex = inPlaceHolderString.indexOf(LEFT_BOUND);
                                String mathExpression;
                                String dateFormatterPattern;
                                DateTimeFormatter dateFormatter;
                                final DateTimeZone timeZone;
                                if (dateTimeFormatLeftBoundIndex < 0) {
                                    // no explicit format -> configured defaults
                                    mathExpression = inPlaceHolderString;
                                    dateFormatterPattern = defaultDateFormatterPattern;
                                    dateFormatter = defaultDateFormatter;
                                    timeZone = defaultTimeZone;
                                } else {
                                    if (inPlaceHolderString.lastIndexOf(RIGHT_BOUND) != inPlaceHolderString.length() - 1) {
                                        throw new ElasticsearchParseException("invalid dynamic name expression [{}]. missing closing `}` for date math format", inPlaceHolderString);
                                    }
                                    if (dateTimeFormatLeftBoundIndex == inPlaceHolderString.length() - 2) {
                                        throw new ElasticsearchParseException("invalid dynamic name expression [{}]. missing date format", inPlaceHolderString);
                                    }
                                    mathExpression = inPlaceHolderString.substring(0, dateTimeFormatLeftBoundIndex);
                                    String dateFormatterPatternAndTimeZoneId = inPlaceHolderString.substring(dateTimeFormatLeftBoundIndex + 1, inPlaceHolderString.length() - 1);
                                    int formatPatternTimeZoneSeparatorIndex = dateFormatterPatternAndTimeZoneId.indexOf(TIME_ZONE_BOUND);
                                    if (formatPatternTimeZoneSeparatorIndex != -1) {
                                        dateFormatterPattern = dateFormatterPatternAndTimeZoneId.substring(0, formatPatternTimeZoneSeparatorIndex);
                                        timeZone = DateTimeZone.forID(dateFormatterPatternAndTimeZoneId.substring(formatPatternTimeZoneSeparatorIndex + 1));
                                    } else {
                                        dateFormatterPattern = dateFormatterPatternAndTimeZoneId;
                                        timeZone = defaultTimeZone;
                                    }
                                    dateFormatter = DateTimeFormat.forPattern(dateFormatterPattern);
                                }
                                DateTimeFormatter parser = dateFormatter.withZone(timeZone);
                                FormatDateTimeFormatter formatter = new FormatDateTimeFormatter(dateFormatterPattern, parser, Locale.ROOT);
                                DateMathParser dateMathParser = new DateMathParser(formatter);
                                long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);

                                String time = formatter.printer().print(millis);
                                beforePlaceHolderSb.append(time);
                                inPlaceHolderSb = new StringBuilder();
                                inPlaceHolder = false;
                            }
                            break;

                        default:
                            inPlaceHolderSb.append(c);
                    }
                } else {
                    switch (c) {
                        case LEFT_BOUND:
                            if (escapedChar) {
                                beforePlaceHolderSb.append(c);
                            } else {
                                inPlaceHolder = true;
                            }
                            break;

                        case RIGHT_BOUND:
                            if (!escapedChar) {
                                throw new ElasticsearchParseException("invalid dynamic name expression [{}]. invalid character at position [{}]. " +
                                    "`{` and `}` are reserved characters and should be escaped when used as part of the index name using `\\` (e.g. `\\{text\\}`)", new String(text, from, length), i);
                            }
                            // deliberate fall-through (see @SuppressWarnings): an escaped '}' is emitted literally

                        default:
                            beforePlaceHolderSb.append(c);
                    }
                }
            }

            if (inPlaceHolder) {
                throw new ElasticsearchParseException("invalid dynamic name expression [{}]. date math placeholder is open ended", new String(text, from, length));
            }
            if (beforePlaceHolderSb.length() == 0) {
                throw new ElasticsearchParseException("nothing captured");
            }
            return beforePlaceHolderSb.toString();
        }
    }

    /**
     * Returns <code>true</code> iff the given expression resolves to the given index name otherwise <code>false</code>
     */
    public final boolean matchesIndex(String indexName, String expression, ClusterState state) {
        final String[] concreteIndices = concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), expression);
        for (String index : concreteIndices) {
            if (Regex.simpleMatch(index, indexName)) {
                return true;
            }
        }
        return indexName.equals(expression);
    }
}
package org.ovirt.engine.core.common.businessentities.network;

import java.util.Map;
import java.util.Objects;

import javax.validation.Valid;
import javax.validation.constraints.NotNull;

import org.ovirt.engine.core.common.businessentities.BusinessEntity;
import org.ovirt.engine.core.common.businessentities.IVdcQueryable;
import org.ovirt.engine.core.common.utils.ToStringBuilder;
import org.ovirt.engine.core.common.validation.annotation.NetworkIdOrNetworkNameIsSet;
import org.ovirt.engine.core.common.validation.group.CreateEntity;
import org.ovirt.engine.core.common.validation.group.RemoveEntity;
import org.ovirt.engine.core.common.validation.group.UpdateEntity;
import org.ovirt.engine.core.compat.Guid;

/**
 * Describes the attachment of a logical network to a host NIC, including the attachment's
 * IP configuration, custom properties and an optional host-network QoS override.
 * Either the network id or the network name must be set (enforced by
 * {@link NetworkIdOrNetworkNameIsSet} on create/update).
 */
@NetworkIdOrNetworkNameIsSet(groups = { CreateEntity.class, UpdateEntity.class })
public class NetworkAttachment implements IVdcQueryable, BusinessEntity<Guid> {

    private static final long serialVersionUID = -8052325342869681284L;

    // Required when updating or removing an existing attachment.
    @NotNull(groups = { UpdateEntity.class, RemoveEntity.class })
    private Guid id;

    private Guid networkId;

    private String networkName;

    private Guid nicId;

    // Non-null only when this attachment overrides the network's QoS; see isQosOverridden().
    private HostNetworkQos hostNetworkQos;

    private String nicName;

    @Valid
    private IpConfiguration ipConfiguration;

    private Map<String, String> properties;

    private boolean overrideConfiguration;

    private ReportedConfigurations reportedConfigurations;

    public static long getSerialVersionUID() {
        return serialVersionUID;
    }

    public NetworkAttachment() {
    }

    /**
     * Copy constructor; performs a shallow copy of all fields of {@code networkAttachment}.
     */
    public NetworkAttachment(NetworkAttachment networkAttachment) {
        id = networkAttachment.getId();
        nicId = networkAttachment.getNicId();
        nicName = networkAttachment.getNicName();
        networkId = networkAttachment.getNetworkId();
        networkName = networkAttachment.getNetworkName();
        ipConfiguration = networkAttachment.getIpConfiguration();
        properties = networkAttachment.getProperties();
        overrideConfiguration = networkAttachment.isOverrideConfiguration();
        reportedConfigurations = networkAttachment.getReportedConfigurations();
        // Bug fix: the QoS override was previously not copied and silently lost.
        hostNetworkQos = networkAttachment.getHostNetworkQos();
    }

    /**
     * Creates an attachment of the given network to the given base NIC.
     */
    public NetworkAttachment(VdsNetworkInterface baseNic, Network network, IpConfiguration ipConfiguration) {
        this.networkId = network.getId();
        this.networkName = network.getName();
        this.nicId = baseNic.getId();
        this.nicName = baseNic.getName();
        this.ipConfiguration = ipConfiguration;
    }

    @Override
    public Guid getId() {
        return id;
    }

    @Override
    public void setId(Guid id) {
        this.id = id;
        // keep the overriding QoS entity's id in sync with the attachment id
        if (isQosOverridden()) {
            hostNetworkQos.setId(id);
        }
    }

    public Guid getNetworkId() {
        return networkId;
    }

    public void setNetworkId(Guid networkId) {
        this.networkId = networkId;
    }

    public Guid getNicId() {
        return nicId;
    }

    public void setNicId(Guid nicId) {
        this.nicId = nicId;
    }

    public String getNicName() {
        return nicName;
    }

    public void setNicName(String nicName) {
        this.nicName = nicName;
    }

    public IpConfiguration getIpConfiguration() {
        return ipConfiguration;
    }

    public void setIpConfiguration(IpConfiguration ipConfiguration) {
        this.ipConfiguration = ipConfiguration;
    }

    public Map<String, String> getProperties() {
        return properties;
    }

    public void setProperties(Map<String, String> properties) {
        this.properties = properties;
    }

    /** Returns {@code true} if this attachment carries at least one custom property. */
    public boolean hasProperties() {
        return !(getProperties() == null || getProperties().isEmpty());
    }

    @Override
    public Object getQueryableId() {
        return getId();
    }

    public boolean isOverrideConfiguration() {
        return overrideConfiguration;
    }

    public void setOverrideConfiguration(boolean overrideConfiguration) {
        this.overrideConfiguration = overrideConfiguration;
    }

    public void setReportedConfigurations(ReportedConfigurations reportedConfigurations) {
        this.reportedConfigurations = reportedConfigurations;
    }

    public ReportedConfigurations getReportedConfigurations() {
        return reportedConfigurations;
    }

    public String getNetworkName() {
        return networkName;
    }

    public void setNetworkName(String networkName) {
        this.networkName = networkName;
    }

    public HostNetworkQos getHostNetworkQos() {
        return hostNetworkQos;
    }

    public void setHostNetworkQos(HostNetworkQos hostNetworkQos) {
        this.hostNetworkQos = hostNetworkQos;
        // the overriding QoS entity always shares the attachment's id
        if (this.hostNetworkQos != null) {
            this.hostNetworkQos.setId(this.getId());
        }
    }

    /** Returns {@code true} when this attachment overrides the network's QoS. */
    public boolean isQosOverridden() {
        return hostNetworkQos != null;
    }

    /**
     * Equality is based on the (network, NIC) pair only — an attachment is identified by
     * which network is attached to which NIC, not by its own id or configuration.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof NetworkAttachment)) {
            return false;
        }
        NetworkAttachment that = (NetworkAttachment) o;
        return Objects.equals(getNetworkId(), that.getNetworkId())
                && Objects.equals(getNicId(), that.getNicId());
    }

    @Override
    public int hashCode() {
        // must stay consistent with equals(): (networkId, nicId) only
        return Objects.hash(getNetworkId(), getNicId());
    }

    @Override
    public String toString() {
        return ToStringBuilder.forInstance(this)
                .append("id", getId())
                .append("networkId", getNetworkId())
                .append("networkName", getNetworkName())
                .append("nicId", getNicId())
                .append("nicName", getNicName())
                .append("ipConfiguration", getIpConfiguration())
                .append("properties", getProperties())
                .append("overrideConfiguration", isOverrideConfiguration())
                .build();
    }
}
/*
 *
 *  * Copyright 2010-2016 OrientDB LTD (http://orientdb.com)
 *  *
 *  * Licensed under the Apache License, Version 2.0 (the "License");
 *  * you may not use this file except in compliance with the License.
 *  * You may obtain a copy of the License at
 *  *
 *  *      http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  * Unless required by applicable law or agreed to in writing, software
 *  * distributed under the License is distributed on an "AS IS" BASIS,
 *  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  * See the License for the specific language governing permissions and
 *  * limitations under the License.
 *  *
 *  * For more information: http://orientdb.com
 *
 */
package com.orientechnologies.orient.core.tx;

import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordOperation;
import com.orientechnologies.orient.core.exception.ORecordNotFoundException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.storage.ORecordCallback;
import com.orientechnologies.orient.core.storage.OStorage;
import java.util.List;

/**
 * Contract of an OrientDB database transaction: lifecycle control (begin/commit/rollback),
 * record load/save/delete within the transaction, access to the pending record and index
 * operations, and per-transaction custom data.
 */
public interface OTransaction {

  /** Transaction type requested by the caller. */
  enum TXTYPE {
    NOTX,
    OPTIMISTIC,
    PESSIMISTIC
  }

  /** Lifecycle status of a transaction. */
  enum TXSTATUS {
    INVALID,
    BEGUN,
    COMMITTING,
    ROLLBACKING,
    COMPLETED,
    ROLLED_BACK
  }

  /** Supported isolation levels; see {@link #setIsolationLevel(ISOLATION_LEVEL)}. */
  enum ISOLATION_LEVEL {
    READ_COMMITTED,
    REPEATABLE_READ
  }

  void begin();

  void commit();

  void commit(boolean force);

  void rollback();

  /** Returns the current isolation level. */
  ISOLATION_LEVEL getIsolationLevel();

  /**
   * Changes the isolation level. Default is READ_COMMITTED. When REPEATABLE_READ is set, any record
   * read from the storage is cached in memory to guarantee the repeatable reads. This affects the
   * used RAM and speed (because of additional JVM Garbage Collector work).
   *
   * @param iIsolationLevel Isolation level to set
   * @return Current object to allow call in chain
   */
  OTransaction setIsolationLevel(ISOLATION_LEVEL iIsolationLevel);

  void rollback(boolean force, int commitLevelDiff);

  ODatabaseDocument getDatabase();

  @Deprecated
  void clearRecordEntries();

  @Deprecated
  ORecord loadRecord(
      ORID iRid,
      ORecord iRecord,
      String iFetchPlan,
      boolean ignoreCache,
      boolean loadTombstone,
      final OStorage.LOCKING_STRATEGY iLockingStrategy);

  @Deprecated
  ORecord loadRecord(
      ORID iRid,
      ORecord iRecord,
      String iFetchPlan,
      boolean ignoreCache,
      boolean iUpdateCache,
      boolean loadTombstone,
      final OStorage.LOCKING_STRATEGY iLockingStrategy);

  ORecord loadRecord(ORID iRid, ORecord iRecord, String iFetchPlan, boolean ignoreCache);

  ORecord reloadRecord(ORID iRid, ORecord iRecord, String iFetchPlan, boolean ignoreCache);

  ORecord reloadRecord(
      ORID iRid, ORecord iRecord, String iFetchPlan, boolean ignoreCache, boolean force);

  ORecord loadRecordIfVersionIsNotLatest(
      ORID rid, int recordVersion, String fetchPlan, boolean ignoreCache)
      throws ORecordNotFoundException;

  TXSTATUS getStatus();

  @Deprecated
  Iterable<? extends ORecordOperation> getCurrentRecordEntries();

  Iterable<? extends ORecordOperation> getRecordOperations();

  List<ORecordOperation> getNewRecordEntriesByClass(OClass iClass, boolean iPolymorphic);

  List<ORecordOperation> getNewRecordEntriesByClusterIds(int[] iIds);

  ORecordOperation getRecordEntry(ORID rid);

  List<String> getInvolvedIndexes();

  ODocument getIndexChanges();

  @Deprecated
  void clearIndexEntries();

  boolean isUsingLog();

  /**
   * If you set this flag to false, you are unable to
   *
   * <ol>
   *   <li>Rollback data changes in case of exception
   *   <li>Restore data in case of server crash
   * </ol>
   *
   * <p>So you are practically unable to work in a multithreaded environment while keeping data
   * consistent.
   *
   * @deprecated This option has no effect
   */
  @Deprecated
  void setUsingLog(boolean useLog);

  void close();

  /**
   * When commit in transaction is performed all new records will change their identity, but index
   * values will contain stale links, to fix them given method will be called for each entry. This
   * updates the local transaction maps too.
   *
   * @param oldRid Record identity before commit.
   * @param newRid Record identity after commit.
   */
  void updateIdentityAfterCommit(final ORID oldRid, final ORID newRid);

  int amountOfNestedTxs();

  boolean isLockedRecord(OIdentifiable iRecord);

  @Deprecated
  OStorage.LOCKING_STRATEGY lockingStrategy(OIdentifiable iRecord);

  @Deprecated
  OTransaction lockRecord(OIdentifiable iRecord, OStorage.LOCKING_STRATEGY iLockingStrategy);

  @Deprecated
  OTransaction unlockRecord(OIdentifiable iRecord);

  int getEntryCount();

  /** @return {@code true} if this transaction is active, {@code false} otherwise. */
  boolean isActive();

  /**
   * Saves the given record in this transaction.
   *
   * @param record the record to save.
   * @param clusterName record's cluster name.
   * @param operationMode the operation mode.
   * @param forceCreate the force creation flag, {@code true} to force the creation of the record,
   *     {@code false} to allow updates.
   * @param createdCallback the callback to invoke when the record save operation triggered the
   *     creation of the record.
   * @param updatedCallback the callback to invoke when the record save operation triggered the
   *     update of the record.
   * @return the record saved.
   */
  ORecord saveRecord(
      ORecord record,
      String clusterName,
      ODatabase.OPERATION_MODE operationMode,
      boolean forceCreate,
      ORecordCallback<? extends Number> createdCallback,
      ORecordCallback<Integer> updatedCallback);

  /**
   * Deletes the given record in this transaction.
   *
   * @param record the record to delete.
   * @param mode the operation mode.
   */
  void deleteRecord(ORecord record, ODatabase.OPERATION_MODE mode);

  /**
   * Resolves a record with the given RID in the context of this transaction.
   *
   * @param rid the record RID.
   * @return the resolved record, or {@code null} if no record is found, or {@link
   *     OTransactionAbstract#DELETED_RECORD} if the record was deleted in this transaction.
   */
  ORecord getRecord(ORID rid);

  /**
   * Adds the transactional index entry in this transaction.
   *
   * @param index the index.
   * @param indexName the index name.
   * @param operation the index operation to register.
   * @param key the index key.
   * @param value the index key value.
   */
  void addIndexEntry(
      OIndex index,
      String indexName,
      OTransactionIndexChanges.OPERATION operation,
      Object key,
      OIdentifiable value);

  /**
   * Adds the given document to a set of changed documents known to this transaction.
   *
   * @param document the document to add.
   */
  void addChangedDocument(ODocument document);

  /**
   * Obtains the index changes done in the context of this transaction.
   *
   * @param indexName the index name.
   * @return the index changes in question or {@code null} if index is not found.
   */
  OTransactionIndexChanges getIndexChanges(String indexName);

  /**
   * Does the same thing as {@link #getIndexChanges(String)}, but handles remote storages in a
   * special way.
   *
   * @param indexName the index name.
   * @return the index changes in question or {@code null} if index is not found or storage is
   *     remote.
   */
  OTransactionIndexChanges getIndexChangesInternal(String indexName);

  /**
   * Obtains the custom value by its name stored in the context of this transaction.
   *
   * @param name the value name.
   * @return the obtained value or {@code null} if no value found.
   */
  Object getCustomData(String name);

  /**
   * Sets the custom value by its name stored in the context of this transaction.
   *
   * @param name the value name.
   * @param value the value to store.
   */
  void setCustomData(String name, Object value);

  /** @return this transaction ID as seen by the client of this transaction. */
  default int getClientTransactionId() {
    return getId();
  }

  int getId();
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.request.resource;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.Locale;
import java.util.Objects;

import jakarta.servlet.http.HttpServletResponse;

import org.apache.wicket.Application;
import org.apache.wicket.IWicketInternalException;
import org.apache.wicket.Session;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.core.util.lang.WicketObjects;
import org.apache.wicket.core.util.resource.locator.IResourceStreamLocator;
import org.apache.wicket.javascript.IJavaScriptCompressor;
import org.apache.wicket.markup.html.IPackageResourceGuard;
import org.apache.wicket.mock.MockWebRequest;
import org.apache.wicket.request.Url;
import org.apache.wicket.request.cycle.RequestCycle;
import org.apache.wicket.request.resource.caching.IStaticCacheableResource;
import org.apache.wicket.resource.IScopeAwareTextResourceProcessor;
import org.apache.wicket.resource.ITextResourceCompressor;
import org.apache.wicket.response.StringResponse;
import org.apache.wicket.util.io.IOUtils;
import org.apache.wicket.util.lang.Classes;
import org.apache.wicket.util.lang.Packages;
import org.apache.wicket.util.resource.IFixedLocationResourceStream;
import org.apache.wicket.util.resource.IResourceStream;
import org.apache.wicket.util.resource.ResourceStreamNotFoundException;
import org.apache.wicket.util.resource.ResourceStreamWrapper;
import org.apache.wicket.util.string.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Represents a localizable static resource.
 * <p>
 * Use like eg:
 *
 * <pre>
 * MyPackageResource IMG_UNKNOWN = new MyPackageResource(EditPage.class, &quot;questionmark.gif&quot;);
 * </pre>
 *
 * where the static resource references image 'questionmark.gif' from the package that EditPage
 * is in to get a package resource.
 * </p>
 *
 * Access to resources can be granted or denied via a {@link IPackageResourceGuard}. Please see
 * {@link org.apache.wicket.settings.ResourceSettings#getPackageResourceGuard()} as well.
 *
 * @author Jonathan Locke
 * @author Eelco Hillenius
 * @author Juergen Donnerstag
 * @author Matej Knopp
 * @author Tobias Soloschenko
 */
public class PackageResource extends AbstractResource implements IStaticCacheableResource
{
	private static final Logger log = LoggerFactory.getLogger(PackageResource.class);

	private static final long serialVersionUID = 1L;

	/**
	 * Exception thrown when the creation of a package resource is not allowed.
	 */
	public static final class PackageResourceBlockedException extends WicketRuntimeException
		implements
			IWicketInternalException
	{
		private static final long serialVersionUID = 1L;

		/**
		 * Construct.
		 *
		 * @param message
		 *            error message
		 */
		public PackageResourceBlockedException(String message)
		{
			super(message);
		}
	}

	/**
	 * The path to the resource
	 */
	private final String absolutePath;

	/**
	 * The resource's locale
	 */
	private final Locale locale;

	/**
	 * The path this resource was created with.
	 */
	private final String path;

	/**
	 * The scoping class, used for class loading and to determine the package.
	 */
	private final String scopeName;

	/**
	 * The name of the resource
	 */
	private final String name;

	/**
	 * The resource's style
	 */
	private final String style;

	/**
	 * The component's variation (of the style)
	 */
	private final String variation;

	/**
	 * A flag indicating whether {@code ITextResourceCompressor} can be used to compress this
	 * resource. Default is {@code false} because this resource may be used for binary data (e.g. an
	 * image). Specializations of this class should change this flag appropriately.
	 */
	private boolean compress = false;

	/**
	 * controls whether {@link org.apache.wicket.request.resource.caching.IResourceCachingStrategy}
	 * should be applied to resource
	 */
	private boolean cachingEnabled = true;

	/**
	 * text encoding (may be null) - only makes sense for character-based resources
	 */
	private String textEncoding = null;

	/**
	 * Reads the resource buffered - the content is copied into memory
	 */
	private boolean readBuffered = true;

	/**
	 * Hidden constructor.
	 *
	 * @param scope
	 *            This argument will be used to get the class loader for loading the package
	 *            resource, and to determine what package it is in
	 * @param name
	 *            The relative path to the resource
	 * @param locale
	 *            The locale of the resource
	 * @param style
	 *            The style of the resource
	 * @param variation
	 *            The component's variation (of the style)
	 */
	protected PackageResource(final Class<?> scope, final String name, final Locale locale,
		final String style, final String variation)
	{
		// Convert resource path to absolute path relative to base package
		absolutePath = Packages.absolutePath(scope, name);

		// escape "../" segments so the stored path cannot walk out of the package
		final String parentEscape = getParentFolderPlaceholder();

		if (Strings.isEmpty(parentEscape) == false)
		{
			path = Strings.replaceAll(name, "../", parentEscape + "/").toString();
		}
		else
		{
			path = name;
		}

		this.name = name;
		// store the scope by name (not Class) so this resource stays serializable
		this.scopeName = scope.getName();
		this.locale = locale;
		this.style = style;
		this.variation = variation;
	}

	/**
	 * @return the explicitly configured locale, falling back to the current session's locale when
	 *         none was set and a session exists
	 */
	private Locale getCurrentLocale()
	{
		if (locale == null && Session.exists())
		{
			return Session.get().getLocale();
		}

		return locale;
	}

	/**
	 * @return the explicitly configured style, falling back to the current session's style when
	 *         none was set and a session exists
	 */
	private String getCurrentStyle()
	{
		if (style == null && Session.exists())
		{
			return Session.get().getStyle();
		}

		return style;
	}

	/**
	 * Returns true if the caching for this resource is enabled
	 *
	 * @return if the caching is enabled
	 */
	@Override
	public boolean isCachingEnabled()
	{
		return cachingEnabled;
	}

	/**
	 * Sets the caching for this resource to be enabled
	 *
	 * @param enabled
	 *            if the caching should be enabled
	 */
	public void setCachingEnabled(final boolean enabled)
	{
		this.cachingEnabled = enabled;
	}

	/**
	 * get text encoding (intended for character-based resources)
	 *
	 * @return custom encoding or {@code null} to use default
	 */
	public String getTextEncoding()
	{
		return textEncoding;
	}

	/**
	 * set text encoding (intended for character-based resources)
	 *
	 * @param textEncoding
	 *            custom encoding or {@code null} to use default
	 */
	public void setTextEncoding(final String textEncoding)
	{
		this.textEncoding = textEncoding;
	}

	/**
	 * Builds the key used by the resource caching strategy, or {@code null} (= do not cache) when
	 * the underlying resource stream cannot be located for the current locale/style/variation.
	 */
	@Override
	public Serializable getCacheKey()
	{
		Class<?> scope = getScope();
		String currentStyle = getCurrentStyle();
		Locale currentLocale = getCurrentLocale();

		IResourceStream packageResource = Application.get()
			.getResourceSettings()
			.getResourceStreamLocator()
			.locate(scope, absolutePath, currentStyle, variation, currentLocale, null, false);

		// if resource stream can not be found do not cache
		if (packageResource != null)
		{
			return new CacheKey(scopeName, absolutePath, currentLocale, currentStyle, variation);
		}
		return null;
	}

	/**
	 * Gets the scoping class, used for class loading and to determine the package.
	 *
	 * @return the scoping class
	 */
	public final Class<?> getScope()
	{
		return WicketObjects.resolveClass(scopeName);
	}

	/**
	 * @return the relative resource name this resource was created with
	 */
	public final String getName()
	{
		return name;
	}

	/**
	 * Gets the style.
	 *
	 * @return the style
	 */
	public final String getStyle()
	{
		return style;
	}

	/**
	 * creates a new resource response based on the request attributes
	 *
	 * @param attributes
	 *            current request attributes from client
	 * @return resource response for answering request
	 */
	@Override
	protected ResourceResponse newResourceResponse(Attributes attributes)
	{
		final ResourceResponse resourceResponse = new ResourceResponse();

		final IResourceStream resourceStream = getResourceStream();

		// bail out if resource stream could not be found
		if (resourceStream == null)
		{
			return sendResourceError(resourceResponse, HttpServletResponse.SC_NOT_FOUND,
				"Unable to find resource");
		}

		// add Last-Modified header (to support HEAD requests and If-Modified-Since)
		final Instant lastModified = resourceStream.lastModifiedTime();

		resourceResponse.setLastModified(lastModified);

		if (resourceResponse.dataNeedsToBeWritten(attributes))
		{
			String contentType = resourceStream.getContentType();

			if (contentType == null && Application.exists())
			{
				contentType = Application.get().getMimeType(path);
			}

			// set Content-Type (may be null)
			resourceResponse.setContentType(contentType);

			// set content encoding (may be null)
			resourceResponse.setTextEncoding(getTextEncoding());

			// supports accept range
			resourceResponse.setAcceptRange(ContentRangeType.BYTES);

			try
			{
				// read resource data to get the content length
				InputStream inputStream = resourceStream.getInputStream();

				byte[] bytes = null;
				// send Content-Length header
				if (readBuffered)
				{
					// whole content is pulled into memory so compressors can run on it
					bytes = IOUtils.toByteArray(inputStream);
					resourceResponse.setContentLength(bytes.length);
				}
				else
				{
					resourceResponse.setContentLength(resourceStream.length().bytes());
				}

				// get content range information
				RequestCycle cycle = RequestCycle.get();
				Long startbyte = cycle.getMetaData(CONTENT_RANGE_STARTBYTE);
				Long endbyte = cycle.getMetaData(CONTENT_RANGE_ENDBYTE);

				// send response body with resource data
				PartWriterCallback partWriterCallback = new PartWriterCallback(bytes != null
					? new ByteArrayInputStream(bytes) : inputStream,
					resourceResponse.getContentLength(), startbyte, endbyte);

				// If read buffered is set to false ensure the part writer callback is going to
				// close the input stream
				resourceResponse.setWriteCallback(partWriterCallback.setClose(!readBuffered));
			}
			catch (IOException e)
			{
				log.debug(e.getMessage(), e);
				// NOTE(review): literal 500 — HttpServletResponse.SC_INTERNAL_SERVER_ERROR
				return sendResourceError(resourceResponse, 500, "Unable to read resource stream");
			}
			catch (ResourceStreamNotFoundException e)
			{
				log.debug(e.getMessage(), e);
				return sendResourceError(resourceResponse, 500, "Unable to open resource stream");
			}
			finally
			{
				try
				{
					// only close here when fully buffered; otherwise the write callback
					// owns the stream and closes it after the body is written
					if (readBuffered)
					{
						IOUtils.close(resourceStream);
					}
				}
				catch (IOException e)
				{
					log.warn("Unable to close the resource stream", e);
				}
			}
		}

		return resourceResponse;
	}

	/**
	 * Gives a chance to modify the resource going to be written in the response
	 *
	 * @param attributes
	 *            current request attributes from client
	 * @param original
	 *            the original response
	 * @return the processed response
	 */
	protected byte[] processResponse(final Attributes attributes, final byte[] original)
	{
		return compressResponse(attributes, original);
	}

	/**
	 * Compresses the response if its is eligible and there is a configured compressor
	 *
	 * @param attributes
	 *            current request attributes from client
	 * @param original
	 *            the original response
	 * @return the compressed response
	 */
	protected byte[] compressResponse(final Attributes attributes, final byte[] original)
	{
		ITextResourceCompressor compressor = getCompressor();

		if (compressor != null && getCompress())
		{
			try
			{
				Charset charset = getProcessingEncoding();
				String nonCompressed = new String(original, charset);
				String output;
				if (compressor instanceof IScopeAwareTextResourceProcessor)
				{
					IScopeAwareTextResourceProcessor scopeAwareProcessor = (IScopeAwareTextResourceProcessor)compressor;
					output = scopeAwareProcessor.process(nonCompressed, getScope(), name);
				}
				else
				{
					output = compressor.compress(nonCompressed);
				}

				// re-encode with the configured text encoding when set, otherwise
				// keep the processing charset
				final String textEncoding = getTextEncoding();
				final Charset outputCharset;
				if (Strings.isEmpty(textEncoding))
				{
					outputCharset = charset;
				}
				else
				{
					outputCharset = Charset.forName(textEncoding);
				}

				return output.getBytes(outputCharset);
			}
			catch (Exception e)
			{
				// compression is best-effort: fall back to the uncompressed content
				log.error("Error while compressing the content", e);
				return original;
			}
		}
		else
		{
			// don't strip the comments
			return original;
		}
	}

	/**
	 * @return The charset to use to read the resource
	 */
	protected Charset getProcessingEncoding()
	{
		return StandardCharsets.UTF_8;
	}

	/**
	 * Gets the {@link IJavaScriptCompressor} to be used. By default returns the configured
	 * compressor on application level, but can be overriden by the user application to provide
	 * compressor specific to the resource.
	 *
	 * @return the configured application level JavaScript compressor. May be {@code null}.
	 */
	protected ITextResourceCompressor getCompressor()
	{
		return null;
	}

	/**
	 * send resource specific error message and write log entry
	 *
	 * @param resourceResponse
	 *            resource response
	 * @param errorCode
	 *            error code (=http status)
	 * @param errorMessage
	 *            error message (=http error message)
	 * @return resource response for method chaining
	 */
	private ResourceResponse sendResourceError(ResourceResponse resourceResponse, int errorCode,
		String errorMessage)
	{
		String msg = String.format(
			"resource [path = %s, style = %s, variation = %s, locale = %s]: %s (status=%d)",
			absolutePath, style, variation, locale, errorMessage, errorCode);

		log.warn(msg);

		resourceResponse.setError(errorCode, errorMessage);
		return resourceResponse;
	}

	/**
	 * locate resource stream for current resource
	 *
	 * @return resource stream or <code>null</code> if not found
	 */
	@Override
	public IResourceStream getResourceStream()
	{
		return internalGetResourceStream(getCurrentStyle(), getCurrentLocale());
	}

	/**
	 * @return whether {@link org.apache.wicket.resource.ITextResourceCompressor} can be used to
	 *         compress the resource.
	 */
	public boolean getCompress()
	{
		return compress;
	}

	/**
	 * @param compress
	 *            A flag indicating whether the resource should be compressed.
	 */
	public void setCompress(boolean compress)
	{
		this.compress = compress;
	}

	/**
	 * Locates the resource stream for the given style/locale, enforces the package resource guard
	 * on the (possibly fixed-location) real path, and wraps the stream so its content gets run
	 * through {@link #processResponse(Attributes, byte[])}.
	 *
	 * @param style
	 *            style to locate with (may be null)
	 * @param locale
	 *            locale to locate with (may be null)
	 * @return the (wrapped) resource stream, or {@code null} when not found
	 * @throws PackageResourceBlockedException
	 *             when the guard denies access to the resource
	 */
	private IResourceStream internalGetResourceStream(final String style, final Locale locale)
	{
		IResourceStreamLocator resourceStreamLocator = Application.get()
			.getResourceSettings()
			.getResourceStreamLocator();
		IResourceStream resourceStream = resourceStreamLocator.locate(getScope(), absolutePath,
			style, variation, locale, null, false);

		String realPath = absolutePath;
		if (resourceStream instanceof IFixedLocationResourceStream)
		{
			realPath = ((IFixedLocationResourceStream)resourceStream).locationAsString();
			if (realPath != null)
			{
				// strip everything before the package path so the guard checks the
				// package-relative location
				int index = realPath.indexOf(absolutePath);
				if (index != -1)
				{
					realPath = realPath.substring(index);
				}
			}
			else
			{
				realPath = absolutePath;
			}
		}

		if (accept(realPath) == false)
		{
			throw new PackageResourceBlockedException(
				"Access denied to (static) package resource " + absolutePath +
					". See IPackageResourceGuard");
		}

		if (resourceStream != null)
		{
			resourceStream = new ProcessingResourceStream(resourceStream);
		}
		return resourceStream;
	}

	/**
	 * An IResourceStream that processes the input stream of the original IResourceStream
	 */
	private class ProcessingResourceStream extends ResourceStreamWrapper
	{
		private static final long serialVersionUID = 1L;

		private ProcessingResourceStream(IResourceStream delegate)
		{
			super(delegate);
		}

		@Override
		public InputStream getInputStream() throws ResourceStreamNotFoundException
		{
			byte[] bytes = null;
			InputStream inputStream = super.getInputStream();

			if (readBuffered)
			{
				try
				{
					bytes = IOUtils.toByteArray(inputStream);
				}
				catch (IOException iox)
				{
					throw new WicketRuntimeException(iox);
				}
				finally
				{
					// the content is fully in memory now, the underlying stream can go
					IOUtils.closeQuietly(this);
				}
			}

			RequestCycle cycle = RequestCycle.get();
			Attributes attributes;
			if (cycle != null)
			{
				attributes = new Attributes(cycle.getRequest(), cycle.getResponse());
			}
			else
			{
				// use empty request and response in case of non-http thread. WICKET-5532
				attributes = new Attributes(new MockWebRequest(Url.parse("")),
					new StringResponse());
			}

			if (bytes != null)
			{
				byte[] processedBytes = processResponse(attributes, bytes);
				return new ByteArrayInputStream(processedBytes);
			}
			else
			{
				// unbuffered: content cannot be processed/compressed, pass through as-is
				return inputStream;
			}
		}
	}

	/**
	 * Checks whether access is granted for this resource.
	 *
	 * By default IPackageResourceGuard is used to check the permissions but the resource itself can
	 * also make the check.
	 *
	 * @param path
	 *            resource path
	 * @return <code>true<code> if resource access is granted
	 */
	protected boolean accept(String path)
	{
		IPackageResourceGuard guard = Application.get()
			.getResourceSettings()
			.getPackageResourceGuard();

		return guard.accept(path);
	}

	/**
	 * Checks whether a resource for a given set of criteria exists.
	 *
	 * @param key
	 *            The key that contains all attributes about the requested resource
	 * @return {@code true} if there is a package resource with the given attributes
	 */
	public static boolean exists(final ResourceReference.Key key)
	{
		return exists(key.getScopeClass(), key.getName(), key.getLocale(), key.getStyle(),
			key.getVariation());
	}

	/**
	 * Checks whether a resource for a given set of criteria exists.
	 *
	 * @param scope
	 *            This argument will be used to get the class loader for loading the package
	 *            resource, and to determine what package it is in. Typically this is the class in
	 *            which you call this method
	 * @param path
	 *            The path to the resource
	 * @param locale
	 *            The locale of the resource
	 * @param style
	 *            The style of the resource (see {@link org.apache.wicket.Session})
	 * @param variation
	 *            The component's variation (of the style)
	 * @return {@code true} if a resource could be loaded, {@code false} otherwise
	 */
	public static boolean exists(final Class<?> scope, final String path, final Locale locale,
		final String style, final String variation)
	{
		String absolutePath = Packages.absolutePath(scope, path);
		return Application.get()
			.getResourceSettings()
			.getResourceStreamLocator()
			.locate(scope, absolutePath, style, variation, locale, null, false) != null;
	}

	@Override
	public String toString()
	{
		final StringBuilder result = new StringBuilder();
		result.append('[')
			.append(Classes.simpleName(getClass()))
			.append(' ')
			.append("name = ")
			.append(path)
			.append(", scope = ")
			.append(scopeName)
			.append(", locale = ")
			.append(locale)
			.append(", style = ")
			.append(style)
			.append(", variation = ")
			.append(variation)
			.append(']');
		return result.toString();
	}

	@Override
	public int hashCode()
	{
		final int prime = 31;
		int result = 1;
		result = prime * result + ((absolutePath == null) ? 0 : absolutePath.hashCode());
		result = prime * result + ((locale == null) ? 0 : locale.hashCode());
		result = prime * result + ((path == null) ? 0 : path.hashCode());
		result = prime * result + ((scopeName == null) ? 0 : scopeName.hashCode());
		result = prime * result + ((style == null) ? 0 : style.hashCode());
		result = prime * result + ((variation == null) ? 0 : variation.hashCode());
		return result;
	}

	@Override
	public boolean equals(Object obj)
	{
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		PackageResource other = (PackageResource)obj;

		return Objects.equals(absolutePath, other.absolutePath) &&
			Objects.equals(locale, other.locale) && Objects.equals(path, other.path) &&
			Objects.equals(scopeName, other.scopeName) && Objects.equals(style, other.style) &&
			Objects.equals(variation, other.variation);
	}

	/**
	 * @return the application-configured placeholder for "../" path segments, or ".." when no
	 *         application is available (e.g. in tests)
	 */
	String getParentFolderPlaceholder()
	{
		String parentFolderPlaceholder;
		if (Application.exists())
		{
			parentFolderPlaceholder = Application.get()
				.getResourceSettings()
				.getParentFolderPlaceholder();
		}
		else
		{
			parentFolderPlaceholder = "..";
		}
		return parentFolderPlaceholder;
	}

	/**
	 * Value object identifying a located package resource (scope + path + locale/style/variation)
	 * for the resource caching strategy.
	 */
	// NOTE(review): Serializable without an explicit serialVersionUID — consider adding one
	private static class CacheKey implements Serializable
	{
		private final String scopeName;
		private final String path;
		private final Locale locale;
		private final String style;
		private final String variation;

		public CacheKey(String scopeName, String path, Locale locale, String style,
			String variation)
		{
			this.scopeName = scopeName;
			this.path = path;
			this.locale = locale;
			this.style = style;
			this.variation = variation;
		}

		@Override
		public boolean equals(Object o)
		{
			if (this == o)
				return true;
			if (!(o instanceof CacheKey))
				return false;

			CacheKey cacheKey = (CacheKey)o;

			return Objects.equals(locale, cacheKey.locale) &&
				Objects.equals(path, cacheKey.path) &&
				Objects.equals(scopeName, cacheKey.scopeName) &&
				Objects.equals(style, cacheKey.style) &&
				Objects.equals(variation, cacheKey.variation);
		}

		@Override
		public int hashCode()
		{
			int result = scopeName.hashCode();
			result = 31 * result + path.hashCode();
			result = 31 * result + (locale != null ? locale.hashCode() : 0);
			result = 31 * result + (style != null ? style.hashCode() : 0);
			result = 31 * result + (variation != null ? variation.hashCode() : 0);
			return result;
		}

		@Override
		public String toString()
		{
			final StringBuilder sb = new StringBuilder();
			sb.append("CacheKey");
			sb.append("{scopeName='").append(scopeName).append('\'');
			sb.append(", path='").append(path).append('\'');
			sb.append(", locale=").append(locale);
			sb.append(", style='").append(style).append('\'');
			sb.append(", variation='").append(variation).append('\'');
			sb.append('}');
			return sb.toString();
		}
	}

	/**
	 * If the package resource should be read buffered.<br>
	 * <br>
	 * WARNING - if the stream is not read buffered compressors will not work, because they require
	 * the whole content to be read into memory.<br>
	 * ({@link org.apache.wicket.javascript.IJavaScriptCompressor}, <br>
	 * {@link org.apache.wicket.css.ICssCompressor}, <br>
	 * {@link org.apache.wicket.resource.IScopeAwareTextResourceProcessor})
	 *
	 * @param readBuffered
	 *            if the package resource should be read buffered
	 * @return the current package resource
	 */
	public PackageResource readBuffered(boolean readBuffered)
	{
		this.readBuffered = readBuffered;
		return this;
	}
}
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.masterdb.security.hibernate.forward;

import static com.opengamma.masterdb.security.hibernate.Converters.currencyBeanToCurrency;
import static com.opengamma.masterdb.security.hibernate.Converters.expiryBeanToExpiry;
import static com.opengamma.masterdb.security.hibernate.Converters.expiryToExpiryBean;
import static com.opengamma.masterdb.security.hibernate.Converters.externalIdBeanToExternalId;
import static com.opengamma.masterdb.security.hibernate.Converters.externalIdToExternalIdBean;

import java.util.Date;

import com.opengamma.financial.security.FinancialSecurityVisitorAdapter;
import com.opengamma.financial.security.forward.AgricultureForwardSecurity;
import com.opengamma.financial.security.forward.CommodityForwardSecurity;
import com.opengamma.financial.security.forward.EnergyForwardSecurity;
import com.opengamma.financial.security.forward.MetalForwardSecurity;
import com.opengamma.id.ExternalId;
import com.opengamma.masterdb.security.hibernate.AbstractSecurityBeanOperation;
import com.opengamma.masterdb.security.hibernate.HibernateSecurityMasterDao;
import com.opengamma.masterdb.security.hibernate.OperationContext;

/**
 * Hibernate bean for storage.
 * <p>
 * Converts between the {@link CommodityForwardSecurity} domain objects (agriculture, energy,
 * metal) and their Hibernate bean counterparts ({@link CommodityForwardSecurityBean} subtypes).
 */
public final class CommodityForwardSecurityBeanOperation extends
    AbstractSecurityBeanOperation<CommodityForwardSecurity, CommodityForwardSecurityBean> {

  /**
   * Singleton.
   */
  public static final CommodityForwardSecurityBeanOperation INSTANCE = new CommodityForwardSecurityBeanOperation();

  private CommodityForwardSecurityBeanOperation() {
    super(CommodityForwardSecurity.SECURITY_TYPE, CommodityForwardSecurity.class, CommodityForwardSecurityBean.class);
  }

  /**
   * Rebuilds the domain security from its stored Hibernate bean.
   * <p>
   * FIX(review): the original visitors re-set {@code unitNumber}/{@code unitName} immediately
   * after passing the identical values to the constructor; the {@code getUnitName() != null}
   * guard was also dead code, because {@code bean.getUnitName().getName()} in the constructor
   * argument would already have thrown on {@code null}. The redundant setters are removed.
   *
   * @param context the operation context, not used here
   * @param bean the stored bean, not null
   * @return the reconstructed security
   */
  @Override
  public CommodityForwardSecurity createSecurity(final OperationContext context, final CommodityForwardSecurityBean bean) {
    CommodityForwardSecurity sec = bean.accept(
        new CommodityForwardSecurityBean.Visitor<CommodityForwardSecurity>() {

          @Override
          public CommodityForwardSecurity visitAgricultureForwardType(AgricultureForwardSecurityBean bean) {
            // agriculture forwards carry no underlying identifier
            return new AgricultureForwardSecurity(
                bean.getUnitName().getName(), bean.getUnitNumber(),
                expiryBeanToExpiry(bean.getExpiry()),
                currencyBeanToCurrency(bean.getCurrency()),
                bean.getUnitAmount(), bean.getCategory().getName());
          }

          @Override
          public CommodityForwardSecurity visitEnergyForwardType(EnergyForwardSecurityBean bean) {
            final EnergyForwardSecurity security = new EnergyForwardSecurity(
                bean.getUnitName().getName(), bean.getUnitNumber(),
                expiryBeanToExpiry(bean.getExpiry()),
                currencyBeanToCurrency(bean.getCurrency()),
                bean.getUnitAmount(), bean.getCategory().getName());
            security.setUnderlyingId(externalIdBeanToExternalId(bean.getUnderlying()));
            return security;
          }

          @Override
          public CommodityForwardSecurity visitMetalForwardType(MetalForwardSecurityBean bean) {
            final MetalForwardSecurity security = new MetalForwardSecurity(
                bean.getUnitName().getName(), bean.getUnitNumber(),
                expiryBeanToExpiry(bean.getExpiry()),
                currencyBeanToCurrency(bean.getCurrency()),
                bean.getUnitAmount(), bean.getCategory().getName());
            security.setUnderlyingId(externalIdBeanToExternalId(bean.getUnderlying()));
            return security;
          }
        });
    return sec;
  }

  /**
   * Resolves the bean for storage; commodity forward beans need no extra resolution,
   * so each subtype is returned unchanged.
   *
   * @param context the operation context, not used here
   * @param secMasterSession the Hibernate session, not used here
   * @param now the instant of the operation, not used here
   * @param bean the bean to resolve, not null
   * @return the same bean instance
   */
  @Override
  public CommodityForwardSecurityBean resolve(final OperationContext context,
      final HibernateSecurityMasterDao secMasterSession, final Date now,
      final CommodityForwardSecurityBean bean) {
    return bean.accept(
        new CommodityForwardSecurityBean.Visitor<CommodityForwardSecurityBean>() {

          @Override
          public CommodityForwardSecurityBean visitAgricultureForwardType(AgricultureForwardSecurityBean bean) {
            return bean;
          }

          @Override
          public CommodityForwardSecurityBean visitEnergyForwardType(EnergyForwardSecurityBean bean) {
            return bean;
          }

          @Override
          public CommodityForwardSecurityBean visitMetalForwardType(MetalForwardSecurityBean bean) {
            return bean;
          }
        });
  }

  /**
   * Post-persistence hook; intentionally a no-op for all commodity forward subtypes.
   *
   * @param context the operation context, not used
   * @param secMasterSession the Hibernate session, not used
   * @param now the instant of the operation, not used
   * @param bean the persisted bean, not null
   */
  @Override
  public void postPersistBean(final OperationContext context,
      final HibernateSecurityMasterDao secMasterSession, final Date now,
      final CommodityForwardSecurityBean bean) {
    bean.accept(new CommodityForwardSecurityBean.Visitor<Object>() {

      private void postPersistForward() {
        // No action
      }

      private void postPersistCommodityForward() {
        postPersistForward();
      }

      @Override
      public Object visitAgricultureForwardType(AgricultureForwardSecurityBean bean) {
        postPersistCommodityForward();
        return null;
      }

      @Override
      public Object visitEnergyForwardType(EnergyForwardSecurityBean bean) {
        postPersistCommodityForward();
        return null;
      }

      @Override
      public Object visitMetalForwardType(MetalForwardSecurityBean bean) {
        postPersistCommodityForward();
        return null;
      }
    });
  }

  /**
   * Builds the Hibernate bean for a domain security.
   *
   * @param context the operation context, not used here
   * @param secMasterSession the Hibernate session used to resolve shared lookup beans
   *     (currency, unit name, contract category), not null
   * @param security the security to convert, not null
   * @return the populated bean
   */
  @Override
  public CommodityForwardSecurityBean createBean(final OperationContext context,
      final HibernateSecurityMasterDao secMasterSession, final CommodityForwardSecurity security) {
    return security.accept(new FinancialSecurityVisitorAdapter<CommodityForwardSecurityBean>() {

      // copies the fields common to every forward (expiry, currency, unit amount, category)
      private <F extends CommodityForwardSecurityBean> F createForwardBean(final F bean,
          final CommodityForwardSecurity security) {
        bean.setExpiry(expiryToExpiryBean(security.getExpiry()));
        bean.setCurrency(secMasterSession
            .getOrCreateCurrencyBean(security.getCurrency().getCode()));
        bean.setUnitAmount(security.getUnitAmount());
        bean.setCategory(secMasterSession.getOrCreateContractCategoryBean(security.getContractCategory()));
        return bean;
      }

      // adds the optional commodity-specific fields (unit name and number)
      private <F extends CommodityForwardSecurityBean> F createCommodityForwardSecurityBean(
          final F commodityForwardSecurityBean, final CommodityForwardSecurity security) {
        final F bean = createForwardBean(commodityForwardSecurityBean, security);
        if (security.getUnitName() != null) {
          bean.setUnitName(secMasterSession
              .getOrCreateUnitNameBean(security.getUnitName()));
        }
        if (security.getUnitNumber() != null) {
          bean.setUnitNumber(security.getUnitNumber());
        }
        return bean;
      }

      @Override
      public AgricultureForwardSecurityBean visitAgricultureForwardSecurity(
          AgricultureForwardSecurity security) {
        return createCommodityForwardSecurityBean(new AgricultureForwardSecurityBean(), security);
      }

      @Override
      public EnergyForwardSecurityBean visitEnergyForwardSecurity(
          EnergyForwardSecurity security) {
        final EnergyForwardSecurityBean bean =
            createCommodityForwardSecurityBean(new EnergyForwardSecurityBean(), security);
        ExternalId underlying = security.getUnderlyingId();
        if (underlying != null) {
          bean.setUnderlying(externalIdToExternalIdBean(underlying));
        }
        return bean;
      }

      @Override
      public MetalForwardSecurityBean visitMetalForwardSecurity(
          MetalForwardSecurity security) {
        final MetalForwardSecurityBean bean =
            createCommodityForwardSecurityBean(new MetalForwardSecurityBean(), security);
        ExternalId underlying = security.getUnderlyingId();
        if (underlying != null) {
          // FIX(review): use the null-checked local instead of re-fetching
          // security.getUnderlyingId(), matching the energy visitor
          bean.setUnderlying(externalIdToExternalIdBean(underlying));
        }
        return bean;
      }
    });
  }

}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.object; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DateFieldMapper; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; /** * */ public class RootObjectMapper extends ObjectMapper { public static class 
Defaults { public static final FormatDateTimeFormatter[] DYNAMIC_DATE_TIME_FORMATTERS = new FormatDateTimeFormatter[]{ DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, Joda.getStrictStandardDateFormatter() }; public static final boolean DATE_DETECTION = true; public static final boolean NUMERIC_DETECTION = false; } public static class Builder extends ObjectMapper.Builder<Builder, RootObjectMapper> { protected final List<DynamicTemplate> dynamicTemplates = new ArrayList<>(); // we use this to filter out seen date formats, because we might get duplicates during merging protected Set<String> seenDateFormats = new HashSet<>(); protected List<FormatDateTimeFormatter> dynamicDateTimeFormatters = new ArrayList<>(); protected boolean dateDetection = Defaults.DATE_DETECTION; protected boolean numericDetection = Defaults.NUMERIC_DETECTION; public Builder(String name) { super(name); this.builder = this; } public Builder noDynamicDateTimeFormatter() { this.dynamicDateTimeFormatters = null; return builder; } public Builder dynamicDateTimeFormatter(Iterable<FormatDateTimeFormatter> dateTimeFormatters) { for (FormatDateTimeFormatter dateTimeFormatter : dateTimeFormatters) { if (!seenDateFormats.contains(dateTimeFormatter.format())) { seenDateFormats.add(dateTimeFormatter.format()); this.dynamicDateTimeFormatters.add(dateTimeFormatter); } } return builder; } public Builder add(DynamicTemplate dynamicTemplate) { this.dynamicTemplates.add(dynamicTemplate); return this; } public Builder add(DynamicTemplate... 
dynamicTemplate) { for (DynamicTemplate template : dynamicTemplate) { this.dynamicTemplates.add(template); } return this; } @Override protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map<String, Mapper> mappers, @Nullable Settings settings) { assert !nested.isNested(); FormatDateTimeFormatter[] dates = null; if (dynamicDateTimeFormatters == null) { dates = new FormatDateTimeFormatter[0]; } else if (dynamicDateTimeFormatters.isEmpty()) { // add the default one dates = Defaults.DYNAMIC_DATE_TIME_FORMATTERS; } else { dates = dynamicDateTimeFormatters.toArray(new FormatDateTimeFormatter[dynamicDateTimeFormatters.size()]); } return new RootObjectMapper(name, enabled, dynamic, mappers, dates, dynamicTemplates.toArray(new DynamicTemplate[dynamicTemplates.size()]), dateDetection, numericDetection); } } public static class TypeParser extends ObjectMapper.TypeParser { @Override protected ObjectMapper.Builder createBuilder(String name) { return new Builder(name); } @Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { ObjectMapper.Builder builder = createBuilder(name); Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<String, Object> entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder) || processField(builder, fieldName, fieldNode, parserContext.indexVersionCreated())) { iterator.remove(); } } return builder; } protected boolean processField(ObjectMapper.Builder builder, String fieldName, Object fieldNode, Version indexVersionCreated) { if (fieldName.equals("date_formats") || fieldName.equals("dynamic_date_formats")) { List<FormatDateTimeFormatter> dateTimeFormatters = new ArrayList<>(); if (fieldNode instanceof 
List) { for (Object node1 : (List) fieldNode) { if (node1.toString().startsWith("epoch_")) { throw new MapperParsingException("Epoch ["+ node1.toString() +"] is not supported as dynamic date format"); } dateTimeFormatters.add(parseDateTimeFormatter(node1)); } } else if ("none".equals(fieldNode.toString())) { dateTimeFormatters = null; } else { dateTimeFormatters.add(parseDateTimeFormatter(fieldNode)); } if (dateTimeFormatters == null) { ((Builder) builder).noDynamicDateTimeFormatter(); } else { ((Builder) builder).dynamicDateTimeFormatter(dateTimeFormatters); } return true; } else if (fieldName.equals("dynamic_templates")) { // "dynamic_templates" : [ // { // "template_1" : { // "match" : "*_test", // "match_mapping_type" : "string", // "mapping" : { "type" : "string", "store" : "yes" } // } // } // ] List tmplNodes = (List) fieldNode; for (Object tmplNode : tmplNodes) { Map<String, Object> tmpl = (Map<String, Object>) tmplNode; if (tmpl.size() != 1) { throw new MapperParsingException("A dynamic template must be defined with a name"); } Map.Entry<String, Object> entry = tmpl.entrySet().iterator().next(); String templateName = entry.getKey(); Map<String, Object> templateParams = (Map<String, Object>) entry.getValue(); DynamicTemplate template = DynamicTemplate.parse(templateName, templateParams, indexVersionCreated); ((Builder) builder).add(template); } return true; } else if (fieldName.equals("date_detection")) { ((Builder) builder).dateDetection = lenientNodeBooleanValue(fieldNode); return true; } else if (fieldName.equals("numeric_detection")) { ((Builder) builder).numericDetection = lenientNodeBooleanValue(fieldNode); return true; } return false; } } private final FormatDateTimeFormatter[] dynamicDateTimeFormatters; private final boolean dateDetection; private final boolean numericDetection; private volatile DynamicTemplate dynamicTemplates[]; RootObjectMapper(String name, boolean enabled, Dynamic dynamic, Map<String, Mapper> mappers, FormatDateTimeFormatter[] 
dynamicDateTimeFormatters, DynamicTemplate dynamicTemplates[], boolean dateDetection, boolean numericDetection) { super(name, name, enabled, Nested.NO, dynamic, mappers); this.dynamicTemplates = dynamicTemplates; this.dynamicDateTimeFormatters = dynamicDateTimeFormatters; this.dateDetection = dateDetection; this.numericDetection = numericDetection; } @Override public ObjectMapper mappingUpdate(Mapper mapper) { RootObjectMapper update = (RootObjectMapper) super.mappingUpdate(mapper); // dynamic templates are irrelevant for dynamic mappings updates update.dynamicTemplates = new DynamicTemplate[0]; return update; } public boolean dateDetection() { return this.dateDetection; } public boolean numericDetection() { return this.numericDetection; } public FormatDateTimeFormatter[] dynamicDateTimeFormatters() { return dynamicDateTimeFormatters; } public Mapper.Builder findTemplateBuilder(ParseContext context, String name, String matchType) { final String dynamicType; switch (matchType) { case "string": // string is a corner case since a json string can either map to a // text or keyword field in elasticsearch. For now we use text when // unspecified. For other types, the mapping type matches the json // type so we are fine dynamicType = "text"; break; default: dynamicType = matchType; break; } return findTemplateBuilder(context, name, dynamicType, matchType); } /** * Find a template. Returns {@code null} if no template could be found. 
* @param name the field name * @param dynamicType the field type to give the field if the template does not define one * @param matchType the type of the field in the json document or null if unknown * @return a mapper builder, or null if there is no template for such a field */ public Mapper.Builder findTemplateBuilder(ParseContext context, String name, String dynamicType, String matchType) { DynamicTemplate dynamicTemplate = findTemplate(context.path(), name, matchType); if (dynamicTemplate == null) { return null; } Mapper.TypeParser.ParserContext parserContext = context.docMapperParser().parserContext(name); String mappingType = dynamicTemplate.mappingType(dynamicType); Mapper.TypeParser typeParser = parserContext.typeParser(mappingType); if (typeParser == null) { throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]"); } return typeParser.parse(name, dynamicTemplate.mappingForName(name, dynamicType), parserContext); } public DynamicTemplate findTemplate(ContentPath path, String name, String matchType) { for (DynamicTemplate dynamicTemplate : dynamicTemplates) { if (dynamicTemplate.match(path, name, matchType)) { return dynamicTemplate; } } return null; } @Override public RootObjectMapper merge(Mapper mergeWith, boolean updateAllTypes) { return (RootObjectMapper) super.merge(mergeWith, updateAllTypes); } @Override protected void doMerge(ObjectMapper mergeWith, boolean updateAllTypes) { super.doMerge(mergeWith, updateAllTypes); RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; // merge them List<DynamicTemplate> mergedTemplates = new ArrayList<>(Arrays.asList(this.dynamicTemplates)); for (DynamicTemplate template : mergeWithObject.dynamicTemplates) { boolean replaced = false; for (int i = 0; i < mergedTemplates.size(); i++) { if (mergedTemplates.get(i).name().equals(template.name())) { mergedTemplates.set(i, template); replaced = true; } } if (!replaced) { mergedTemplates.add(template); } } 
this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]); } @Override public RootObjectMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) { return (RootObjectMapper) super.updateFieldType(fullNameToFieldType); } @Override protected void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { if (dynamicDateTimeFormatters != Defaults.DYNAMIC_DATE_TIME_FORMATTERS) { if (dynamicDateTimeFormatters.length > 0) { builder.startArray("dynamic_date_formats"); for (FormatDateTimeFormatter dateTimeFormatter : dynamicDateTimeFormatters) { builder.value(dateTimeFormatter.format()); } builder.endArray(); } } if (dynamicTemplates != null && dynamicTemplates.length > 0) { builder.startArray("dynamic_templates"); for (DynamicTemplate dynamicTemplate : dynamicTemplates) { builder.startObject(); builder.field(dynamicTemplate.name(), dynamicTemplate); builder.endObject(); } builder.endArray(); } if (dateDetection != Defaults.DATE_DETECTION) { builder.field("date_detection", dateDetection); } if (numericDetection != Defaults.NUMERIC_DETECTION) { builder.field("numeric_detection", numericDetection); } } }
package com.codahale.metrics.graphite;

import com.codahale.metrics.*;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;

import java.net.UnknownHostException;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;

import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link GraphiteReporter}.
 *
 * Each test reports a single metric through a reporter configured with the
 * "prefix" name prefix, seconds rate unit and milliseconds duration unit, and
 * then verifies — in strict order via Mockito's {@link InOrder} — exactly which
 * name/value/timestamp triples were sent to the mocked {@link Graphite} client.
 * The clock is mocked so every line carries the fixed {@code timestamp} value
 * (note: the clock returns milliseconds, the reporter sends seconds).
 */
public class GraphiteReporterTest {
    private final long timestamp = 1000198;
    private final Clock clock = mock(Clock.class);
    private final Graphite graphite = mock(Graphite.class);
    private final MetricRegistry registry = mock(MetricRegistry.class);
    private final GraphiteReporter reporter = GraphiteReporter.forRegistry(registry)
                                                              .withClock(clock)
                                                              .prefixedWith("prefix")
                                                              .convertRatesTo(TimeUnit.SECONDS)
                                                              .convertDurationsTo(TimeUnit.MILLISECONDS)
                                                              .filter(MetricFilter.ALL)
                                                              .build(graphite);

    @Before
    public void setUp() throws Exception {
        // clock is in millis; the reporter divides by 1000 to get the epoch-second timestamp
        when(clock.getTime()).thenReturn(timestamp * 1000);
    }

    // Non-numeric gauge values cannot be graphed and must be silently skipped.
    @Test
    public void doesNotReportStringGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge("value")),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite, never()).send("prefix.gauge", "value", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsByteGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge((byte) 1)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsShortGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge((short) 1)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsIntegerGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsLongGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1L)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    // Floating-point gauge values are formatted with two decimal places.
    @Test
    public void reportsFloatGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1.1f)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1.10", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsDoubleGaugeValues() throws Exception {
        reporter.report(map("gauge", gauge(1.1)),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.gauge", "1.10", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsCounters() throws Exception {
        final Counter counter = mock(Counter.class);
        when(counter.getCount()).thenReturn(100L);

        reporter.report(this.<Gauge>map(),
                        this.<Counter>map("counter", counter),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.counter.count", "100", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    // A histogram expands into count, max, mean, min, stddev and the percentile lines.
    @Test
    public void reportsHistograms() throws Exception {
        final Histogram histogram = mock(Histogram.class);
        when(histogram.getCount()).thenReturn(1L);

        final Snapshot snapshot = mock(Snapshot.class);
        when(snapshot.getMax()).thenReturn(2L);
        when(snapshot.getMean()).thenReturn(3.0);
        when(snapshot.getMin()).thenReturn(4L);
        when(snapshot.getStdDev()).thenReturn(5.0);
        when(snapshot.getMedian()).thenReturn(6.0);
        when(snapshot.get75thPercentile()).thenReturn(7.0);
        when(snapshot.get95thPercentile()).thenReturn(8.0);
        when(snapshot.get98thPercentile()).thenReturn(9.0);
        when(snapshot.get99thPercentile()).thenReturn(10.0);
        when(snapshot.get999thPercentile()).thenReturn(11.0);

        when(histogram.getSnapshot()).thenReturn(snapshot);

        reporter.report(this.<Gauge>map(),
                        this.<Counter>map(),
                        this.<Histogram>map("histogram", histogram),
                        this.<Meter>map(),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.histogram.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.max", "2", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.mean", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.min", "4", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.stddev", "5.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p50", "6.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p75", "7.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p95", "8.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p98", "9.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p99", "10.00", timestamp);
        inOrder.verify(graphite).send("prefix.histogram.p999", "11.00", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void reportsMeters() throws Exception {
        final Meter meter = mock(Meter.class);
        when(meter.getCount()).thenReturn(1L);
        when(meter.getOneMinuteRate()).thenReturn(2.0);
        when(meter.getFiveMinuteRate()).thenReturn(3.0);
        when(meter.getFifteenMinuteRate()).thenReturn(4.0);
        when(meter.getMeanRate()).thenReturn(5.0);

        reporter.report(this.<Gauge>map(),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map("meter", meter),
                        this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.meter.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m1_rate", "2.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m5_rate", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.m15_rate", "4.00", timestamp);
        inOrder.verify(graphite).send("prefix.meter.mean_rate", "5.00", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    // Timer snapshot values are in nanoseconds; the reporter converts them to
    // the configured duration unit (milliseconds), hence 100ms -> "100.00".
    @Test
    public void reportsTimers() throws Exception {
        final Timer timer = mock(Timer.class);
        when(timer.getCount()).thenReturn(1L);
        when(timer.getMeanRate()).thenReturn(2.0);
        when(timer.getOneMinuteRate()).thenReturn(3.0);
        when(timer.getFiveMinuteRate()).thenReturn(4.0);
        when(timer.getFifteenMinuteRate()).thenReturn(5.0);

        final Snapshot snapshot = mock(Snapshot.class);
        when(snapshot.getMax()).thenReturn(TimeUnit.MILLISECONDS.toNanos(100));
        when(snapshot.getMean()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(200));
        when(snapshot.getMin()).thenReturn(TimeUnit.MILLISECONDS.toNanos(300));
        when(snapshot.getStdDev()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(400));
        when(snapshot.getMedian()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(500));
        when(snapshot.get75thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(600));
        when(snapshot.get95thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(700));
        when(snapshot.get98thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(800));
        when(snapshot.get99thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(900));
        when(snapshot.get999thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS
                                                               .toNanos(1000));

        when(timer.getSnapshot()).thenReturn(snapshot);

        reporter.report(this.<Gauge>map(),
                        this.<Counter>map(),
                        this.<Histogram>map(),
                        this.<Meter>map(),
                        map("timer", timer));

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).send("prefix.timer.max", "100.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.mean", "200.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.min", "300.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.stddev", "400.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p50", "500.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p75", "600.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p95", "700.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p98", "800.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p99", "900.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.p999", "1000.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.count", "1", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m1_rate", "3.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m5_rate", "4.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.m15_rate", "5.00", timestamp);
        inOrder.verify(graphite).send("prefix.timer.mean_rate", "2.00", timestamp);
        inOrder.verify(graphite).flush();

        verifyNoMoreInteractions(graphite);
    }

    // A failed connect must still close the connection (no flush is attempted).
    @Test
    public void closesConnectionIfGraphiteIsUnavailable() throws Exception {
        doThrow(new UnknownHostException("UNKNOWN-HOST")).when(graphite).connect();
        reporter.report(map("gauge", gauge(1)),
                this.<Counter>map(),
                this.<Histogram>map(),
                this.<Meter>map(),
                this.<Timer>map());

        final InOrder inOrder = inOrder(graphite);
        inOrder.verify(graphite).isConnected();
        inOrder.verify(graphite).connect();
        inOrder.verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    @Test
    public void closesConnectionOnReporterStop() throws Exception {
        reporter.stop();
        verify(graphite).close();

        verifyNoMoreInteractions(graphite);
    }

    // Helper: empty sorted metric map of the requested type.
    private <T> SortedMap<String, T> map() {
        return new TreeMap<String, T>();
    }

    // Helper: single-entry sorted metric map.
    private <T> SortedMap<String, T> map(String name, T metric) {
        final TreeMap<String, T> map = new TreeMap<String, T>();
        map.put(name, metric);
        return map;
    }

    // Helper: mocked gauge returning the given value.
    private <T> Gauge gauge(T value) {
        final Gauge gauge = mock(Gauge.class);
        when(gauge.getValue()).thenReturn(value);
        return gauge;
    }
}
/*
 * Copyright (C) 2012 The CyanogenMod Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.cyanogenmod.filemanager.commands.shell;

import android.content.res.Resources;
import android.content.res.XmlResourceParser;

import com.android.internal.util.XmlUtils;
import com.cyanogenmod.filemanager.FileManagerApplication;
import com.cyanogenmod.filemanager.R;
import com.cyanogenmod.filemanager.console.CommandNotFoundException;
import com.cyanogenmod.filemanager.console.ExecutionException;
import com.cyanogenmod.filemanager.console.InsufficientPermissionsException;
import com.cyanogenmod.filemanager.preferences.FileManagerSettings;
import com.cyanogenmod.filemanager.preferences.Preferences;
import com.cyanogenmod.filemanager.util.ShellHelper;

import org.xmlpull.v1.XmlPullParserException;

import java.io.IOException;

/**
 * An abstract class that represents a command to be executed
 * in the underlying operating system.
 *
 * @see "command_list.xml"
 */
public abstract class Command {

    // Command list XML tags
    private static final String TAG_COMMAND_LIST = "CommandList"; //$NON-NLS-1$
    private static final String TAG_COMMAND = "command"; //$NON-NLS-1$
    private static final String TAG_STARTCODE = "startcode"; //$NON-NLS-1$
    private static final String TAG_EXITCODE = "exitcode"; //$NON-NLS-1$

    // Placeholder in commandArgs that is replaced by the expanded (varargs) arguments
    private static final String EXPANDED_ARGS = "[@]"; //$NON-NLS-1$

    private final String mId;
    private String mCmd;
    private String mArgs; // The real arguments
    private final Object[] mCmdArgs; //The arguments to be formatted

    // Lazily-loaded singletons read from command_list.xml (see the static getters below)
    private static String sStartCodeCmd;
    private static String sExitCodeCmd;

    private boolean mTrace;

    /**
     * Constructor of <code>Command</code>. Arguments are prepared (quoted).
     *
     * @param id The resource identifier of the command
     * @param args Arguments of the command (will be formatted with the arguments from
     * the command definition)
     * @throws InvalidCommandDefinitionException If the command has an invalid definition
     */
    public Command(String id, String... args) throws InvalidCommandDefinitionException {
        this(id, true, args);
    }

    /**
     * Constructor of <code>Command</code>.
     *
     * @param id The resource identifier of the command
     * @param prepare Indicates if the argument must be prepared
     * @param args Arguments of the command (will be formatted with the arguments from
     * the command definition)
     * @throws InvalidCommandDefinitionException If the command has an invalid definition
     */
    public Command(String id, boolean prepare, String... args)
            throws InvalidCommandDefinitionException {
        super();
        this.mId = id;

        //Convert and quote arguments
        this.mCmdArgs = new Object[args.length];
        int cc = args.length;
        for (int i = 0; i < cc; i++) {
            //Quote the arguments?
            if (prepare) {
                this.mCmdArgs[i] =
                        "\"" + ShellHelper.prepareArgument(args[i]) //$NON-NLS-1$
                        + "\""; //$NON-NLS-1$
            } else {
                this.mCmdArgs[i] = ShellHelper.prepareArgument(args[i]);
            }
        }

        //Load the command info
        getCommandInfo(FileManagerApplication.getInstance().getResources());

        // Get the current trace value
        reloadTrace();
    }

    /**
     * Method that add expanded arguments to the arguments. This is defined with a
     * <code>[@]</code> expression in the <code>commandArgs</code> attribute of the
     * command xml definition file.
     *
     * @param args The expanded arguments
     * @param prepare Indicates if the argument must be prepared
     */
    protected void addExpandedArguments(String[] args, boolean prepare) {
        // Don't use of regexp to avoid the need to parse of args to make it compilable.
        // Only one expanded argument of well known characters
        int pos = this.mArgs.indexOf(EXPANDED_ARGS);
        if (pos != -1) {
            int cc = args.length;
            StringBuffer sb = new StringBuffer();
            for (int i = 0; i < cc; i++) {
                //Quote the arguments?
                if (prepare) {
                    sb = sb.append("\"" + //$NON-NLS-1$
                                ShellHelper.prepareArgument(args[i])
                                + "\""); //$NON-NLS-1$
                    sb = sb.append(" "); //$NON-NLS-1$
                } else {
                    sb = sb.append(ShellHelper.prepareArgument(args[i]));
                    sb = sb.append(" "); //$NON-NLS-1$
                }
            }

            // Replace the expanded argument (splice around the [@] marker)
            String start = this.mArgs.substring(0, pos);
            String end = this.mArgs.substring(pos+EXPANDED_ARGS.length());
            this.mArgs = start + sb.toString() + end;
        }
    }

    /**
     * Method that return if the command has to trace his operations
     *
     * @return boolean If the command has to trace
     */
    public boolean isTrace() {
        return this.mTrace;
    }

    /**
     * Method that reload the status of trace setting
     */
    public final void reloadTrace() {
        this.mTrace = Preferences.getSharedPreferences().getBoolean(
                FileManagerSettings.SETTINGS_SHOW_TRACES.getId(),
                ((Boolean)FileManagerSettings.SETTINGS_SHOW_TRACES.
                        getDefaultValue()).booleanValue());
    }

    /**
     * Method that checks if the result code of the execution was successfully.
     *
     * @param exitCode Program exit code
     * @throws InsufficientPermissionsException If an operation requires elevated permissions
     * @throws CommandNotFoundException If the command was not found
     * @throws ExecutionException If the operation returns a invalid exit code
     * @hide
     */
    public abstract void checkExitCode(int exitCode)
            throws InsufficientPermissionsException, CommandNotFoundException,
            ExecutionException;

    /**
     * Method that returns the resource identifier of the command.
     *
     * @return String The resource identifier of the command
     */
    public String getId() {
        return this.mId;
    }

    /**
     * This method must returns the name of the full qualified command path.<br />
     * <br />
     * This method always must returns a full qualified path, and not an
     * abbreviation to the command to avoid security problems.<br />
     * In the same way, a command not must contains any type of arguments.
     * Arguments must be passed through method {@link #getArguments()}
     *
     * @return String The full qualified command path
     * @see #getArguments()
     */
    public String getCommand() {
        return this.mCmd;
    }

    /**
     * This method can return the list of arguments to be executed along
     * with the command.
     *
     * @return String A list of individual arguments
     */
    public String getArguments() {
        return this.mArgs;
    }

    /**
     * Method that loads the resource command list xml and
     * inflate the internal variables.
     *
     * @param resources The application resource manager
     * @throws InvalidCommandDefinitionException If the command has an invalid definition
     */
    private void getCommandInfo(Resources resources) throws InvalidCommandDefinitionException {

        //Read the command list xml file
        XmlResourceParser parser = resources.getXml(R.xml.command_list);

        try {
            //Find the root element
            XmlUtils.beginDocument(parser, TAG_COMMAND_LIST);
            while (true) {
                XmlUtils.nextElement(parser);
                String element = parser.getName();
                if (element == null) {
                    break;
                }

                if (TAG_COMMAND.equals(element)) {
                    CharSequence id = parser.getAttributeValue(R.styleable.Command_commandId);
                    if (id != null && id.toString().compareTo(this.mId) == 0) {
                        //Both path and args are mandatory for a matching command entry
                        CharSequence path =
                                parser.getAttributeValue(R.styleable.Command_commandPath);
                        CharSequence args =
                                parser.getAttributeValue(R.styleable.Command_commandArgs);
                        if (path == null) {
                            throw new InvalidCommandDefinitionException(
                                    this.mId + ": path is null"); //$NON-NLS-1$
                        }
                        if (args == null) {
                            throw new InvalidCommandDefinitionException(
                                    this.mId + ": args is null"); //$NON-NLS-1$
                        }

                        //Save paths
                        this.mCmd = path.toString();
                        this.mArgs = args.toString();

                        //Format the arguments of the process with the command arguments
                        if (this.mArgs != null && this.mArgs.length() > 0
                                && this.mCmdArgs != null && this.mCmdArgs.length > 0) {
                            this.mArgs = String.format(this.mArgs, this.mCmdArgs);
                        }
                        return;
                    }
                }
            }
        } catch (XmlPullParserException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            parser.close();
        }

        //Command not found
        throw new InvalidCommandDefinitionException(this.mId);
    }

    /**
     * Method that returns the start code command info.
     *
     * @param resources The application resource manager
     * @return String The start code command info
     * @throws InvalidCommandDefinitionException If the command is not present or has an
     * invalid definition
     */
    public static synchronized String getStartCodeCommandInfo(
            Resources resources) throws InvalidCommandDefinitionException {
        //Singleton
        if (sStartCodeCmd != null) {
            return new String(sStartCodeCmd);
        }

        //Read the command list xml file
        XmlResourceParser parser = resources.getXml(R.xml.command_list);

        try {
            //Find the root element
            XmlUtils.beginDocument(parser, TAG_COMMAND_LIST);
            while (true) {
                XmlUtils.nextElement(parser);
                String element = parser.getName();
                if (element == null) {
                    break;
                }

                if (TAG_STARTCODE.equals(element)) {
                    CharSequence path = parser.getAttributeValue(R.styleable.Command_commandPath);
                    if (path == null) {
                        throw new InvalidCommandDefinitionException(
                                TAG_STARTCODE + ": path is null"); //$NON-NLS-1$
                    }

                    //Save paths
                    sStartCodeCmd = path.toString();
                    return new String(sStartCodeCmd);
                }
            }
        } catch (XmlPullParserException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            parser.close();
        }

        //Command not found
        throw new InvalidCommandDefinitionException(TAG_STARTCODE);
    }

    /**
     * Method that returns the exit code command info.
     *
     * @param resources The application resource manager
     * @return String The exit code command info
     * @throws InvalidCommandDefinitionException If the command is not present or has an
     * invalid definition
     */
    public static synchronized String getExitCodeCommandInfo(
            Resources resources) throws InvalidCommandDefinitionException {
        //Singleton
        if (sExitCodeCmd != null) {
            return new String(sExitCodeCmd);
        }

        //Read the command list xml file
        XmlResourceParser parser = resources.getXml(R.xml.command_list);

        try {
            //Find the root element
            XmlUtils.beginDocument(parser, TAG_COMMAND_LIST);
            while (true) {
                XmlUtils.nextElement(parser);
                String element = parser.getName();
                if (element == null) {
                    break;
                }

                if (TAG_EXITCODE.equals(element)) {
                    CharSequence path = parser.getAttributeValue(R.styleable.Command_commandPath);
                    if (path == null) {
                        throw new InvalidCommandDefinitionException(
                                TAG_EXITCODE + ": path is null"); //$NON-NLS-1$
                    }

                    //Save paths
                    sExitCodeCmd = path.toString();
                    return new String(sExitCodeCmd);
                }
            }
        } catch (XmlPullParserException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            parser.close();
        }

        //Command not found
        throw new InvalidCommandDefinitionException(TAG_EXITCODE);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.persistence; import java.io.ByteArrayInputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.EnumSet; import java.util.concurrent.ConcurrentHashMap; import org.apache.logging.log4j.Logger; import org.apache.geode.DataSerializer; import org.apache.geode.cache.DiskAccessException; import org.apache.geode.cache.UnsupportedVersionException; import org.apache.geode.internal.ExitCode; import org.apache.geode.internal.InternalDataSerializer; import org.apache.geode.internal.Version; import org.apache.geode.internal.cache.CountingDataInputStream; import org.apache.geode.internal.cache.DiskInitFile; import org.apache.geode.internal.cache.DiskInitFile.DiskRegionFlag; import org.apache.geode.internal.cache.DiskStoreImpl; import org.apache.geode.internal.cache.Oplog.OPLOG_TYPE; import org.apache.geode.internal.cache.ProxyBucketRegion; import org.apache.geode.internal.cache.versions.RegionVersionHolder; 
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.logging.log4j.LogMarker;

/**
 * Sequentially parses a disk-store init file (".if" file), dispatching one callback on the
 * supplied {@link DiskInitFileInterpreter} per record read. Each record is a one-byte opcode
 * followed by an opcode-specific payload and a terminating end-of-record byte.
 *
 * <p>When debug logging is enabled, the interpreter is wrapped in a dynamic proxy that logs
 * every callback before delegating (see {@link PrintingInterpreter}).
 */
public class DiskInitFileParser {
  private static final Logger logger = LogService.getLogger();

  /** Input stream positioned at the start of the init file; also tracks bytes consumed. */
  private final CountingDataInputStream dis;
  /** Receives one callback per parsed record; may be a logging proxy (see constructor). */
  private DiskInitFileInterpreter interpreter;

  /**
   * Creates a parser over {@code dis}. If debug logging is on, {@code interpreter} is wrapped
   * so that every callback is logged before being forwarded.
   *
   * @param dis input positioned at the beginning of the init file
   * @param interpreter callback target for each parsed record
   */
  public DiskInitFileParser(CountingDataInputStream dis, DiskInitFileInterpreter interpreter) {
    this.dis = dis;
    if (logger.isDebugEnabled()) {
      this.interpreter = createPrintingInterpreter(interpreter);
    } else {
      this.interpreter = interpreter;
    }
  }

  // Set when an explicit IF_EOF_ID record was seen (vs. simply running out of bytes).
  private transient boolean gotEOF;

  /**
   * Reads records until end-of-file (explicit IF_EOF_ID record, physical end of stream, or the
   * interpreter reporting it is closing), invoking the matching interpreter callback for each.
   *
   * @return the {@link DiskStoreID} from the IFREC_DISKSTORE_ID record, or null if none was read
   * @throws IOException if the file is truncated or a record is malformed
   * @throws ClassNotFoundException if a serialized payload references an unknown class
   * @throws DiskAccessException on an unknown opcode or unsupported version ordinal
   */
  public DiskStoreID parse() throws IOException, ClassNotFoundException {
    // Product version the records were written with; updated when an
    // IFREC_GEMFIRE_VERSION record is encountered. Affects PersistentMemberID decoding.
    Version gfversion = Version.GFE_662;
    DiskStoreID result = null;
    boolean endOfFile = false;
    while (!endOfFile) {
      if (dis.atEndOfFile()) {
        endOfFile = true;
        break;
      }
      byte opCode = dis.readByte();
      if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
        logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "DiskInitFile opcode={}", opCode);
      }
      switch (opCode) {
        case DiskInitFile.IF_EOF_ID:
          // Explicit terminator record; remember we saw it (see gotEOF()).
          endOfFile = true;
          gotEOF = true;
          break;
        case DiskInitFile.IFREC_INSTANTIATOR_ID: {
          int id = dis.readInt();
          String cn = readClassName(dis);
          String icn = readClassName(dis);
          readEndOfRecord(dis);
          interpreter.cmnInstantiatorId(id, cn, icn);
        }
          break;
        case DiskInitFile.IFREC_DATA_SERIALIZER_ID: {
          Class<?> dsc = readClass(dis);
          readEndOfRecord(dis);
          interpreter.cmnDataSerializerId(dsc);
        }
          break;
        // Membership records: each carries a disk region id plus a serialized
        // PersistentMemberID, decoded according to gfversion.
        case DiskInitFile.IFREC_ONLINE_MEMBER_ID: {
          long drId = readDiskRegionID(dis);
          PersistentMemberID pmid = readPMID(dis, gfversion);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_ONLINE_MEMBER_ID drId={} omid={}", drId, pmid);
          }
          interpreter.cmnOnlineMemberId(drId, pmid);
        }
          break;
        case DiskInitFile.IFREC_OFFLINE_MEMBER_ID: {
          long drId = readDiskRegionID(dis);
          PersistentMemberID pmid = readPMID(dis, gfversion);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_OFFLINE_MEMBER_ID drId={} pmid={}", drId, pmid);
          }
          interpreter.cmnOfflineMemberId(drId, pmid);
        }
          break;
        case DiskInitFile.IFREC_RM_MEMBER_ID: {
          long drId = readDiskRegionID(dis);
          PersistentMemberID pmid = readPMID(dis, gfversion);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_RM_MEMBER_ID drId={} pmid={}", drId, pmid);
          }
          interpreter.cmnRmMemberId(drId, pmid);
        }
          break;
        case DiskInitFile.IFREC_MY_MEMBER_INITIALIZING_ID: {
          long drId = readDiskRegionID(dis);
          PersistentMemberID pmid = readPMID(dis, gfversion);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_MY_MEMBER_INITIALIZING_ID drId={} pmid={}", drId, pmid);
          }
          interpreter.cmnAddMyInitializingPMID(drId, pmid);
        }
          break;
        case DiskInitFile.IFREC_MY_MEMBER_INITIALIZED_ID: {
          long drId = readDiskRegionID(dis);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_MY_MEMBER_INITIALIZED_ID drId={}", drId);
          }
          interpreter.cmnMarkInitialized(drId);
        }
          break;
        // Region lifecycle records.
        case DiskInitFile.IFREC_CREATE_REGION_ID: {
          long drId = readDiskRegionID(dis);
          String regName = dis.readUTF();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_CREATE_REGION_ID drId={} name={}", drId, regName);
          }
          interpreter.cmnCreateRegion(drId, regName);
        }
          break;
        case DiskInitFile.IFREC_BEGIN_DESTROY_REGION_ID: {
          long drId = readDiskRegionID(dis);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_BEGIN_DESTROY_REGION_ID drId={}", drId);
          }
          interpreter.cmnBeginDestroyRegion(drId);
        }
          break;
        case DiskInitFile.IFREC_END_DESTROY_REGION_ID: {
          long drId = readDiskRegionID(dis);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_END_DESTROY_REGION_ID drId={}", drId);
          }
          interpreter.cmnEndDestroyRegion(drId);
        }
          break;
        case DiskInitFile.IFREC_BEGIN_PARTIAL_DESTROY_REGION_ID: {
          long drId = readDiskRegionID(dis);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_BEGIN_PARTIAL_DESTROY_REGION_ID drId={}", drId);
          }
          interpreter.cmnBeginPartialDestroyRegion(drId);
        }
          break;
        case DiskInitFile.IFREC_END_PARTIAL_DESTROY_REGION_ID: {
          long drId = readDiskRegionID(dis);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_END_PARTIAL_DESTROY_REGION_ID drId={}", drId);
          }
          interpreter.cmnEndPartialDestroyRegion(drId);
        }
          break;
        case DiskInitFile.IFREC_CLEAR_REGION_ID: {
          long drId = readDiskRegionID(dis);
          long clearOplogEntryId = dis.readLong();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_CLEAR_REGION_ID drId={} oplogEntryId={}", drId, clearOplogEntryId);
          }
          interpreter.cmnClearRegion(drId, clearOplogEntryId);
        }
          break;
        case DiskInitFile.IFREC_CLEAR_REGION_WITH_RVV_ID: {
          // Clear recorded with a region version vector: a map of member disk store id
          // to its RegionVersionHolder, 'size' entries.
          long drId = readDiskRegionID(dis);
          int size = dis.readInt();
          ConcurrentHashMap<DiskStoreID, RegionVersionHolder<DiskStoreID>> memberToVersion =
              new ConcurrentHashMap<DiskStoreID, RegionVersionHolder<DiskStoreID>>(size);
          for (int i = 0; i < size; i++) {
            DiskStoreID id = new DiskStoreID();
            InternalDataSerializer.invokeFromData(id, dis);
            RegionVersionHolder holder = new RegionVersionHolder(dis);
            memberToVersion.put(id, holder);
          }
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_CLEAR_REGION_WITH_RVV_ID drId={} memberToVersion={}", drId,
                memberToVersion);
          }
          interpreter.cmnClearRegion(drId, memberToVersion);
        }
          break;
        // Oplog file lifecycle records (create/delete of crf/drf/krf files by oplog id).
        case DiskInitFile.IFREC_CRF_CREATE: {
          long oplogId = dis.readLong();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_CRF_CREATE oplogId={}",
                oplogId);
          }
          interpreter.cmnCrfCreate(oplogId);
        }
          break;
        case DiskInitFile.IFREC_DRF_CREATE: {
          long oplogId = dis.readLong();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_DRF_CREATE oplogId={}",
                oplogId);
          }
          interpreter.cmnDrfCreate(oplogId);
        }
          break;
        case DiskInitFile.IFREC_KRF_CREATE: {
          long oplogId = dis.readLong();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_KRF_CREATE oplogId={}",
                oplogId);
          }
          interpreter.cmnKrfCreate(oplogId);
        }
          break;
        case DiskInitFile.IFREC_CRF_DELETE: {
          long oplogId = dis.readLong();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_CRF_DELETE oplogId={}",
                oplogId);
          }
          interpreter.cmnCrfDelete(oplogId);
        }
          break;
        case DiskInitFile.IFREC_DRF_DELETE: {
          long oplogId = dis.readLong();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_DRF_DELETE oplogId={}",
                oplogId);
          }
          interpreter.cmnDrfDelete(oplogId);
        }
          break;
        // Region-config records. Four on-disk layouts exist; newer product versions
        // appended fields, so each case reads a superset of the previous one and passes
        // defaults for fields its layout lacks.
        case DiskInitFile.IFREC_REGION_CONFIG_ID: {
          long drId = readDiskRegionID(dis);
          byte lruAlgorithm = dis.readByte();
          byte lruAction = dis.readByte();
          int lruLimit = dis.readInt();
          int concurrencyLevel = dis.readInt();
          int initialCapacity = dis.readInt();
          float loadFactor = dis.readFloat();
          boolean statisticsEnabled = dis.readBoolean();
          boolean isBucket = dis.readBoolean();
          EnumSet<DiskRegionFlag> flags = EnumSet.noneOf(DiskRegionFlag.class);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_REGION_CONFIG_ID drId={}",
                drId);
          }
          interpreter.cmnRegionConfig(drId, lruAlgorithm, lruAction, lruLimit, concurrencyLevel,
              initialCapacity, loadFactor, statisticsEnabled, isBucket, flags,
              ProxyBucketRegion.NO_FIXED_PARTITION_NAME, // fixes bug 43910
              -1, null, false);
        }
          break;
        case DiskInitFile.IFREC_REGION_CONFIG_ID_66: {
          // Adds partitionName and startingBucketId to the base layout.
          long drId = readDiskRegionID(dis);
          byte lruAlgorithm = dis.readByte();
          byte lruAction = dis.readByte();
          int lruLimit = dis.readInt();
          int concurrencyLevel = dis.readInt();
          int initialCapacity = dis.readInt();
          float loadFactor = dis.readFloat();
          boolean statisticsEnabled = dis.readBoolean();
          boolean isBucket = dis.readBoolean();
          EnumSet<DiskRegionFlag> flags = EnumSet.noneOf(DiskRegionFlag.class);
          String partitionName = dis.readUTF();
          int startingBucketId = dis.readInt();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_REGION_CONFIG_ID drId={}",
                drId);
          }
          interpreter.cmnRegionConfig(drId, lruAlgorithm, lruAction, lruLimit, concurrencyLevel,
              initialCapacity, loadFactor, statisticsEnabled, isBucket, flags, partitionName,
              startingBucketId, null, false);
        }
          break;
        case DiskInitFile.IFREC_REGION_CONFIG_ID_80: {
          // Adds compressorClassName (empty string means none) and a versioning flag.
          long drId = readDiskRegionID(dis);
          byte lruAlgorithm = dis.readByte();
          byte lruAction = dis.readByte();
          int lruLimit = dis.readInt();
          int concurrencyLevel = dis.readInt();
          int initialCapacity = dis.readInt();
          float loadFactor = dis.readFloat();
          boolean statisticsEnabled = dis.readBoolean();
          boolean isBucket = dis.readBoolean();
          EnumSet<DiskRegionFlag> flags = EnumSet.noneOf(DiskRegionFlag.class);
          String partitionName = dis.readUTF();
          int startingBucketId = dis.readInt();
          String compressorClassName = dis.readUTF();
          if ("".equals(compressorClassName)) {
            compressorClassName = null;
          }
          if (dis.readBoolean()) {
            flags.add(DiskRegionFlag.IS_WITH_VERSIONING);
          }
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_REGION_CONFIG_ID drId={}",
                drId);
          }
          interpreter.cmnRegionConfig(drId, lruAlgorithm, lruAction, lruLimit, concurrencyLevel,
              initialCapacity, loadFactor, statisticsEnabled, isBucket, flags, partitionName,
              startingBucketId, compressorClassName, false);
        }
          break;
        case DiskInitFile.IFREC_REGION_CONFIG_ID_90: {
          // Adds the offHeap boolean to the _80 layout.
          long drId = readDiskRegionID(dis);
          byte lruAlgorithm = dis.readByte();
          byte lruAction = dis.readByte();
          int lruLimit = dis.readInt();
          int concurrencyLevel = dis.readInt();
          int initialCapacity = dis.readInt();
          float loadFactor = dis.readFloat();
          boolean statisticsEnabled = dis.readBoolean();
          boolean isBucket = dis.readBoolean();
          EnumSet<DiskRegionFlag> flags = EnumSet.noneOf(DiskRegionFlag.class);
          String partitionName = dis.readUTF();
          int startingBucketId = dis.readInt();
          String compressorClassName = dis.readUTF();
          if ("".equals(compressorClassName)) {
            compressorClassName = null;
          }
          if (dis.readBoolean()) {
            flags.add(DiskRegionFlag.IS_WITH_VERSIONING);
          }
          boolean offHeap = dis.readBoolean();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_REGION_CONFIG_ID drId={}",
                drId);
          }
          interpreter.cmnRegionConfig(drId, lruAlgorithm, lruAction, lruLimit, concurrencyLevel,
              initialCapacity, loadFactor, statisticsEnabled, isBucket, flags, partitionName,
              startingBucketId, compressorClassName, offHeap);
        }
          break;
        case DiskInitFile.IFREC_OFFLINE_AND_EQUAL_MEMBER_ID: {
          long drId = readDiskRegionID(dis);
          PersistentMemberID pmid = readPMID(dis, gfversion);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_OFFLINE_AND_EQUAL_MEMBER_ID drId={} pmid={}", drId, pmid);
          }
          interpreter.cmdOfflineAndEqualMemberId(drId, pmid);
        }
          break;
        case DiskInitFile.IFREC_DISKSTORE_ID: {
          // Note: least-significant bits are stored first in the record.
          long leastSigBits = dis.readLong();
          long mostSigBits = dis.readLong();
          readEndOfRecord(dis);
          result = new DiskStoreID(mostSigBits, leastSigBits);
          interpreter.cmnDiskStoreID(result);
        }
          break;
        case DiskInitFile.OPLOG_MAGIC_SEQ_ID:
          // Magic byte sequence identifying this file as an init file.
          readOplogMagicSeqRecord(dis, OPLOG_TYPE.IF);
          break;
        case DiskInitFile.IFREC_PR_CREATE: {
          String name = dis.readUTF();
          int numBuckets = dis.readInt();
          String colocatedWith = dis.readUTF();
          readEndOfRecord(dis);
          PRPersistentConfig config = new PRPersistentConfig(numBuckets, colocatedWith);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_PR_CREATE name={}, config={}",
                name, config);
          }
          interpreter.cmnPRCreate(name, config);
        }
          break;
        case DiskInitFile.IFREC_GEMFIRE_VERSION: {
          // Switches gfversion for decoding all subsequent records.
          short ver = Version.readOrdinal(dis);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_GEMFIRE_VERSION version={}",
                ver);
          }
          try {
            gfversion = Version.fromOrdinal(ver, false);
          } catch (UnsupportedVersionException e) {
            throw new DiskAccessException(
                String.format("Unknown version ordinal %s found when recovering Oplogs", ver), e,
                this.interpreter.getNameForError());
          }
          interpreter.cmnGemfireVersion(gfversion);
          break;
        }
        case DiskInitFile.IFREC_PR_DESTROY: {
          String name = dis.readUTF();
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "IFREC_PR_DESTROY name={}", name);
          }
          interpreter.cmnPRDestroy(name);
        }
          break;
        case DiskInitFile.IFREC_ADD_CANONICAL_MEMBER_ID: {
          int id = dis.readInt();
          Object object = DataSerializer.readObject(dis);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_ADD_CANONICAL_MEMBER_ID id={} name={}", id, object);
          }
          interpreter.cmnAddCanonicalMemberId(id, object);
          break;
        }
        case DiskInitFile.IFREC_REVOKE_DISK_STORE_ID: {
          PersistentMemberPattern pattern = new PersistentMemberPattern();
          InternalDataSerializer.invokeFromData(pattern, dis);
          readEndOfRecord(dis);
          if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
            // NOTE(review): the pattern is string-concatenated onto the format, so the
            // "{}" placeholder is never bound — likely meant to be a second argument.
            logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
                "IFREC_REVOKE_DISK_STORE_ID id={}" + pattern);
          }
          interpreter.cmnRevokeDiskStoreId(pattern);
        }
          break;
        default:
          throw new DiskAccessException(
              String.format("Unknown opCode %s found in disk initialization file.", opCode),
              this.interpreter.getNameForError());
      }
      // Give the interpreter a chance to abort a long recovery mid-file.
      if (interpreter.isClosing()) {
        break;
      }
    }
    return result;
  }

  /**
   * Reads and validates the fixed magic-byte sequence for the given oplog type, followed by the
   * end-of-record byte.
   *
   * @throws DiskAccessException if any byte of the sequence does not match {@code type}
   */
  private void readOplogMagicSeqRecord(DataInput dis, OPLOG_TYPE type) throws IOException {
    byte[] seq = new byte[OPLOG_TYPE.getLen()];
    dis.readFully(seq);
    for (int i = 0; i < OPLOG_TYPE.getLen(); i++) {
      if (seq[i] != type.getBytes()[i]) {
        if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
          logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE,
              "oplog magic code mismatched at byte:{}, value:{}", (i + 1), seq[i]);
        }
        throw new DiskAccessException("Invalid oplog (" + type.name() + ") file provided.",
            interpreter.getNameForError());
      }
    }
    if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
      StringBuffer sb = new StringBuffer();
      for (int i = 0; i < OPLOG_TYPE.getLen(); i++) {
        sb.append(" " + seq[i]);
      }
      logger.trace(LogMarker.PERSIST_RECOVERY_VERBOSE, "oplog magic code: {}", sb);
    }
    readEndOfRecord(dis);
  }

  /**
   * Reads a class name from the given input stream, as written by writeClass, and loads the class.
   *
   * @return null if class can not be loaded; otherwise loaded Class
   */
  private static Class<?> readClass(DataInput di) throws IOException {
    int len = di.readInt();
    byte[] bytes = new byte[len];
    di.readFully(bytes);
    String className = new String(bytes); // use default decoder
    Class<?> result = null;
    try {
      result = InternalDataSerializer.getCachedClass(className); // see bug 41206
    } catch (ClassNotFoundException ignore) {
    }
    return result;
  }

  /**
   * Reads a class name from the given input stream.
   *
   * @return class name
   */
  private static String readClassName(DataInput di) throws IOException {
    int len = di.readInt();
    byte[] bytes = new byte[len];
    di.readFully(bytes);
    return new String(bytes); // use default decoder
  }

  /**
   * Reads a variable-length disk region id. The first unsigned byte either IS the id (when
   * outside the reserved range) or gives the number of following bytes that encode the id
   * big-endian, with the first of those bytes sign-extended.
   *
   * @return the decoded disk region id
   */
  static long readDiskRegionID(CountingDataInputStream dis) throws IOException {
    int bytesToRead = dis.readUnsignedByte();
    if (bytesToRead <= DiskStoreImpl.MAX_RESERVED_DRID
        && bytesToRead >= DiskStoreImpl.MIN_RESERVED_DRID) {
      long result = dis.readByte(); // we want to sign extend this first byte
      bytesToRead--;
      while (bytesToRead > 0) {
        result <<= 8;
        result |= dis.readUnsignedByte(); // no sign extension
        bytesToRead--;
      }
      return result;
    } else {
      return bytesToRead;
    }
  }

  /**
   * Consumes the single end-of-record marker byte that terminates every record.
   *
   * @throws EOFException if a zero byte is found instead (partial last record from a crash
   *         during write)
   * @throws IllegalStateException on any other unexpected byte
   */
  private void readEndOfRecord(DataInput di) throws IOException {
    int b = di.readByte();
    if (b != DiskInitFile.END_OF_RECORD_ID) {
      if (b == 0) {
        // this is expected if this is the last record and we died while writing it.
        throw new EOFException("found partial last record");
      } else {
        // Our implementation currently relies on all unwritten bytes having
        // a value of 0. So throw this exception if we find one we didn't expect.
        throw new IllegalStateException("expected end of record (byte=="
            + DiskInitFile.END_OF_RECORD_ID + ") or zero but found " + b);
      }
    }
  }

  /**
   * Reads a length-prefixed serialized {@link PersistentMemberID} from the file and decodes it
   * using the version-appropriate format.
   */
  private PersistentMemberID readPMID(CountingDataInputStream dis, Version gfversion)
      throws IOException, ClassNotFoundException {
    int len = dis.readInt();
    byte[] buf = new byte[len];
    dis.readFully(buf);
    return bytesToPMID(buf, gfversion);
  }

  /**
   * Deserializes a {@link PersistentMemberID} from {@code bytes}. Records written before
   * GFE 7.0 use the legacy 6.6.2 wire format.
   */
  private PersistentMemberID bytesToPMID(byte[] bytes, Version gfversion)
      throws IOException, ClassNotFoundException {
    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
    DataInputStream dis = new DataInputStream(bais);
    PersistentMemberID result = new PersistentMemberID();
    if (Version.GFE_70.compareTo(gfversion) > 0) {
      // pre-7.0 writer: use the legacy decoding path
      result._fromData662(dis);
    } else {
      InternalDataSerializer.invokeFromData(result, dis);
    }
    return result;
  }

  /**
   * Command-line entry point: dumps the records of the init file named by the single argument.
   */
  public static void main(String[] args) throws IOException, ClassNotFoundException {
    if (args.length != 1) {
      System.err.println("Usage: parse filename");
      ExitCode.FATAL.doSystemExit();
    }
    dump(new File(args[0]));
  }

  /**
   * Parses {@code file} with a printing interpreter, logging every record; used for offline
   * inspection of an init file.
   */
  public static void dump(File file) throws IOException, ClassNotFoundException {
    InputStream is = new FileInputStream(file);
    CountingDataInputStream dis = new CountingDataInputStream(is, file.length());
    try {
      DiskInitFileInterpreter interpreter = createPrintingInterpreter(null);
      DiskInitFileParser parser = new DiskInitFileParser(dis, interpreter);
      parser.parse();
    } finally {
      is.close();
    }
  }

  /**
   * Wraps {@code wrapped} (which may be null) in a dynamic proxy that debug-logs every
   * interpreter callback before delegating.
   */
  private static DiskInitFileInterpreter createPrintingInterpreter(
      DiskInitFileInterpreter wrapped) {
    DiskInitFileInterpreter interpreter = (DiskInitFileInterpreter) Proxy.newProxyInstance(
        DiskInitFileInterpreter.class.getClassLoader(),
        new Class[] {DiskInitFileInterpreter.class}, new PrintingInterpreter(wrapped));
    return interpreter;
  }

  /**
   * InvocationHandler that logs each interpreter call as "methodName(arg,arg,...)" at debug
   * level, then forwards to the delegate. With a null delegate (dump mode) it answers
   * {@code false} for isClosing and {@code true} for any other boolean-returning method.
   */
  private static class PrintingInterpreter implements InvocationHandler {
    // Real interpreter to forward to; null when only printing (see dump()).
    private final DiskInitFileInterpreter delegate;

    public PrintingInterpreter(DiskInitFileInterpreter wrapped) {
      this.delegate = wrapped;
    }

    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
      // isClosing is queried every loop iteration; answer without logging.
      if (method.getName().equals("isClosing")) {
        if (delegate == null) {
          return Boolean.FALSE;
        } else {
          return delegate.isClosing();
        }
      }
      Object result = null;
      if (method.getReturnType().equals(boolean.class)) {
        result = Boolean.TRUE;
      }
      // Render the call as name(arg,arg,...); the replace() swaps the trailing comma
      // for the closing parenthesis.
      StringBuilder out = new StringBuilder();
      out.append(method.getName()).append("(");
      for (Object arg : args) {
        out.append(arg);
        out.append(",");
      }
      out.replace(out.length() - 1, out.length(), ")");
      if (logger.isDebugEnabled()) {
        logger.debug(out.toString());
      }
      if (delegate == null) {
        return result;
      } else {
        return method.invoke(delegate, args);
      }
    }
  }

  /**
   * @return true if an explicit IF_EOF_ID record was read (vs. hitting physical end of file)
   */
  public boolean gotEOF() {
    return this.gotEOF;
  }
}
/* * Copyright (C) 2009 University of Washington * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.odk.collect.android.widgets; import android.content.Context; import android.database.Cursor; import android.graphics.BitmapFactory; import android.view.KeyEvent; import android.view.View; import android.view.inputmethod.InputMethodManager; import android.widget.CompoundButton; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RadioButton; import android.widget.RelativeLayout; import android.widget.TextView; import org.javarosa.core.model.FormDef; import org.javarosa.core.model.condition.EvaluationContext; import org.javarosa.core.model.data.IAnswerData; import org.javarosa.core.model.data.StringData; import org.javarosa.core.model.instance.TreeElement; import org.javarosa.form.api.FormEntryPrompt; import org.javarosa.xpath.XPathNodeset; import org.javarosa.xpath.XPathParseTool; import org.javarosa.xpath.expr.XPathExpression; import org.javarosa.xpath.parser.XPathSyntaxException; import org.odk.collect.android.R; import org.odk.collect.android.application.Collect; import org.odk.collect.android.database.ItemsetDbAdapter; import org.odk.collect.android.listeners.AdvanceToNextListener; import java.io.File; import java.util.ArrayList; import java.util.HashMap; /** * The most basic widget that allows for entry of any text. 
 *
 * @author Carl Hartung (carlhartung@gmail.com)
 * @author Yaw Anokwa (yanokwa@gmail.com)
 */
public class ItemsetWidget extends QuestionWidget
        implements CompoundButton.OnCheckedChangeListener, View.OnClickListener {

    private static String tag = "ItemsetWidget";

    // True when the widget must not accept input (form read-only or caller override).
    boolean mReadOnly;
    // When true, selecting an option immediately advances to the next question.
    private boolean mAutoAdvanceToNext;
    // One radio button per row returned by the itemset query, in cursor order.
    private ArrayList<RadioButton> mButtons;
    // Currently selected value (the "name" column), or null when nothing is selected.
    private String mAnswer = null;
    // Hashmap linking label:value
    private HashMap<String, String> mAnswers;
    // Notified on click when auto-advance is enabled; the hosting Context must implement it.
    private AdvanceToNextListener mAutoAdvanceToNextListener;

    /**
     * Builds the widget: extracts the itemset query from the prompt's "query" attribute,
     * evaluates its XPath arguments against the form instance, runs the query against the
     * itemsets.csv-backed database, and renders one radio button per matching row.
     *
     * @param context hosting context; must implement AdvanceToNextListener when
     *                autoAdvanceToNext is true
     * @param prompt the form entry prompt carrying the "query" additional attribute
     * @param readOnlyOverride forces the widget read-only regardless of the prompt
     * @param autoAdvanceToNext advance to the next question as soon as an option is clicked
     */
    protected ItemsetWidget(Context context, FormEntryPrompt prompt,
            boolean readOnlyOverride, boolean autoAdvanceToNext) {
        super(context, prompt);

        mReadOnly = prompt.isReadOnly() || readOnlyOverride;
        mAnswers = new HashMap<String, String>();
        mButtons = new ArrayList<>();
        mAutoAdvanceToNext = autoAdvanceToNext;
        if (autoAdvanceToNext) {
            // NOTE(review): unchecked cast — assumes the hosting Activity implements
            // AdvanceToNextListener; will throw ClassCastException otherwise.
            mAutoAdvanceToNextListener = (AdvanceToNextListener) context;
        }

        // Layout holds the vertical list of buttons
        LinearLayout allOptionsLayout = new LinearLayout(context);

        String currentAnswer = prompt.getAnswerText();

        // the format of the query should be something like this:
        // query="instance('cities')/root/item[state=/data/state and county=/data/county]"

        // "query" is what we're using to notify that this is an
        // itemset widget.
        String nodesetStr = prompt.getQuestion().getAdditionalAttribute(null, "query");

        // parse out the list name, between the ''
        String list_name = nodesetStr.substring(nodesetStr.indexOf("'") + 1,
                nodesetStr.lastIndexOf("'"));

        // isolate the string between between the [ ] characters
        String queryString = nodesetStr.substring(nodesetStr.indexOf("[") + 1,
                nodesetStr.lastIndexOf("]"));

        StringBuilder selection = new StringBuilder();
        // add the list name as the first argument, which will always be there
        selection.append("list_name=?");

        // check to see if there are any arguments
        if (queryString.indexOf("=") != -1) {
            selection.append(" and ");
        }

        // can't just split on 'and' or 'or' because they have different
        // behavior, so loop through and break them off until we don't have any
        // more
        // must include the spaces in indexOf so we don't match words like
        // "land"
        int andIndex = -1;
        int orIndex = -1;

        ArrayList<String> arguments = new ArrayList<String>();

        // Each iteration consumes one clause up to the next " and "/" or " separator;
        // the indexOf assignments inside the condition drive the loop state.
        while ((andIndex = queryString.indexOf(" and ")) != -1
                || (orIndex = queryString.indexOf(" or ")) != -1) {
            if (andIndex != -1) {
                String subString = queryString.substring(0, andIndex);
                String pair[] = subString.split("=");
                if (pair.length == 2) {
                    selection.append(pair[0].trim() + "=? and ");
                    arguments.add(pair[1].trim());
                } else {
                    // parse error
                }
                // move string forward to after " and "
                queryString = queryString.substring(andIndex + 5, queryString.length());
                andIndex = -1;
            } else if (orIndex != -1) {
                String subString = queryString.substring(0, orIndex);
                String pair[] = subString.split("=");
                if (pair.length == 2) {
                    selection.append(pair[0].trim() + "=? or ");
                    arguments.add(pair[1].trim());
                } else {
                    // parse error
                }
                // move string forward to after " or "
                queryString = queryString.substring(orIndex + 4, queryString.length());
                orIndex = -1;
            }
        }

        // parse the last segment (or only segment if there are no 'and' or 'or'
        // clauses
        String pair[] = queryString.split("=");
        if (pair.length == 2) {
            selection.append(pair[0].trim() + "=?");
            arguments.add(pair[1].trim());
        }

        if (pair.length == 1) {
            // this is probably okay, because then you just list all items in
            // the list
        } else {
            // parse error
        }

        // +1 is for the list_name
        String[] selectionArgs = new String[arguments.size() + 1];

        boolean nullArgs = false; // can't have any null arguments

        selectionArgs[0] = list_name; // first argument is always listname

        // loop through the arguments, evaluate any expressions
        // and build the query string for the DB
        for (int i = 0; i < arguments.size(); i++) {
            XPathExpression xpr = null;
            try {
                xpr = XPathParseTool.parseXPath(arguments.get(i));
            } catch (XPathSyntaxException e) {
                e.printStackTrace();
                TextView error = new TextView(context);
                error.setText(String.format(getContext().getString(R.string.parser_exception),
                        arguments.get(i)));
                addAnswerView(error);
                break;
            }

            if (xpr != null) {
                // Evaluate the expression relative to this question's position in the
                // form instance.
                FormDef form = Collect.getInstance().getFormController().getFormDef();
                TreeElement mTreeElement = form.getMainInstance().resolveReference(
                        prompt.getIndex().getReference());
                EvaluationContext ec = new EvaluationContext(form.getEvaluationContext(),
                        mTreeElement.getRef());
                Object value = xpr.eval(form.getMainInstance(), ec);
                if (value == null) {
                    nullArgs = true;
                } else {
                    if (value instanceof XPathNodeset) {
                        XPathNodeset xpn = (XPathNodeset) value;
                        value = xpn.getValAt(0);
                    }
                    selectionArgs[i + 1] = value.toString();
                }
            }
        }

        File itemsetFile = new File(
                Collect.getInstance().getFormController().getMediaFolder().getAbsolutePath()
                        + "/itemsets.csv");

        if (nullArgs) {
            // we can't try to query with null values else it blows up
            // so just leave the screen blank
            // TODO: put an error?
        } else if (itemsetFile.exists()) {
            ItemsetDbAdapter ida = new ItemsetDbAdapter();
            ida.open();

            // name of the itemset table for this form
            String pathHash = ItemsetDbAdapter.getMd5FromString(itemsetFile.getAbsolutePath());
            try {
                Cursor c = ida.query(pathHash, selection.toString(), selectionArgs);
                if (c != null) {
                    c.move(-1);
                    int index = 0;
                    while (c.moveToNext()) {
                        String label = "";
                        String val = "";

                        // try to get the value associated with the label:lang
                        // string if that doen't exist, then just use label
                        String lang = "";
                        if (Collect.getInstance().getFormController().getLanguages() != null
                                && Collect.getInstance().getFormController()
                                        .getLanguages().length > 0) {
                            lang = Collect.getInstance().getFormController().getLanguage();
                        }

                        // apparently you only need the double quotes in the
                        // column name when creating the column with a :
                        // included
                        String labelLang = "label" + "::" + lang;
                        int langCol = c.getColumnIndex(labelLang);
                        if (langCol == -1) {
                            label = c.getString(c.getColumnIndex("label"));
                        } else {
                            label = c.getString(c.getColumnIndex(labelLang));
                        }

                        // the actual value is stored in name
                        val = c.getString(c.getColumnIndex("name"));
                        mAnswers.put(label, val);

                        RadioButton rb = new RadioButton(context);
                        rb.setOnCheckedChangeListener(this);
                        rb.setOnClickListener(this);
                        rb.setTextSize(mAnswerFontsize);
                        rb.setText(label);
                        rb.setTag(index);
                        rb.setId(QuestionWidget.newUniqueId());
                        mButtons.add(rb);

                        // have to add it to the radiogroup before checking it,
                        // else it lets two buttons be checked...
                        if (currentAnswer != null && val.compareTo(currentAnswer) == 0) {
                            rb.setChecked(true);
                        }

                        RelativeLayout singleOptionLayout = new RelativeLayout(getContext());

                        RelativeLayout.LayoutParams textParams =
                                new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT,
                                        LayoutParams.WRAP_CONTENT);
                        textParams.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
                        textParams.addRule(RelativeLayout.ALIGN_PARENT_TOP);
                        textParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
                        textParams.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
                        singleOptionLayout.addView(rb, textParams);

                        if (mAutoAdvanceToNext) {
                            // Right-pointing arrow hints that clicking advances the form.
                            ImageView rightArrow = new ImageView(getContext());
                            rightArrow.setImageBitmap(
                                    BitmapFactory.decodeResource(getContext().getResources(),
                                            R.drawable.expander_ic_right));
                            RelativeLayout.LayoutParams arrowParams =
                                    new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT,
                                            LayoutParams.WRAP_CONTENT);
                            arrowParams.addRule(RelativeLayout.CENTER_VERTICAL);
                            arrowParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
                            singleOptionLayout.addView(rightArrow, arrowParams);
                        }

                        if (!c.isLast()) {
                            // Last, add the dividing line (except for the last element)
                            ImageView divider = new ImageView(getContext());
                            divider.setBackgroundResource(
                                    android.R.drawable.divider_horizontal_bright);
                            RelativeLayout.LayoutParams dividerParams =
                                    new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT,
                                            LayoutParams.WRAP_CONTENT);
                            dividerParams.addRule(RelativeLayout.BELOW, rb.getId());
                            singleOptionLayout.addView(divider, dividerParams);
                        }
                        allOptionsLayout.addView(singleOptionLayout);
                        index++;
                    }
                    allOptionsLayout.setOrientation(LinearLayout.VERTICAL);
                    c.close();
                }
            } finally {
                ida.close();
            }
            addAnswerView(allOptionsLayout);
        } else {
            // No itemsets.csv in the form's media folder: show an error instead of options.
            TextView error = new TextView(context);
            error.setText(
                    getContext().getString(R.string.file_missing,
                            itemsetFile.getAbsolutePath()));
            addAnswerView(error);
        }
    }

    /**
     * Clears the stored answer and unchecks the (at most one) checked button, also clearing
     * any dependent levels of a cascading select.
     */
    @Override
    public void clearAnswer() {
        mAnswer = null;
        for (RadioButton button : mButtons) {
            if (button.isChecked()) {
                button.setChecked(false);
                clearNextLevelsOfCascadingSelect();
                break;
            }
        }
    }

    /**
     * @return the selected value wrapped as StringData, or null when nothing is selected
     */
    @Override
    public IAnswerData getAnswer() {
        if (mAnswer == null) {
            return null;
        } else {
            return new StringData(mAnswer);
        }
    }

    @Override
    public void setFocus(Context context) {
        // Hide the soft keyboard if it's showing.
        InputMethodManager inputManager = (InputMethodManager) context
                .getSystemService(Context.INPUT_METHOD_SERVICE);
        inputManager.hideSoftInputFromWindow(this.getWindowToken(), 0);
    }

    /**
     * Ignores Alt-modified key events; everything else goes to the superclass.
     */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (event.isAltPressed() == true) {
            return false;
        }
        return super.onKeyDown(keyCode, event);
    }

    /**
     * Installs the long-click listener on every option button rather than the widget itself.
     */
    @Override
    public void setOnLongClickListener(OnLongClickListener l) {
        for (RadioButton r : mButtons) {
            r.setOnLongClickListener(l);
        }
    }

    @Override
    public void cancelLongPress() {
        super.cancelLongPress();
        for (RadioButton button : mButtons) {
            button.cancelLongPress();
        }
    }

    /**
     * Enforces single selection by hand (the buttons are not in a RadioGroup): when a button
     * is checked, unchecks every other checked button and records the newly selected value.
     */
    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        if (isChecked) {
            for (RadioButton button : mButtons) {
                if (button.isChecked() && !(buttonView == button)) {
                    button.setChecked(false);
                    clearNextLevelsOfCascadingSelect();
                } else {
                    mAnswer = mAnswers.get(buttonView.getText().toString());
                }
            }
        }
    }

    /**
     * Advances to the next question on click when auto-advance is enabled.
     */
    @Override
    public void onClick(View v) {
        if (mAutoAdvanceToNext) {
            mAutoAdvanceToNextListener.advance();
        }
    }
}
/** * Retz * Copyright (C) 2016-2017 Nautilus Technologies, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.github.retz.web; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; import io.github.retz.cli.ClientCLIConfig; import io.github.retz.cli.TimestampHelper; import io.github.retz.db.Database; import io.github.retz.misc.ApplicationBuilder; import io.github.retz.protocol.*; import io.github.retz.protocol.data.*; import io.github.retz.scheduler.*; import org.apache.mesos.Protos; import org.hamcrest.Matchers; import org.junit.*; import java.io.ByteArrayOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.*; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import static spark.Spark.awaitInitialization; // These tests must pass regardless of any client/server communication configuration @Ignore public class WebConsoleCommonTests { private static final List<String> BASE_ORDER_BY = Arrays.asList("id"); private Client webClient; private ObjectMapper mapper; private ServerConfiguration config; private ClientCLIConfig cliConfig; Launcher.Configuration makeConfig() throws Exception { throw new RuntimeException("This class shouldn't be tested"); } ClientCLIConfig makeClientConfig() throws Exception { throw new RuntimeException("This class shouldn't be tested"); } @Before public void setUp() throws 
Throwable { Protos.FrameworkInfo frameworkInfo = Protos.FrameworkInfo.newBuilder() .setUser("") .setName(RetzScheduler.FRAMEWORK_NAME) .build(); // Non-TLS tests are not to be done, I believe when it works with TLS tests, it should work // on Non-TLS setup too. I believe his is because Sparkjava does not cleanly clear TLS setting in // Spark.stop(), because with retz.properties it succeeds alone, but fails when right after TLS tests. // TODO: investigate and report this to sparkjava Launcher.Configuration conf = makeConfig(); mapper = new ObjectMapper(); mapper.registerModule(new Jdk8Module()); RetzScheduler scheduler = new RetzScheduler(conf, frameworkInfo); config = conf.getServerConfig(); Database.getInstance().init(config); assertTrue(Database.getMigrator().isFinished()); WebConsole.set(scheduler, null); WebConsole.start(config); awaitInitialization(); cliConfig = makeClientConfig(); System.err.println(config.authenticationEnabled()); System.err.println(config.toString()); webClient = Client.newBuilder(cliConfig.getUri()) .setAuthenticator(cliConfig.getAuthenticator()) .checkCert(!cliConfig.insecure()) .setVerboseLog(true) .build(); } @After public void tearDown() throws Exception { webClient.close(); WebConsole.stop(); Database.getInstance().clear(); Database.getInstance().stop(); // This is because Spark.stop() is impelemented in asynchronous way, // there are no way to wait for spark.Service.initialized become false. // If this sleep is too short, IllegalStateException("This must be done before route mapping has begun"); // will be thrown. 
Thread.sleep(1024); } @Test public void version() throws IOException { System.err.println(Client.VERSION_STRING); assertTrue(webClient.ping()); } @Test public void list() throws Exception { { ListJobResponse res = (ListJobResponse) webClient.list(Job.JobState.QUEUED, Optional.empty()); assertTrue(res.jobs().isEmpty()); assertFalse(res.more()); } { ListJobResponse res = (ListJobResponse) webClient.list(Job.JobState.STARTING, Optional.empty()); assertTrue(res.jobs().isEmpty()); assertFalse(res.more()); } { ListJobResponse res = (ListJobResponse) webClient.list(Job.JobState.FINISHED, Optional.empty()); assertTrue(res.jobs().isEmpty()); assertFalse(res.more()); } } @Test public void loadApp() throws Exception { { String[] files = {"http://example.com:234/foobar/test.tar.gz"}; Application app = new Application("foobar", Collections.emptyList(), Arrays.asList(files), Optional.empty(), config.getUser().keyId(), 0, new MesosContainer(), true); Response res = webClient.load(app); assertThat(res, instanceOf(LoadAppResponse.class)); assertThat(res.status(), Matchers.is("ok")); Optional<Application> app2 = Applications.get(app.getAppid()); assertTrue(app2.isPresent()); assertThat(app2.get().getAppid(), is(app.getAppid())); } { GetAppResponse res = (GetAppResponse) webClient.getApp("foobar"); assertThat(res.status(), is("ok")); Application app = res.application(); assertThat(app.getAppid(), is("foobar")); } webClient.unload("foobar"); } @Test public void schedule() throws Exception { List<Job> maybeJob = JobQueue.findFit(BASE_ORDER_BY, new ResourceQuantity(10000, 10000, 0, 0, 0, 0)); assertTrue(maybeJob.isEmpty()); { // Job request without app must fail String cmd = "Mmmmmmmmmy commmmmand1!!!!!"; Response res = webClient.schedule(new Job("foobar", cmd, null, 1, 256, 32)); assertThat(res, instanceOf(ErrorResponse.class)); maybeJob = JobQueue.findFit(BASE_ORDER_BY, new ResourceQuantity(1000, 10000, 0, 0, 0, 0)); assertTrue(maybeJob.isEmpty()); GetJobResponse getJobResponse = 
(GetJobResponse) webClient.getJob(235561234); assertFalse(getJobResponse.job().isPresent()); } { String[] files = {"http://example.com:234/foobar/test.tar.gz"}; Application app = new Application("foobar", Collections.emptyList(), Arrays.asList(files), Optional.empty(), config.getUser().keyId(), 0, new MesosContainer(), true); Response res = webClient.load(app); assertThat(res, instanceOf(LoadAppResponse.class)); assertThat(res.status(), is("ok")); Optional<Application> app2 = Applications.get("foobar"); assertTrue(app2.isPresent()); assertThat(app2.get().getAppid(), is("foobar")); res = webClient.getApp("foobar"); assertThat(res, instanceOf(GetAppResponse.class)); GetAppResponse getAppResponse = (GetAppResponse) res; assertThat(getAppResponse.application().getAppid(), is("foobar")); assertThat(getAppResponse.application().getFiles().size(), is(1)); } maybeJob = JobQueue.findFit(BASE_ORDER_BY, new ResourceQuantity(10000, 10000, 0, 0, 0, 0)); assertTrue(maybeJob.isEmpty()); { // You know, these spaces are to be normalized String cmd = "Mmmmmmmmmy commmmmand1!!!!!"; Response res = webClient.schedule(new Job("foobar", cmd, null, 1, 200, 32)); assertThat(res, instanceOf(ScheduleResponse.class)); ScheduleResponse sres = (ScheduleResponse) res; assertNotNull(sres.job.scheduled()); assertThat(sres.job.id(), is(greaterThanOrEqualTo(0))); System.err.println(sres.job.scheduled()); assertThat(ClientHelper.queue(webClient).size(), is(1)); GetJobResponse getJobResponse = (GetJobResponse) webClient.getJob(sres.job.id()); Assert.assertEquals(sres.job.cmd(), getJobResponse.job().get().cmd()); maybeJob = JobQueue.findFit(BASE_ORDER_BY, new ResourceQuantity(10000, 10000, 0, 0, 0, 0)); assertFalse(maybeJob.isEmpty()); assertThat(maybeJob.get(0).cmd(), is(cmd)); assertThat(maybeJob.get(0).appid(), is("foobar")); ListFilesResponse listFilesResponse = (ListFilesResponse) webClient.listFiles(sres.job.id(), ListFilesRequest.DEFAULT_SANDBOX_PATH); 
assertTrue(listFilesResponse.entries().isEmpty()); Response response = webClient.getFile(sres.job.id(), "stdout", 0, 20000); GetFileResponse getFileResponse = (GetFileResponse) response; assertFalse(getFileResponse.file().isPresent()); } { Response res = webClient.getApp("no such app"); assertThat(res, instanceOf(ErrorResponse.class)); } webClient.unload("foobar"); } @Test public void runFail() throws Exception { JobQueue.clear(); List<Job> maybeJob = JobQueue.findFit(BASE_ORDER_BY, new ResourceQuantity(10000, 10000, 0, 0, 0, 0)); assertTrue(maybeJob.isEmpty()); { // Job request without app must fail String cmd = "Mmmmmmmmmy commmmmand1!!!!!"; Job job = new Job("foobar-nosuchapp", cmd, null, 1, 32, 256); Job done = webClient.run(job); assertNull(done); maybeJob = JobQueue.findFit(BASE_ORDER_BY, new ResourceQuantity(10000, 10000, 0, 0, 0, 0)); assertTrue(maybeJob.isEmpty()); } } @Test public void kill() throws Exception { { Response res = webClient.kill(0); assertThat(res, instanceOf(ErrorResponse.class)); assertEquals("No such job: 0", res.status()); } Application app = new ApplicationBuilder("app", config.getUser().keyId()).build(); LoadAppResponse loadAppResponse = (LoadAppResponse) webClient.load(app); assertEquals("ok", loadAppResponse.status()); { Job job = new Job("app", "sleep 1000", new Properties(), 1, 64, 0); ScheduleResponse scheduleResponse = (ScheduleResponse) webClient.schedule(job); KillResponse killResponse = (KillResponse) webClient.kill(scheduleResponse.job().id()); assertEquals("ok", killResponse.status()); GetJobResponse getJobResponse = (GetJobResponse) webClient.getJob(scheduleResponse.job().id()); System.err.println(getJobResponse.job().get().pp()); assertEquals(Job.JobState.KILLED, getJobResponse.job().get().state()); } } @Test public void ping() throws IOException { Client c = Client.newBuilder(config.getUri()) .setAuthenticator(config.getAuthenticator()) .checkCert(!config.insecure()) .build(); assertTrue(c.ping()); } @Test public void 
status() throws Exception { Application app = new Application("fooapp", Collections.emptyList(), Collections.emptyList(), Optional.empty(), config.getUser().keyId(), 0, new MesosContainer(), true); Database.getInstance().addApplication(app); Job job = new Job(app.getAppid(), "foocmd", null, 12000, 12000, 12000); job.schedule(JobQueue.issueJobId(), TimestampHelper.now()); JobQueue.push(job); StatusCache.updateUsedResources(); try (Client c = Client.newBuilder(config.getUri()) .setAuthenticator(config.getAuthenticator()) .checkCert(!config.insecure()) .build()) { Response res = c.status(); assertThat(res, instanceOf(StatusResponse.class)); StatusResponse statusResponse = (StatusResponse) res; System.err.println(statusResponse.queueLength()); assertThat(statusResponse.queueLength(), is(1)); assertThat(statusResponse.runningLength(), is(0)); } } // Checks isolation between users. // All combination of client APIs must be safely excluded, // Any request to server imitation other users must fail. @Test public void isolation() throws Exception { // Prepare data Application app1 = new Application("app1", Collections.emptyList(), Collections.emptyList(), Optional.empty(), cliConfig.getUser().keyId(), 0, new MesosContainer(), true); Job job1 = new Job("app1", "ls", new Properties(), 1, 32, 32); { Response res = webClient.load(app1); assertEquals("ok", res.status()); } { Response res = webClient.schedule(job1); assertEquals("ok", res.status()); ScheduleResponse scheduleResponse = (ScheduleResponse) res; job1 = scheduleResponse.job(); } System.err.println("Job " + job1.id() + " has been scheduled"); // Here comes a new challenger!! 
User charlie = new User("charlie", "snoops!", true, "Charlie the theif"); Database.getInstance().addUser(charlie); ClientCLIConfig c2 = new ClientCLIConfig(cliConfig); c2.setUser(charlie); assertEquals("deadbeef", cliConfig.getUser().keyId()); assertEquals("charlie", c2.getUser().keyId()); try (Client client2 = Client.newBuilder(c2.getUri()) .setAuthenticator(c2.getAuthenticator()) .checkCert(!c2.insecure()) .setVerboseLog(true) .build()) { { Response res = client2.listApp(); assertEquals("ok", res.status()); ListAppResponse listAppResponse = (ListAppResponse) res; assertTrue(listAppResponse.applicationList().isEmpty()); } { Response res = client2.getApp("app1"); assertThat(res, instanceOf(ErrorResponse.class)); } { assertTrue(ClientHelper.finished(client2).isEmpty()); assertTrue(ClientHelper.queue(client2).isEmpty()); assertTrue(ClientHelper.running(client2).isEmpty()); } { // Charlie tries to snoop Job info of Alice Response res = client2.getJob(job1.id()); assertThat(res, instanceOf(GetJobResponse.class)); GetJobResponse getJobResponse = (GetJobResponse) res; assertFalse(getJobResponse.job().isPresent()); System.err.println(res.status()); } { // Charlie tries to kill Alice's job Response res = client2.kill(job1.id()); assertThat(res.status(), not(is("ok"))); assertThat(res, instanceOf(ErrorResponse.class)); GetJobResponse getJobResponse = (GetJobResponse) webClient.getJob(job1.id()); assertThat(getJobResponse.job().get().state(), is(Job.JobState.QUEUED)); } { // Charlie tries to snoop files in Alice's job sandbox Response res = client2.getFile(job1.id(), "stdout", 0, -1); assertThat(res, instanceOf(GetFileResponse.class)); GetFileResponse getFileResponse = (GetFileResponse) res; assertFalse(getFileResponse.job().isPresent()); assertFalse(getFileResponse.file().isPresent()); System.err.println(res.status()); } { // Charlie tries to snoop files in Alice's job sandbox Response res = client2.listFiles(job1.id(), ListFilesRequest.DEFAULT_SANDBOX_PATH); 
assertThat(res, instanceOf(ListFilesResponse.class)); ListFilesResponse listFilesResponse = (ListFilesResponse) res; assertFalse(listFilesResponse.job().isPresent()); assertTrue(listFilesResponse.entries().isEmpty()); System.err.println(res.status()); } { // Charlie tries to steal Alice's whole application Response res = client2.load(app1); assertThat(res, instanceOf(ErrorResponse.class)); System.err.println(res.status()); } { // Charlie tries to steal Alice's application name Application app2 = new Application("app1", Collections.emptyList(), Collections.emptyList(), Optional.empty(), c2.getUser().keyId(), 0, new MesosContainer(), true); Response res = client2.load(app2); assertThat(res, instanceOf(ErrorResponse.class)); System.err.println(res.status()); } { // Charlie tries to be Alice System.err.println(cliConfig.getUser().keyId()); Application app2 = new Application("app2", Collections.emptyList(), Collections.emptyList(), Optional.empty(), cliConfig.getUser().keyId(), 0, new MesosContainer(), true); Response res = client2.load(app2); assertThat(res, instanceOf(ErrorResponse.class)); System.err.println(res.status()); } Job job2 = new Job("app1", "ls", new Properties(), 1, 32, 32); { // Charlie tries to steal Alice's applications Response res = client2.schedule(job2); assertThat(res, instanceOf(ErrorResponse.class)); System.err.println(res.status()); } { // Charlie tries to steal Alice's applications Job job3 = client2.run(job2); assertEquals(null, job3); } } } @Test public void disableUser() throws Exception { User user = config.getUser(); List<String> e = Collections.emptyList(); Application application = new Application("t", e, e, Optional.empty(), user.keyId(), 0, new MesosContainer(), true); String jar = "/build/libs/retz-admin-all.jar"; String cfg = "/retz-persistent.properties"; Client client = webClient; Response res; res = client.load(application); assertEquals("ok", res.status()); res = client.schedule(new Job("t", "ls", new Properties(), 1, 32, 32)); 
ScheduleResponse scheduleResponse = (ScheduleResponse) res; Job job1 = scheduleResponse.job(); System.err.println("Disable user " + user.keyId()); Database.getInstance().enableUser(user.keyId(), false); { Optional<User> u = Database.getInstance().getUser(user.keyId()); assertFalse(u.get().enabled()); } res = client.getJob(job1.id()); System.err.println(res.status()); Assert.assertThat(res, instanceOf(ErrorResponse.class)); res = client.load(new Application("t2", e, e, Optional.empty(), user.keyId(), 0, new MesosContainer(), true)); System.err.println(res.status()); Assert.assertThat(res, instanceOf(ErrorResponse.class)); res = client.schedule(new Job("t", "echo prohibited job", new Properties(), 1, 32, 32)); System.err.println(res.status()); Assert.assertThat(res, instanceOf(ErrorResponse.class)); System.err.println("Enable user"); Database.getInstance().enableUser(user.keyId(), true); res = client.getJob(job1.id()); System.err.println(res.status()); assertEquals("ok", res.status()); res = client.load(new Application("t2", e, e, Optional.empty(), user.keyId(), 0, new MesosContainer(), true)); System.err.println(res.status()); assertEquals("ok", res.status()); res = client.schedule(new Job("t", "echo okay job", new Properties(), 1, 32, 32)); System.err.println(res.status()); assertEquals("ok", res.status()); } @Test public void getBinaryFile() throws Exception { Application app = new Application("fooapp", Collections.emptyList(), Collections.emptyList(), Optional.empty(), config.getUser().keyId(), 0, new MesosContainer(), true); Database.getInstance().addApplication(app); Job job = new Job(app.getAppid(), "hoge", null, 1, 200, 32); Database.getInstance().safeAddJob(job); ByteArrayOutputStream out = new ByteArrayOutputStream(); try { webClient.getBinaryFile(job.id(), "hoge +%/", out); } catch (FileNotFoundException e) { assertTrue(e.getMessage().endsWith("://localhost:9091/job/0/download?path=hoge+%2B%25%2F")); } } }
/**
 * Copyright (c) 2013-2014 Netflix, Inc. All rights reserved.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *     http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.msl.crypto;

import java.math.BigInteger;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.KeySpec;
import java.security.spec.RSAPrivateKeySpec;
import java.security.spec.RSAPublicKeySpec;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Set;

import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONString;

import com.netflix.msl.MslCryptoException;
import com.netflix.msl.MslEncodingException;
import com.netflix.msl.MslError;
import com.netflix.msl.MslInternalException;
import com.netflix.msl.util.JsonUtils;

/**
 * This class implements the JSON web key structure as defined in
 * <a href="http://tools.ietf.org/html/draft-ietf-jose-json-web-key-08">JSON Web Key</a>.
 * 
 * @author Wesley Miaw <wmiaw@netflix.com>
 */
public class JsonWebKey implements JSONString {
    /** JSON key key type. */
    private static final String KEY_TYPE = "kty";
    /** JSON key usage. */
    private static final String KEY_USAGE = "use";
    /** JSON key key operations. */
    private static final String KEY_KEY_OPS = "key_ops";
    /** JSON key algorithm. */
    private static final String KEY_ALGORITHM = "alg";
    /** JSON key extractable. */
    private static final String KEY_EXTRACTABLE = "extractable";
    /** JSON key key ID. */
    private static final String KEY_KEY_ID = "kid";
    
    // RSA keys.
    /** JSON key modulus. */
    private static final String KEY_MODULUS = "n";
    /** JSON key public exponent. */
    private static final String KEY_PUBLIC_EXPONENT = "e";
    /** JSON key private exponent. */
    private static final String KEY_PRIVATE_EXPONENT = "d";
    
    // Symmetric keys.
    /** JSON key key. */
    private static final String KEY_KEY = "k";
    
    /** Supported key types. */
    public static enum Type {
        /** RSA */
        rsa,
        /** Octet Sequence */
        oct,
    }
    
    /** Supported key usages. */
    public static enum Usage {
        /** Sign/verify. */
        sig,
        /** Encrypt/decrypt. */
        enc,
        /** Wrap/unwrap. */
        wrap,
    }
    
    /** Supported key operations. */
    public static enum KeyOp {
        sign,
        verify,
        encrypt,
        decrypt,
        wrapKey,
        unwrapKey,
        deriveKey,
        deriveBits
    }
    
    /** Supported key algorithms. */
    public static enum Algorithm {
        /** HMAC-SHA256 */
        HS256("HS256"),
        /** RSA PKCS#1 v1.5 */
        RSA1_5("RSA1_5"),
        /** RSA OAEP */
        RSA_OAEP("RSA-OAEP"),
        /** AES-128 Key Wrap */
        A128KW("A128KW"),
        /** AES-128 CBC */
        A128CBC("A128CBC");
        
        /**
         * @param name JSON Web Algorithm name.
         */
        private Algorithm(final String name) {
            this.name = name;
        }
        
        /**
         * @return the Java Cryptography Architecture standard algorithm name
         *         for this JSON Web Algorithm.
         */
        public String getJcaAlgorithmName() {
            switch (this) {
                case HS256:
                    return "HmacSHA256";
                case RSA1_5:
                case RSA_OAEP:
                    return "RSA";
                case A128KW:
                case A128CBC:
                    return "AES";
                default:
                    throw new MslInternalException("No JCA standard algorithm name defined for " + this + ".");
            }
        }
        
        /* (non-Javadoc)
         * @see java.lang.Enum#toString()
         */
        @Override
        public String toString() {
            return name;
        }
        
        /**
         * @param name JSON Web Algorithm name.
         * @return the algorithm.
         * @throws IllegalArgumentException if the algorithm name is unknown.
         */
        public static Algorithm fromString(final String name) {
            for (final Algorithm algo : values()) {
                if (algo.toString().equals(name))
                    return algo;
            }
            throw new IllegalArgumentException("Algorithm " + name + " is unknown.");
        }
        
        /** JSON Web Algorithm name. */
        private final String name;
    }
    
    /**
     * Returns the big integer in big-endian format without any leading sign
     * bits.
     * 
     * @param bi the big integer.
     * @return the big integer in big-endian form.
     */
    private static byte[] bi2bytes(final BigInteger bi) {
        final byte[] bib = bi.toByteArray();
        final int len = (int)Math.ceil((double)bi.bitLength() / Byte.SIZE);
        return Arrays.copyOfRange(bib, bib.length - len, bib.length);
    }
    
    /**
     * Create a new JSON web key for an RSA public/private key pair with the
     * specified attributes. At least one of the public key or private key must
     * be encoded.
     * 
     * @param usage key usage. May be null.
     * @param algo key algorithm. May be null.
     * @param extractable true if the key is extractable.
     * @param id key ID. May be null.
     * @param publicKey RSA public key. May be null.
     * @param privateKey RSA private key. May be null.
     * @throws MslInternalException if both keys are null or the algorithm
     *         is incompatible.
     */
    public JsonWebKey(final Usage usage, final Algorithm algo, final boolean extractable, final String id, final RSAPublicKey publicKey, final RSAPrivateKey privateKey) {
        if (publicKey == null && privateKey == null)
            throw new MslInternalException("At least one of the public key or private key must be provided.");
        if (algo != null) {
            switch (algo) {
                case RSA1_5:
                case RSA_OAEP:
                    break;
                default:
                    throw new MslInternalException("The algorithm must be an RSA algorithm.");
            }
        }
        
        this.type = Type.rsa;
        this.usage = usage;
        this.keyOps = null;
        this.algo = algo;
        this.extractable = extractable;
        this.id = id;
        this.keyPair = new KeyPair(publicKey, privateKey);
        this.key = null;
        this.secretKey = null;
    }
    
    /**
     * Create a new JSON web key for a symmetric key with the specified
     * attributes.
     * 
     * @param usage key usage. May be null.
     * @param algo key algorithm. May be null.
     * @param extractable true if the key is extractable.
     * @param id key ID. May be null.
     * @param secretKey symmetric key.
     * @throws MslInternalException if the usage or algorithm is incompatible.
     */
    public JsonWebKey(final Usage usage, final Algorithm algo, final boolean extractable, final String id, final SecretKey secretKey) {
        if (algo != null) {
            switch (algo) {
                case HS256:
                case A128KW:
                case A128CBC:
                    break;
                default:
                    throw new MslInternalException("The algorithm must be a symmetric key algorithm.");
            }
        }
        
        this.type = Type.oct;
        this.usage = usage;
        this.keyOps = null;
        this.algo = algo;
        this.extractable = extractable;
        this.id = id;
        this.keyPair = null;
        this.key = secretKey.getEncoded();
        this.secretKey = secretKey;
    }
    
    /**
     * Create a new JSON web key for an RSA public/private key pair with the
     * specified attributes. At least one of the public key or private key must
     * be encoded.
     * 
     * @param keyOps key operations. May be null.
     * @param algo key algorithm. May be null.
     * @param extractable true if the key is extractable.
     * @param id key ID. May be null.
     * @param publicKey RSA public key. May be null.
     * @param privateKey RSA private key. May be null.
     * @throws MslInternalException if both keys are null or the algorithm
     *         is incompatible.
     */
    public JsonWebKey(final Set<KeyOp> keyOps, final Algorithm algo, final boolean extractable, final String id, final RSAPublicKey publicKey, final RSAPrivateKey privateKey) {
        if (publicKey == null && privateKey == null)
            throw new MslInternalException("At least one of the public key or private key must be provided.");
        if (algo != null) {
            switch (algo) {
                case RSA1_5:
                case RSA_OAEP:
                    break;
                default:
                    throw new MslInternalException("The algorithm must be an RSA algorithm.");
            }
        }
        
        this.type = Type.rsa;
        this.usage = null;
        this.keyOps = (keyOps != null) ? Collections.unmodifiableSet(keyOps) : null;
        this.algo = algo;
        this.extractable = extractable;
        this.id = id;
        this.keyPair = new KeyPair(publicKey, privateKey);
        this.key = null;
        this.secretKey = null;
    }
    
    /**
     * Create a new JSON web key for a symmetric key with the specified
     * attributes.
     * 
     * @param keyOps key operations. May be null.
     * @param algo key algorithm. May be null.
     * @param extractable true if the key is extractable.
     * @param id key ID. May be null.
     * @param secretKey symmetric key.
     * @throws MslInternalException if the usage or algorithm is incompatible.
     */
    public JsonWebKey(final Set<KeyOp> keyOps, final Algorithm algo, final boolean extractable, final String id, final SecretKey secretKey) {
        if (algo != null) {
            switch (algo) {
                case HS256:
                case A128KW:
                case A128CBC:
                    break;
                default:
                    throw new MslInternalException("The algorithm must be a symmetric key algorithm.");
            }
        }
        
        this.type = Type.oct;
        this.usage = null;
        this.keyOps = (keyOps != null) ? Collections.unmodifiableSet(keyOps) : null;
        this.algo = algo;
        this.extractable = extractable;
        this.id = id;
        this.keyPair = null;
        this.key = secretKey.getEncoded();
        this.secretKey = secretKey;
    }
    
    /**
     * Create a new JSON web key from the provided JSON.
     * 
     * @param jsonObj JSON web key JSON object.
     * @throws MslCryptoException if the key type is unknown.
     * @throws MslEncodingException if there is an error parsing the JSON.
     */
    public JsonWebKey(final JSONObject jsonObj) throws MslCryptoException, MslEncodingException {
        // Parse JSON object.
        final String typeName, usageName, algoName;
        final Set<String> keyOpsNames;
        try {
            typeName = jsonObj.getString(KEY_TYPE);
            usageName = jsonObj.has(KEY_USAGE) ? jsonObj.getString(KEY_USAGE) : null;
            if (jsonObj.has(KEY_KEY_OPS)) {
                keyOpsNames = new HashSet<String>();
                final JSONArray ja = jsonObj.getJSONArray(KEY_KEY_OPS);
                for (int i = 0; i < ja.length(); ++i)
                    keyOpsNames.add(ja.getString(i));
            } else {
                keyOpsNames = null;
            }
            algoName = jsonObj.has(KEY_ALGORITHM) ? jsonObj.getString(KEY_ALGORITHM) : null;
            extractable = jsonObj.has(KEY_EXTRACTABLE) ? jsonObj.getBoolean(KEY_EXTRACTABLE) : false;
            id = jsonObj.has(KEY_KEY_ID) ? jsonObj.getString(KEY_KEY_ID) : null;
        } catch (final JSONException e) {
            throw new MslEncodingException(MslError.JSON_PARSE_ERROR, "jwk " + jsonObj.toString(), e);
        }
        
        // Set values.
        try {
            type = Type.valueOf(typeName);
        } catch (final IllegalArgumentException e) {
            throw new MslCryptoException(MslError.UNIDENTIFIED_JWK_TYPE, typeName, e);
        }
        try {
            usage = (usageName != null) ? Usage.valueOf(usageName) : null;
        } catch (final IllegalArgumentException e) {
            throw new MslCryptoException(MslError.UNIDENTIFIED_JWK_USAGE, usageName, e);
        }
        if (keyOpsNames != null) {
            final Set<KeyOp> keyOps = EnumSet.noneOf(KeyOp.class);
            for (final String keyOpName : keyOpsNames) {
                try {
                    keyOps.add(KeyOp.valueOf(keyOpName));
                } catch (final IllegalArgumentException e) {
                    // FIX: previously reported usageName here, which attached the
                    // key usage string (or null) to the error instead of the
                    // unrecognized key operation name being rejected.
                    throw new MslCryptoException(MslError.UNIDENTIFIED_JWK_KEYOP, keyOpName, e);
                }
            }
            this.keyOps = Collections.unmodifiableSet(keyOps);
        } else {
            this.keyOps = null;
        }
        try {
            algo = (algoName != null) ? Algorithm.fromString(algoName) : null;
        } catch (final IllegalArgumentException e) {
            throw new MslCryptoException(MslError.UNIDENTIFIED_JWK_ALGORITHM, algoName, e);
        }
        
        // Reconstruct keys.
        try {
            // Handle symmetric keys.
            if (type == Type.oct) {
                key = JsonUtils.b64urlDecode(jsonObj.getString(KEY_KEY));
                if (key == null || key.length == 0)
                    throw new MslCryptoException(MslError.INVALID_JWK_KEYDATA, "symmetric key is empty");
                secretKey = (algo != null) ? new SecretKeySpec(key, algo.getJcaAlgorithmName()) : null;
                keyPair = null;
            }
            
            // Handle public/private keys (RSA only).
            else {
                key = null;
                final KeyFactory factory = CryptoCache.getKeyFactory("RSA");
                
                // Grab the modulus.
                final byte[] n = JsonUtils.b64urlDecode(jsonObj.getString(KEY_MODULUS));
                if (n == null || n.length == 0)
                    throw new MslCryptoException(MslError.INVALID_JWK_KEYDATA, "modulus is empty");
                final BigInteger modulus = new BigInteger(1, n);
                
                // Reconstruct the public key if it exists.
                final PublicKey publicKey;
                if (jsonObj.has(KEY_PUBLIC_EXPONENT)) {
                    final byte[] e = JsonUtils.b64urlDecode(jsonObj.getString(KEY_PUBLIC_EXPONENT));
                    if (e == null || e.length == 0)
                        throw new MslCryptoException(MslError.INVALID_JWK_KEYDATA, "public exponent is empty");
                    final BigInteger exponent = new BigInteger(1, e);
                    final KeySpec pubkeySpec = new RSAPublicKeySpec(modulus, exponent);
                    publicKey = factory.generatePublic(pubkeySpec);
                } else {
                    publicKey = null;
                }
                
                // Reconstruct the private key if it exists.
                final PrivateKey privateKey;
                if (jsonObj.has(KEY_PRIVATE_EXPONENT)) {
                    final byte[] d = JsonUtils.b64urlDecode(jsonObj.getString(KEY_PRIVATE_EXPONENT));
                    if (d == null || d.length == 0)
                        throw new MslCryptoException(MslError.INVALID_JWK_KEYDATA, "private exponent is empty");
                    final BigInteger exponent = new BigInteger(1, d);
                    final KeySpec privkeySpec = new RSAPrivateKeySpec(modulus, exponent);
                    privateKey = factory.generatePrivate(privkeySpec);
                } else {
                    privateKey = null;
                }
                
                // Make sure there is at least one key.
                if (publicKey == null && privateKey == null)
                    throw new MslEncodingException(MslError.JSON_PARSE_ERROR, "no public or private key");
                
                keyPair = new KeyPair(publicKey, privateKey);
                secretKey = null;
            }
        } catch (final JSONException e) {
            throw new MslEncodingException(MslError.JSON_PARSE_ERROR, e);
        } catch (final NoSuchAlgorithmException e) {
            throw new MslCryptoException(MslError.UNSUPPORTED_JWK_ALGORITHM, e);
        } catch (final InvalidKeySpecException e) {
            throw new MslCryptoException(MslError.INVALID_JWK_KEYDATA, e);
        }
    }
    
    /**
     * @return the key type.
     */
    public Type getType() {
        return type;
    }
    
    /**
     * @return the permitted key usage or null if not specified.
     */
    public Usage getUsage() {
        return usage;
    }
    
    /**
     * @return the permitted key operations or null if not specified.
     */
    public Set<KeyOp> getKeyOps() {
        return keyOps;
    }
    
    /**
     * @return the key algorithm or null if not specified.
     */
    public Algorithm getAlgorithm() {
        return algo;
    }
    
    /**
     * @return true if the key is allowed to be extracted.
     */
    public boolean isExtractable() {
        return extractable;
    }
    
    /**
     * @return the key ID or null if not specified.
     */
    public String getId() {
        return id;
    }
    
    /**
     * Returns the stored RSA key pair if the JSON web key type is RSA. The
     * public or private key may be null if only one of the pair is stored in
     * this JSON web key.
     * 
     * @return the stored RSA key pair or null if the type is not RSA.
     */
    public KeyPair getRsaKeyPair() {
        return keyPair;
    }
    
    /**
     * Returns the stored symmetric key if the JSON web key type is OCT and an
     * algorithm was specified. Because Java {@code SecretKey} requires a known
     * algorithm when it is constructed, the key material may be present when
     * this method returns {@code null}.
     * 
     * @return the stored symmetric key or null if the type is not OCT or no
     *         algorithm was specified.
     * @see #getSecretKey(String)
     */
    public SecretKey getSecretKey() {
        return secretKey;
    }
    
    /**
     * Returns the stored symmetric key if the JSON web key type is OCT. The
     * returned key algorithm will be the one specified by the JSON web key
     * algorithm. If no JSON web key algorithm was specified the provided
     * algorithm will be used instead.
     * 
     * @param algorithm the symmetric key algorithm to use if one was not
     *        specified in the JSON web key.
     * @return the stored symmetric key or null if the type is not OCT.
     * @throws MslCryptoException if the key cannot be constructed.
     * @see #getSecretKey()
     */
    public SecretKey getSecretKey(final String algorithm) throws MslCryptoException {
        // Return the stored symmetric key if it already exists.
        if (secretKey != null)
            return secretKey;
        
        // Otherwise construct the secret key.
        if (key == null)
            return null;
        try {
            return new SecretKeySpec(key, algorithm);
        } catch (final IllegalArgumentException e) {
            throw new MslCryptoException(MslError.INVALID_SYMMETRIC_KEY, e);
        }
    }
    
    /* (non-Javadoc)
     * @see org.json.JSONString#toJSONString()
     */
    @Override
    public String toJSONString() {
        try {
            final JSONObject jsonObj = new JSONObject();
            
            // Encode key attributes.
            jsonObj.put(KEY_TYPE, type.name());
            if (usage != null) jsonObj.put(KEY_USAGE, usage.name());
            if (keyOps != null) {
                final JSONArray keyOpsJa = new JSONArray();
                for (final KeyOp op : keyOps)
                    keyOpsJa.put(op.name());
                jsonObj.put(KEY_KEY_OPS, keyOpsJa);
            }
            if (algo != null) jsonObj.put(KEY_ALGORITHM, algo.toString());
            jsonObj.put(KEY_EXTRACTABLE, extractable);
            if (id != null) jsonObj.put(KEY_KEY_ID, id);
            
            // Encode symmetric keys.
            if (type == Type.oct) {
                jsonObj.put(KEY_KEY, JsonUtils.b64urlEncode(key));
            }
            
            // Encode public/private keys (RSA only).
            else {
                final RSAPublicKey publicKey = (RSAPublicKey)keyPair.getPublic();
                final RSAPrivateKey privateKey = (RSAPrivateKey)keyPair.getPrivate();
                
                // Encode modulus.
                final BigInteger modulus = (publicKey != null) ? publicKey.getModulus() : privateKey.getModulus();
                final byte[] n = bi2bytes(modulus);
                jsonObj.put(KEY_MODULUS, JsonUtils.b64urlEncode(n));
                
                // Encode public key.
                if (publicKey != null) {
                    final BigInteger exponent = publicKey.getPublicExponent();
                    final byte[] e = bi2bytes(exponent);
                    jsonObj.put(KEY_PUBLIC_EXPONENT, JsonUtils.b64urlEncode(e));
                }
                
                // Encode private key.
                if (privateKey != null) {
                    final BigInteger exponent = privateKey.getPrivateExponent();
                    final byte[] d = bi2bytes(exponent);
                    jsonObj.put(KEY_PRIVATE_EXPONENT, JsonUtils.b64urlEncode(d));
                }
            }
            
            // Return the result.
            return jsonObj.toString();
        } catch (final JSONException e) {
            throw new MslInternalException("Error encoding " + this.getClass().getName() + " JSON.", e);
        }
    }
    
    /** Key type. */
    private final Type type;
    /** Key usages. */
    private final Usage usage;
    /** Key operations. */
    private final Set<KeyOp> keyOps;
    /** Key algorithm. */
    private final Algorithm algo;
    /** Extractable. */
    private final boolean extractable;
    /** Key ID. */
    private final String id;
    /** RSA key pair. May be null. */
    private final KeyPair keyPair;
    /** Symmetric key raw bytes. May be null. */
    private final byte[] key;
    /** Symmetric key. May be null. */
    private final SecretKey secretKey;
}
/* * Copyright (C) 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.api.tools.framework.tools; import com.google.api.tools.framework.model.ConfigSource; import com.google.api.tools.framework.model.Diag; import com.google.api.tools.framework.model.DiagCollector; import com.google.api.tools.framework.model.Model; import com.google.api.tools.framework.model.ProtoServiceReader; import com.google.api.tools.framework.model.SimpleLocation; import com.google.api.tools.framework.snippet.Doc; import com.google.api.tools.framework.snippet.Doc.AnsiColor; import com.google.api.tools.framework.yaml.YamlReader; import com.google.common.base.Splitter; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.protobuf.ByteString; import com.google.protobuf.Message; import com.google.protobuf.TextFormat; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.Map; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import javax.annotation.Nullable; /** Utilities for tools. 
*/ public class ToolUtil { /** * Writes a set of files with directory structure to a .jar. The content is a map from file path * to one of {@link Doc}, {@link String}, or {@code byte[]}. */ public static void writeJar(Map<String, ?> content, String outputName) throws IOException { OutputStream outputStream = new FileOutputStream(outputName); JarOutputStream jarFile = new JarOutputStream(outputStream); OutputStreamWriter writer = new OutputStreamWriter(jarFile, StandardCharsets.UTF_8); try { for (Map.Entry<String, ?> entry : content.entrySet()) { jarFile.putNextEntry(new JarEntry(entry.getKey())); Object value = entry.getValue(); if (value instanceof Doc) { writer.write(((Doc) value).prettyPrint()); writer.flush(); } else if (value instanceof String) { writer.write((String) value); writer.flush(); } else if (value instanceof byte[]) { jarFile.write((byte[]) value); } else { throw new IllegalArgumentException("Expected one of Doc, String, or byte[]"); } jarFile.closeEntry(); } } finally { writer.close(); jarFile.close(); } } /** Writes a proto out to a file. */ public static void writeProto(Message content, String outputName) throws IOException { try (OutputStream outputStream = new FileOutputStream(outputName)) { content.writeTo(outputStream); } } /** Writes a proto out to a file. */ public static void writeTextProto(Message content, String outputName) throws IOException { try (BufferedWriter output = new BufferedWriter(new FileWriter(outputName))) { TextFormat.print(content, output); } } /** * Writes a content object into a set of output files. The content is one of {@link Doc}, {@link * String} or {@code byte[]}. */ public static void writeFiles(Map<String, ?> content, String baseName) throws IOException { for (Map.Entry<String, ?> entry : content.entrySet()) { File outputFile = Strings.isNullOrEmpty(baseName) ? 
new File(entry.getKey()) : new File(baseName, entry.getKey()); outputFile.getParentFile().mkdirs(); OutputStream outputStream = new FileOutputStream(outputFile); OutputStreamWriter writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8); try { Object value = entry.getValue(); if (value instanceof Doc) { writer.write(((Doc) value).prettyPrint()); writer.flush(); } else if (value instanceof String) { writer.write((String) value); writer.flush(); } else if (value instanceof byte[]) { outputStream.write((byte[]) value); outputStream.flush(); } else { throw new IllegalArgumentException("Expected one of Doc, String, or byte[]"); } } finally { writer.close(); } } } /** Report errors and warnings. */ public static void reportDiags(DiagCollector diagCollector, boolean colored) { for (Diag diag : diagCollector.getDiags()) { System.err.println(diagToString(diag, colored)); } } /** Produce a string for the diagnosis, with optional coloring. */ public static String diagToString(Diag diag, boolean colored) { Doc text; switch (diag.getKind()) { case ERROR: text = Doc.text("ERROR: "); if (colored) { text = Doc.color(AnsiColor.RED, text); } break; case WARNING: text = Doc.text("WARNING: "); if (colored) { text = Doc.color(AnsiColor.YELLOW, text); } break; default: text = Doc.text("HINT:"); break; } text = text.add(Doc.text(diag.getLocation().getDisplayString())) .add(Doc.text(": ")) .add(Doc.text(diag.getMessage())); return text.toString(); } public static Set<FileWrapper> sanitizeSourceFiles(List<FileWrapper> sources) { // Does nothing currently. return ImmutableSet.copyOf(sources); } /** Sets up the model configs, reading them from Yaml files and attaching to the model. 
*/ public static List<FileWrapper> readModelConfigs( String dataPath, List<String> configs, DiagCollector diagCollector) { List<FileWrapper> files = Lists.newArrayList(); for (String filename : configs) { File file = findDataFile(filename, dataPath); if (file == null) { diagCollector.addDiag( Diag.error(SimpleLocation.TOPLEVEL, "Cannot find configuration file '%s'.", filename)); } else { try { files.add(FileWrapper.from(filename)); } catch (IOException ex) { diagCollector.addDiag( Diag.error( SimpleLocation.TOPLEVEL, "Cannot read input file '%s': %s", filename, ex.getMessage())); } } } if (diagCollector.hasErrors()) { return null; } return files; } @Nullable public static File findDataFile(String name, String dataPath) { Path file = Paths.get(name); if (file.isAbsolute()) { return java.nio.file.Files.exists(file) ? file.toFile() : null; } for (String path : Splitter.on(File.pathSeparator).split(dataPath)) { file = Paths.get(path, name); if (java.nio.file.Files.exists(file)) { return file.toFile(); } } return null; } @Nullable private static ConfigSource getConfigSourceFromFile( DiagCollector diag, String filename, ByteString fileContents) { if (filename.endsWith(".binarypb") || filename.endsWith(".binaryproto")) { return ProtoServiceReader.readBinaryConfig(diag, filename, fileContents); } if (filename.endsWith(".textproto")) { return ProtoServiceReader.readTextConfig(diag, filename, fileContents); } return YamlReader.readConfig(diag, filename, fileContents.toStringUtf8()); } /** Sets up the model configs, attaching to the model. 
*/ public static void setupModelConfigs(Model model, Set<FileWrapper> files) { DiagCollector diagCollector = model.getDiagReporter().getDiagCollector(); ImmutableList.Builder<ConfigSource> builder = ImmutableList.builder(); for (FileWrapper file : files) { ConfigSource message = getConfigSourceFromFile(diagCollector, file.getFilename(), file.getFileContents()); if (message != null) { builder.add(message); } } if (diagCollector.hasErrors()) { return; } model.setConfigSources(builder.build()); } }
/* Derby - Class org.apache.derbyTesting.functionTests.tests.upgradeTests.Changes10_6 Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.upgradeTests; import org.apache.derbyTesting.junit.SupportFilesSetup; import org.apache.derbyTesting.junit.JDBCDataSource; import java.lang.reflect.Method; import java.sql.SQLException; import java.sql.Statement; import java.sql.Connection; import java.sql.CallableStatement; import java.sql.PreparedStatement; import java.sql.ResultSet; import javax.sql.DataSource; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.derby.catalog.types.RoutineAliasInfo; import org.apache.derby.catalog.TypeDescriptor; import org.apache.derbyTesting.junit.JDBC; /** * Upgrade test cases for 10.6. * If the old version is 10.6 or later then these tests * will not be run. * <BR> 10.6 Upgrade issues <UL> <LI> testSetXplainSchemaProcedure - DERBY-2487 Make sure that SYSCS_UTIL.SYSCS_SET_XPLAIN_SCHEMA can only be run in Derby 10.5 and higher. 
</UL>

*/
public class Changes10_6 extends UpgradeChange {

    // SQL states asserted by the tests below.
    private static final String BAD_SYNTAX = "42X01";
    private static final String TABLE_DOES_NOT_EXIST = "42X05";
    private static final String UPGRADE_REQUIRED = "XCL47";

    // Query used by testSYSCS_INPLACE_COMPRESS_TABLE (DERBY-4215): finds the
    // grantor of the permissions tuple for the compress procedure.
    private static final String QUERY_4215 =
        "select r.grantor\n" +
        "from sys.sysroutineperms r, sys.sysaliases a\n" +
        "where r.aliasid = a.aliasid\n" +
        "and a.alias = 'SYSCS_INPLACE_COMPRESS_TABLE'\n"
        ;
    private static final String CREATE_TYPE_DDL =
        "create type fooType external name 'mypackage.foo' language java\n";
    private static final String DROP_TYPE_DDL =
        "drop type fooType restrict\n";

    // Property that enables the configurable hash authentication scheme (DERBY-4483).
    private static final String HASH_ALGORITHM_PROPERTY =
        "derby.authentication.builtin.algorithm";

    public Changes10_6(String name) {
        super(name);
    }

    /**
     * Return the suite of tests to test the changes made in 10.6.
     * @param phase an integer that indicates the current phase in
     * the upgrade test.
     * @return the test suite created.
     */
    public static Test suite(int phase) {
        TestSuite suite = new TestSuite("Upgrade test for 10.6");

        suite.addTestSuite(Changes10_6.class);
        return new SupportFilesSetup((Test) suite);
    }

    /**
     * Make sure that SYSCS_UTIL.SYSCS_SET_XPLAIN_STYLE can only be run in
     * Derby 10.5 and higher.
     * DERBY-2487
     * Test added for 10.5.
     * @throws SQLException
     *
     */
    public void testSetXplainStyleProcedure() throws SQLException {
        String []xplainProcedures = {
            "call SYSCS_UTIL.SYSCS_SET_XPLAIN_SCHEMA('XPLAIN')",
            "call SYSCS_UTIL.SYSCS_SET_XPLAIN_SCHEMA('')",
            "call SYSCS_UTIL.SYSCS_SET_XPLAIN_MODE(1)",
            "call SYSCS_UTIL.SYSCS_SET_XPLAIN_MODE(0)",
            "values SYSCS_UTIL.SYSCS_GET_XPLAIN_SCHEMA()",
            "values SYSCS_UTIL.SYSCS_GET_XPLAIN_MODE()",
        };
        Statement s;
        //ERROR 42Y03: 'SYSCS_UTIL.SYSCS_SET_XPLAIN_MODE' is not
        // recognized as a function or procedure.
        switch (getPhase())
        {
        case PH_SOFT_UPGRADE: // In soft-upgrade cases, XPLAIN should fail:
        case PH_POST_SOFT_UPGRADE:
            s = createStatement();
            for (int i = 0; i < xplainProcedures.length; i++)
                assertStatementError("42Y03", s, xplainProcedures[i]);
            s.close();
            break;

        case PH_HARD_UPGRADE: // After hard upgrade, XPLAIN should work:
            s = createStatement();
            for (int i = 0; i < xplainProcedures.length; i++)
                s.execute(xplainProcedures[i]);
            s.close();
            break;
        }
    }

    /**
     * Make sure that SYSIBM.CLOBGETSUBSTRING has the correct return value.
     * See https://issues.apache.org/jira/browse/DERBY-4214
     */
    public void testCLOBGETSUBSTRING() throws Exception
    {
        // The function exists from 10.3 and has the correct VARCHAR length
        // from 10.5; earlier created databases retain the old metadata until
        // hard upgrade.
        Version initialVersion = new Version( getOldMajor(), getOldMinor(), 0, 0 );
        Version firstVersionHavingThisFunction = new Version( 10, 3, 0, 0 );
        Version firstVersionHavingCorrectReturnType = new Version( 10, 5, 0, 0 );
        int     wrongLength = 32672;
        int     correctLength = 10890;
        int     actualJdbcType;
        int     actualLength;

        Object   returnType;

        boolean hasFunction = initialVersion.compareTo( firstVersionHavingThisFunction ) >= 0;
        boolean hasCorrectReturnType = initialVersion.compareTo( firstVersionHavingCorrectReturnType ) >= 0;

        Statement s = createStatement();
        ResultSet rs = s.executeQuery
            (
             "select a.aliasinfo\n" +
             "from sys.sysschemas s, sys.sysaliases a\n" +
             "where s.schemaid = a.schemaid\n" +
             "and s.schemaname = 'SYSIBM'\n" +
             "and alias = 'CLOBGETSUBSTRING'\n"
             );
        rs.next();

        switch (getPhase())
        {
        case PH_CREATE:
        case PH_SOFT_UPGRADE:
        case PH_POST_SOFT_UPGRADE:

            if ( !hasFunction ) { break; }

            // Use reflection here because the alias descriptor may come from
            // an old class loader (see getTypeDescriptor below).
            returnType = getTypeDescriptor( rs.getObject( 1 ) );
            actualJdbcType = getJDBCTypeId( returnType );
            actualLength = getMaximumWidth( returnType );
            int              expectedLength = hasCorrectReturnType ? correctLength : wrongLength;

            assertEquals( java.sql.Types.VARCHAR, actualJdbcType );
            assertEquals( expectedLength, actualLength );

            break;

        case PH_HARD_UPGRADE:

            // After hard upgrade the descriptor is from the current class
            // loader, so a direct cast is safe.
            RoutineAliasInfo rai = (RoutineAliasInfo) rs.getObject( 1 );
            TypeDescriptor   td = (TypeDescriptor) rai.getReturnType();

            assertEquals( java.sql.Types.VARCHAR, td.getJDBCTypeId() );
            assertEquals( correctLength, td.getMaximumWidth() );

            break;
        }

        rs.close();
        s.close();
    }

    /**
     * Make sure that SYSCS_UTIL.SYSCS_INPLACE_COMPRESS_TABLE has the correct
     * permissons granted to it.
     * See https://issues.apache.org/jira/browse/DERBY-4215
     */
    public void testSYSCS_INPLACE_COMPRESS_TABLE() throws Exception
    {
        // Permissions tuples for system routines only exist from 10.2 on;
        // before that, SYS.SYSROUTINEPERMS does not exist at all.
        Version initialVersion = new Version( getOldMajor(), getOldMinor(), 0, 0 );
        Version firstVersionHavingPermissions = new Version( 10, 2, 0, 0 );
        boolean beforePermissionsWereAdded = ( initialVersion.compareTo( firstVersionHavingPermissions ) < 0 );

        Statement s = createStatement();

        switch (getPhase())
        {
        case PH_CREATE:
        case PH_SOFT_UPGRADE:
        case PH_POST_SOFT_UPGRADE:

            if ( beforePermissionsWereAdded )
            {
                assertStatementError( TABLE_DOES_NOT_EXIST, s, QUERY_4215 );
            }
            else
            {
                vetDERBY_4215( s );
            }

            break;

        case PH_HARD_UPGRADE:

            vetDERBY_4215( s );

            break;
        }

        s.close();
    }

    /**
     * Vet the permissions on SYSCS_UTIL.SYSCS_INPLACE_COMPRESS_TABLE.
     * There should be only one permissions tuple for this system procedure and
     * the grantor should be APP.
     */
    private void vetDERBY_4215( Statement s ) throws Exception
    {
        String expectedGrantor = "APP";
        ResultSet rs = s.executeQuery( QUERY_4215 );

        assertTrue( rs.next() );

        String actualGrantor = rs.getString( 1 );
        assertEquals( expectedGrantor, actualGrantor );

        // Exactly one tuple expected.
        assertFalse( rs.next() );

        rs.close();
    }

    /**
     * Make sure that you can only create UDTs in a hard-upgraded database.
     * See https://issues.apache.org/jira/browse/DERBY-651
     */
    public void testUDTs() throws Exception
    {
        Statement s = createStatement();

        int phase = getPhase();

        //println( "Phase = " + phase );

        switch ( phase )
        {
        case PH_CREATE:
        case PH_POST_SOFT_UPGRADE:

            // Old engine: the CREATE TYPE syntax does not exist yet.
            assertStatementError( BAD_SYNTAX, s, CREATE_TYPE_DDL );
            assertStatementError( BAD_SYNTAX, s, DROP_TYPE_DDL );

            break;

        case PH_SOFT_UPGRADE:

            // New engine, old dictionary: syntax is known but requires a
            // hard upgrade before it can be used.
            assertStatementError( UPGRADE_REQUIRED, s, CREATE_TYPE_DDL );
            assertStatementError( UPGRADE_REQUIRED, s, DROP_TYPE_DDL );

            break;

        case PH_HARD_UPGRADE:

            s.execute( CREATE_TYPE_DDL );
            s.execute( DROP_TYPE_DDL );

            break;
        }

        s.close();
    }

    /**
     * We would like to just cast the alias descriptor to
     * RoutineAliasDescriptor. However, this doesn't work if we are running on
     * an old version because the descriptor comes from a different class
     * loader. We use reflection to get the information we need.
     */
    private Object getTypeDescriptor( Object routineAliasDescriptor ) throws Exception
    {
        Method  meth = routineAliasDescriptor.getClass().getMethod( "getReturnType", null );

        return meth.invoke( routineAliasDescriptor, null );
    }

    /** Reflectively calls getJDBCTypeId() on a type descriptor (see above). */
    private int getJDBCTypeId( Object typeDescriptor ) throws Exception
    {
        Method  meth = typeDescriptor.getClass().getMethod( "getJDBCTypeId", null );

        return ((Integer) meth.invoke( typeDescriptor, null )).intValue();
    }

    /** Reflectively calls getMaximumWidth() on a type descriptor (see above). */
    private int getMaximumWidth( Object typeDescriptor ) throws Exception
    {
        Method  meth = typeDescriptor.getClass().getMethod( "getMaximumWidth", null );

        return ((Integer) meth.invoke( typeDescriptor, null )).intValue();
    }

    /**
     * Verify that we don't enable the configurable hash authentication
     * scheme when we upgrade a database. See DERBY-4483.
     */
    public void testBuiltinAuthenticationHashNotChangedOnUpgrade()
            throws SQLException {
        // We enable the configurable hash authentication scheme by setting
        // a property, so check that it's NULL in all phases to verify that
        // it's not enabled on upgrade.
        assertNull(getDatabaseProperty(HASH_ALGORITHM_PROPERTY));
    }

    /**
     * Make sure builtin authentication only uses the new configurable hash
     * scheme in hard-upgraded databases. See DERBY-4483.
     */
    public void testBuiltinAuthenticationWithConfigurableHash()
            throws SQLException {

        // This test needs to enable authentication, which is not supported
        // in the default database for the upgrade tests, so roll our own.
        DataSource ds = JDBCDataSource.getDataSourceLogical("BUILTIN_10_6");

        // Add create=true or upgrade=true, as appropriate, since we don't
        // get this for free when we don't use the default database.
        if (getPhase() == PH_CREATE) {
            JDBCDataSource.setBeanProperty(ds, "createDatabase", "create");
        } else if (getPhase() == PH_HARD_UPGRADE) {
            JDBCDataSource.setBeanProperty(
                    ds, "connectionAttributes", "upgrade=true");
        }

        // Connect as database owner, possibly creating or upgrading the
        // database.
        Connection c = ds.getConnection("dbo", "the boss");

        // Let's first verify that all the users can connect after the changes
        // in the previous phase. Would for instance fail in post soft upgrade
        // if soft upgrade saved passwords using the new scheme.
        verifyCanConnect(ds);

        CallableStatement setProp = c.prepareCall(
                "call syscs_util.syscs_set_database_property(?, ?)");

        if (getPhase() == PH_CREATE) {
            // The database is being created. Make sure that builtin
            // authentication is enabled.
            setProp.setString(1, "derby.connection.requireAuthentication");
            setProp.setString(2, "true");
            setProp.execute();

            setProp.setString(1, "derby.authentication.provider");
            setProp.setString(2, "BUILTIN");
            setProp.execute();
        }

        // Set (or reset) passwords for all users.
        setPasswords(setProp);
        setProp.close();

        // We should still be able to connect.
        verifyCanConnect(ds);

        // Check that the passwords are stored using the expected scheme (new
        // configurable hash scheme in hard upgrade, old scheme otherwise).
        verifyPasswords(c, getPhase() == PH_HARD_UPGRADE);

        c.close();

        // The framework doesn't know how to shutdown a database using
        // authentication, so do it manually as database owner here.
        JDBCDataSource.setBeanProperty(ds, "user", "dbo");
        JDBCDataSource.setBeanProperty(ds, "password", "the boss");
        JDBCDataSource.shutdownDatabase(ds);
    }

    /**
     * Information about users for the test of builtin authentication with
     * configurable hash algorithm. Two-dimensional array of strings where
     * each row contains (1) a user name, (2) a password, (3) the name of a
     * digest algorithm with which the password should be hashed, (4) the
     * hashed password when the old scheme is used, and (5) the hashed
     * password when the new scheme is used.
     */
    private static final String[][] USERS = {
        { "dbo", "the boss", null,
          "3b6071d99b1d48ab732e75a8de701b6c77632db65898",
          "3b6071d99b1d48ab732e75a8de701b6c77632db65898"
        },
        { "pat", "postman", "MD5",
          "3b609129e181a7f7527697235c8aead65c461a0257f3",
          "3b61aaca567ed43d1ba2e6402cbf1a723407:MD5"
        },
        { "sam", "fireman", "SHA-1",
          "3b609e5173cfa03620061518adc92f2a58c7b15cf04f",
          "3b6197160362c0122fcd7a63a9da58fd0781140901fb:SHA-1"
        },
    };

    /**
     * Set the passwords for all users specified in {@code USERS}.
     *
     * @param cs a callable statement that sets database properties
     */
    private void setPasswords(CallableStatement cs) throws SQLException {
        for (int i = 0; i < USERS.length; i++) {
            // Use the specified algorithm, if possible. (Will be ignored if
            // the data dictionary doesn't support the new scheme.)
            cs.setString(1, HASH_ALGORITHM_PROPERTY);
            cs.setString(2, USERS[i][2]);
            cs.execute();
            // Set the password.
            cs.setString(1, "derby.user." + USERS[i][0]);
            cs.setString(2, USERS[i][1]);
            cs.execute();
        }
    }

    /**
     * Verify that all passwords for the users in {@code USERS} are stored
     * as expected. Raise an assert failure on mismatch.
     *
     * @param c a connection to the database
     * @param newScheme if {@code true}, the passwords are expected to have
     * been hashed with the new scheme; otherwise, the passwords are expected
     * to have been hashed with the old scheme
     */
    private void verifyPasswords(Connection c, boolean newScheme)
            throws SQLException {
        PreparedStatement ps = c.prepareStatement(
                "values syscs_util.syscs_get_database_property(?)");
        for (int i = 0; i < USERS.length; i++) {
            // Column 4 holds the old-scheme token, column 5 the new-scheme one.
            String expectedToken = USERS[i][newScheme ? 4 : 3];
            ps.setString(1, "derby.user." + USERS[i][0]);
            JDBC.assertSingleValueResultSet(ps.executeQuery(), expectedToken);
        }
        ps.close();
    }

    /**
     * Verify that all users specified in {@code USERS} can connect to the
     * database.
     *
     * @param ds a data source for connecting to the database
     * @throws SQLException if one of the users cannot connect to the database
     */
    private void verifyCanConnect(DataSource ds) throws SQLException {
        for (int i = 0; i < USERS.length; i++) {
            Connection c = ds.getConnection(USERS[i][0], USERS[i][1]);
            c.close();
        }
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.jps.incremental.artifacts.builders;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ClassMap;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.builders.BuildTarget;
import org.jetbrains.jps.builders.TargetOutputIndex;
import org.jetbrains.jps.builders.java.JavaModuleBuildTargetType;
import org.jetbrains.jps.incremental.ModuleBuildTarget;
import org.jetbrains.jps.incremental.artifacts.instructions.ArtifactCompilerInstructionCreator;
import org.jetbrains.jps.incremental.artifacts.instructions.ArtifactInstructionsBuilderContext;
import org.jetbrains.jps.incremental.artifacts.instructions.CopyToDirectoryInstructionCreator;
import org.jetbrains.jps.model.artifact.JpsArtifact;
import org.jetbrains.jps.model.artifact.elements.*;
import org.jetbrains.jps.model.java.*;
import org.jetbrains.jps.model.module.JpsModule;
import org.jetbrains.jps.model.module.JpsModuleSourceRoot;
import org.jetbrains.jps.service.JpsServiceManager;
import org.jetbrains.jps.util.JpsPathUtil;

import java.io.File;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;

/**
 * Registry that maps artifact layout element classes to the {@link LayoutElementBuilderService}
 * that knows how to translate each element into artifact compiler instructions. Builders for the
 * standard element types are registered in the constructor; additional builders are discovered
 * through {@link JpsServiceManager} extensions.
 */
public final class LayoutElementBuildersRegistry {
  private static final Logger LOG = Logger.getInstance(LayoutElementBuildersRegistry.class);

  // Initialization-on-demand holder: instance is created lazily and thread-safely
  // by class loading, without explicit synchronization.
  private static final class InstanceHolder {
    static final LayoutElementBuildersRegistry ourInstance = new LayoutElementBuildersRegistry();
  }

  public static LayoutElementBuildersRegistry getInstance() {
    return InstanceHolder.ourInstance;
  }

  // Maps a JpsPackagingElement class (or superclass) to its builder.
  private final ClassMap<LayoutElementBuilderService> myBuilders;

  private LayoutElementBuildersRegistry() {
    myBuilders = new ClassMap<>();
    // Builders for the standard layout element types.
    LayoutElementBuilderService<?>[] standardBuilders = {
      new RootElementBuilder(), new DirectoryElementBuilder(), new ArchiveElementBuilder(), new DirectoryCopyElementBuilder(),
      new FileCopyElementBuilder(), new ExtractedDirectoryElementBuilder(), new ModuleOutputElementBuilder(),
      new ModuleSourceElementBuilder(),
      new ModuleTestOutputElementBuilder(), new ComplexElementBuilder(), new ArtifactOutputElementBuilder()
    };
    for (LayoutElementBuilderService<?> builder : standardBuilders) {
      myBuilders.put(builder.getElementClass(), builder);
    }
    // Plugin-provided builders may override or extend the standard set.
    for (LayoutElementBuilderService builder : JpsServiceManager.getInstance().getExtensions(LayoutElementBuilderService.class)) {
      myBuilders.put(builder.getElementClass(), builder);
    }
  }

  /**
   * Generates copy instructions for the whole artifact, starting from its root element.
   * Marks the artifact as entered in the context to detect circular inclusions.
   */
  public void generateInstructions(JpsArtifact artifact, CopyToDirectoryInstructionCreator creator, ArtifactInstructionsBuilderContext context) {
    context.enterArtifact(artifact);
    generateInstructions(artifact.getRootElement(), creator, context);
  }

  /** Returns the build targets the given layout element depends on, or an empty list if no builder is registered. */
  public Collection<BuildTarget<?>> getDependencies(JpsPackagingElement element, TargetOutputIndex outputIndex) {
    LayoutElementBuilderService builder = getElementBuilder(element);
    if (builder != null) {
      //noinspection unchecked
      return builder.getDependencies(element, outputIndex);
    }
    return Collections.emptyList();
  }

  // Dispatches a single element to its registered builder (no-op if none found;
  // getElementBuilder already logs the error).
  private void generateInstructions(JpsPackagingElement layoutElement, ArtifactCompilerInstructionCreator instructionCreator,
                                    ArtifactInstructionsBuilderContext builderContext) {
    final LayoutElementBuilderService builder = getElementBuilder(layoutElement);
    if (builder != null) {
      //noinspection unchecked
      builder.generateInstructions(layoutElement, instructionCreator, builderContext);
    }
  }

  // Looks up the builder by element class; logs an error (and returns null) when missing.
  private LayoutElementBuilderService<?> getElementBuilder(JpsPackagingElement layoutElement) {
    final LayoutElementBuilderService<?> builder = myBuilders.get(layoutElement.getClass());
    if (builder == null) {
      LOG.error("Builder not found for artifact output layout element of class " + layoutElement.getClass());
    }
    return builder;
  }

  // Generates instructions for all children of a composite element.
  private void generateChildrenInstructions(JpsCompositePackagingElement element, ArtifactCompilerInstructionCreator instructionCreator,
                                            ArtifactInstructionsBuilderContext builderContext) {
    generateInstructions(element.getChildren(), instructionCreator, builderContext);
  }

  // Generates instructions for the substitution of a complex element (e.g. the
  // expanded content of an included artifact), if it has one.
  private void generateSubstitutionInstructions(JpsComplexPackagingElement element,
                                                ArtifactCompilerInstructionCreator instructionCreator,
                                                ArtifactInstructionsBuilderContext builderContext) {
    final List<JpsPackagingElement> substitution = element.getSubstitution();
    if (substitution != null) {
      generateInstructions(substitution, instructionCreator, builderContext);
    }
  }

  private void generateInstructions(final List<JpsPackagingElement> elements, ArtifactCompilerInstructionCreator instructionCreator,
                                    ArtifactInstructionsBuilderContext builderContext) {
    for (JpsPackagingElement child : elements) {
      generateInstructions(child, instructionCreator, builderContext);
    }
  }

  // Copies module source roots into the artifact; Java sources with a package
  // prefix land in the matching sub-folder.
  private static void generateModuleSourceInstructions(@NotNull List<JpsModuleSourceRoot> roots,
                                                       @NotNull ArtifactCompilerInstructionCreator creator,
                                                       @NotNull JpsPackagingElement contextElement) {
    for (JpsModuleSourceRoot root : roots) {
      File source = root.getFile();
      ArtifactCompilerInstructionCreator target;
      JavaSourceRootProperties javaProperties = root.getProperties(JavaModuleSourceRootTypes.SOURCES);
      if (javaProperties != null) {
        // Mirror the package prefix as a directory path inside the artifact.
        String prefix = javaProperties.getPackagePrefix().replace('.', '/');
        target = creator.subFolderByRelativePath(prefix);
      }
      else {
        target = creator;
      }

      target.addDirectoryCopyInstructions(source, null, target.getInstructionsBuilder().createCopyingHandler(source, contextElement, target));
    }
  }

  // Copies a module's compilation output directory into the artifact (no-op when
  // the module has no output URL configured).
  private static void generateModuleOutputInstructions(@Nullable String outputUrl,
                                                       @NotNull ArtifactCompilerInstructionCreator creator,
                                                       @NotNull JpsPackagingElement contextElement) {
    if (outputUrl != null) {
      File directory = JpsPathUtil.urlToFile(outputUrl);
      creator.addDirectoryCopyInstructions(directory, null, creator.getInstructionsBuilder().createCopyingHandler(directory, contextElement, creator));
    }
  }

  /** Root element: simply descends into its children. */
  private class RootElementBuilder extends LayoutElementBuilderService<JpsArtifactRootElement> {
    RootElementBuilder() {
      super(JpsArtifactRootElement.class);
    }

    @Override
    public void generateInstructions(JpsArtifactRootElement element, ArtifactCompilerInstructionCreator instructionCreator, ArtifactInstructionsBuilderContext builderContext) {
      generateChildrenInstructions(element, instructionCreator, builderContext);
    }
  }

  /** Directory element: descends into children under a new sub-folder. */
  private class DirectoryElementBuilder extends LayoutElementBuilderService<JpsDirectoryPackagingElement> {
    DirectoryElementBuilder() {
      super(JpsDirectoryPackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsDirectoryPackagingElement element,
                                     ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      generateChildrenInstructions(element, instructionCreator.subFolder(element.getDirectoryName()), builderContext);
    }
  }

  /** Archive element: descends into children inside a new archive (jar/zip). */
  private class ArchiveElementBuilder extends LayoutElementBuilderService<JpsArchivePackagingElement> {
    ArchiveElementBuilder() {
      super(JpsArchivePackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsArchivePackagingElement element, ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      generateChildrenInstructions(element, instructionCreator.archive(element.getArchiveName()), builderContext);
    }
  }

  /** Copies a directory from disk into the artifact. */
  private static class DirectoryCopyElementBuilder extends LayoutElementBuilderService<JpsDirectoryCopyPackagingElement> {
    DirectoryCopyElementBuilder() {
      super(JpsDirectoryCopyPackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsDirectoryCopyPackagingElement element, ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      final String dirPath = element.getDirectoryPath();
      if (dirPath != null) {
        final File directory = new File(dirPath);
        instructionCreator.addDirectoryCopyInstructions(directory, null, instructionCreator.getInstructionsBuilder().createCopyingHandler(directory, element, instructionCreator));
      }
    }

    @Override
    public Collection<? extends BuildTarget<?>> getDependencies(@NotNull JpsDirectoryCopyPackagingElement element,
                                                                TargetOutputIndex outputIndex) {
      String dirPath = element.getDirectoryPath();
      if (dirPath != null) {
        return outputIndex.getTargetsByOutputFile(new File(dirPath));
      }
      return Collections.emptyList();
    }
  }

  /** Copies a single file into the artifact, optionally renamed. */
  private static class FileCopyElementBuilder extends LayoutElementBuilderService<JpsFileCopyPackagingElement> {
    FileCopyElementBuilder() {
      super(JpsFileCopyPackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsFileCopyPackagingElement element, ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      final String filePath = element.getFilePath();
      if (filePath != null) {
        final File file = new File(filePath);
        final String fileName = element.getRenamedOutputFileName();
        // Fall back to the original file name when no rename is configured.
        String outputFileName = fileName != null ? fileName : file.getName();
        instructionCreator.addFileCopyInstruction(file, outputFileName,
                                                  instructionCreator.getInstructionsBuilder().createCopyingHandler(file, element, instructionCreator));
      }
    }

    @Override
    public Collection<? extends BuildTarget<?>> getDependencies(@NotNull JpsFileCopyPackagingElement element,
                                                                TargetOutputIndex outputIndex) {
      String filePath = element.getFilePath();
      if (filePath != null) {
        return outputIndex.getTargetsByOutputFile(new File(filePath));
      }
      return Collections.emptyList();
    }
  }

  /** Extracts (part of) a jar's contents into the artifact. */
  private static class ExtractedDirectoryElementBuilder extends LayoutElementBuilderService<JpsExtractedDirectoryPackagingElement> {
    ExtractedDirectoryElementBuilder() {
      super(JpsExtractedDirectoryPackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsExtractedDirectoryPackagingElement element,
                                     ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      final String jarPath = element.getFilePath();
      final String pathInJar = element.getPathInJar();
      instructionCreator.addExtractDirectoryInstruction(new File(jarPath), pathInJar);
    }
  }

  /** Copies a module's production output; depends on the module's PRODUCTION target. */
  private static class ModuleOutputElementBuilder extends LayoutElementBuilderService<JpsProductionModuleOutputPackagingElement> {
    ModuleOutputElementBuilder() {
      super(JpsProductionModuleOutputPackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsProductionModuleOutputPackagingElement element,
                                     ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      generateModuleOutputInstructions(element.getOutputUrl(), instructionCreator, element);
    }

    @Override
    public Collection<? extends BuildTarget<?>> getDependencies(@NotNull JpsProductionModuleOutputPackagingElement element,
                                                                TargetOutputIndex outputIndex) {
      JpsModule module = element.getModuleReference().resolve();
      if (module != null) {
        return Collections.singletonList(new ModuleBuildTarget(module, JavaModuleBuildTargetType.PRODUCTION));
      }
      return Collections.emptyList();
    }
  }

  /** Copies a module's production source roots (not its compiled output). */
  private static class ModuleSourceElementBuilder extends LayoutElementBuilderService<JpsProductionModuleSourcePackagingElement> {
    ModuleSourceElementBuilder() {
      super(JpsProductionModuleSourcePackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsProductionModuleSourcePackagingElement element,
                                     ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      JpsModule module = element.getModuleReference().resolve();
      if (module != null) {
        List<JpsModuleSourceRoot> productionSources = ContainerUtil.filter(module.getSourceRoots(), root -> JavaModuleSourceRootTypes.PRODUCTION.contains(root.getRootType()));
        generateModuleSourceInstructions(productionSources, instructionCreator, element);
      }
    }

    @Override
    public Collection<? extends BuildTarget<?>> getDependencies(@NotNull JpsProductionModuleSourcePackagingElement element,
                                                                TargetOutputIndex outputIndex) {
      // Source copying does not require any compiled output.
      return Collections.emptyList();
    }
  }

  /** Copies a module's test output; depends on the module's TEST target. */
  private static class ModuleTestOutputElementBuilder extends LayoutElementBuilderService<JpsTestModuleOutputPackagingElement> {
    ModuleTestOutputElementBuilder() {
      super(JpsTestModuleOutputPackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsTestModuleOutputPackagingElement element,
                                     ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      generateModuleOutputInstructions(element.getOutputUrl(), instructionCreator, element);
    }

    @Override
    public Collection<? extends BuildTarget<?>> getDependencies(@NotNull JpsTestModuleOutputPackagingElement element,
                                                                TargetOutputIndex outputIndex) {
      JpsModule module = element.getModuleReference().resolve();
      if (module != null) {
        return Collections.singletonList(new ModuleBuildTarget(module, JavaModuleBuildTargetType.TEST));
      }
      return Collections.emptyList();
    }
  }

  /** Generic complex element: expands to its substitution. */
  private class ComplexElementBuilder extends LayoutElementBuilderService<JpsComplexPackagingElement> {
    ComplexElementBuilder() {
      super(JpsComplexPackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsComplexPackagingElement element,
                                     ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      generateSubstitutionInstructions(element, instructionCreator, builderContext);
    }
  }

  /**
   * Reference to another artifact's output. If the referenced artifact has its own output path
   * and no customized layout, its already-built output is copied; otherwise its layout is
   * inlined (guarded by enterArtifact/leaveArtifact to avoid infinite recursion on cycles).
   */
  private class ArtifactOutputElementBuilder extends LayoutElementBuilderService<JpsArtifactOutputPackagingElement> {
    ArtifactOutputElementBuilder() {
      super(JpsArtifactOutputPackagingElement.class);
    }

    @Override
    public void generateInstructions(JpsArtifactOutputPackagingElement element,
                                     ArtifactCompilerInstructionCreator instructionCreator,
                                     ArtifactInstructionsBuilderContext builderContext) {
      final JpsArtifact artifact = element.getArtifactReference().resolve();
      if (artifact == null) return;

      Set<JpsArtifact> parentArtifacts = builderContext.getParentArtifacts();
      List<JpsPackagingElement> customLayout = getCustomArtifactLayout(artifact, parentArtifacts);

      final String outputPath = artifact.getOutputPath();
      if (StringUtil.isEmpty(outputPath) || customLayout != null) {
        try {
          // enterArtifact returns false when the artifact is already being
          // processed up the stack, i.e. a circular inclusion.
          if (builderContext.enterArtifact(artifact)) {
            if (customLayout != null) {
              LayoutElementBuildersRegistry.this.generateInstructions(customLayout, instructionCreator, builderContext);
            }
            else {
              generateSubstitutionInstructions(element, instructionCreator, builderContext);
            }
          }
        }
        finally {
          builderContext.leaveArtifact(artifact);
        }
        return;
      }

      final JpsPackagingElement rootElement = artifact.getRootElement();
      final File outputDir = new File(outputPath);
      if (rootElement instanceof JpsArchivePackagingElement) {
        final String fileName = ((JpsArchivePackagingElement)rootElement).getArchiveName();
        instructionCreator.addFileCopyInstruction(new File(outputDir, fileName), fileName);
      }
      else {
        instructionCreator.addDirectoryCopyInstructions(outputDir);
      }
    }

    // Asks registered customization services for an alternative layout of the artifact;
    // returns null when no service customizes it.
    @Nullable
    private List<JpsPackagingElement> getCustomArtifactLayout(@NotNull JpsArtifact artifact, @NotNull Set<JpsArtifact> parentArtifacts) {
      for (ArtifactLayoutCustomizationService service : JpsServiceManager.getInstance().getExtensions(ArtifactLayoutCustomizationService.class)) {
        List<JpsPackagingElement> elements = service.getCustomizedLayout(artifact, parentArtifacts);
        if (elements != null) {
          return elements;
        }
      }
      return null;
    }
  }
}
/**
 * Copyright 2015 NICTA
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
 * file except in compliance with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under
 * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.n1analytics.paillier;

import com.n1analytics.paillier.util.BigIntegerUtil;

import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Random;

import static com.n1analytics.paillier.TestConfiguration.CONFIGURATIONS;
import static com.n1analytics.paillier.TestUtil.*;
import static org.junit.Assert.assertEquals;

/**
 * Exercises homomorphic addition for every {@link TestConfiguration}: adds pairs of
 * doubles, longs and BigIntegers as encrypted/encoded numbers through every available
 * API combination and checks the decrypted/decoded sum against plain arithmetic.
 */
@RunWith(Parameterized.class)
@Category(SlowTests.class)
public class AdditionTest {

  // Paillier context / key for the configuration currently under test.
  private PaillierContext context;
  private PaillierPrivateKey privateKey;

  // Number of random input pairs tried per test method.
  static private int MAX_ITERATIONS = TestConfiguration.MAX_ITERATIONS;

  /**
   * Flattens the two-dimensional CONFIGURATIONS table into the parameter list
   * consumed by the {@link Parameterized} runner (one run per configuration).
   */
  @Parameterized.Parameters
  public static Collection<Object[]> configurations() {
    Collection<Object[]> configurationParams = new ArrayList<>();

    for(TestConfiguration[] confs : CONFIGURATIONS) {
      for(TestConfiguration conf : confs) {
        configurationParams.add(new Object[]{conf});
      }
    }
    return configurationParams;
  }

  public AdditionTest(TestConfiguration conf) {
    context = conf.context();
    privateKey = conf.privateKey();
  }

  // Strategy for adding two EncryptedNumbers; receives both the plain and the
  // obfuscated form of each operand so implementations can mix them freely.
  interface EncryptedToEncryptedAdder {
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated);
  }

  // Strategy for adding an EncryptedNumber and an EncodedNumber.
  interface EncryptedToEncodedAdder {
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2);
  }

  // Strategy for adding two EncodedNumbers (no encryption involved).
  interface EncodedToEncodedAdder {
    public EncodedNumber eval(EncodedNumber arg1, EncodedNumber arg2);
  }

  /**
   * Combinations of adding Encrypted number to test:
   *  - adding using Object api
   *  - adding using context
   *  - with arguments reversed
   *  - adding obfuscated numbers
   *  - adding non-obfuscated with obfuscated
   */
  EncryptedToEncryptedAdder encryptedToEncryptedAdders[] = new EncryptedToEncryptedAdder[]{new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return arg1_nonObfuscated.add(arg2_nonObfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return arg2_nonObfuscated.add(arg1_nonObfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return context.add(arg1_nonObfuscated, arg2_nonObfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return context.add(arg2_nonObfuscated, arg1_nonObfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return arg1_obfuscated.add(arg2_obfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return arg2_obfuscated.add(arg1_obfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return context.add(arg1_obfuscated, arg2_obfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return context.add(arg2_obfuscated, arg1_obfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return arg1_nonObfuscated.add(arg2_obfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return arg1_obfuscated.add(arg2_nonObfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return context.add(arg1_nonObfuscated, arg2_obfuscated);
    }
  }, new EncryptedToEncryptedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncryptedNumber arg2_nonObfuscated, EncryptedNumber arg2_obfuscated) {
      return context.add(arg1_obfuscated, arg2_nonObfuscated);
    }
  }
  };

  // Every way of adding an encrypted and an encoded number: instance vs. context
  // API, argument order swapped, obfuscated vs. non-obfuscated ciphertext.
  EncryptedToEncodedAdder encryptedToEncodedAdders[] = new EncryptedToEncodedAdder[]{new EncryptedToEncodedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2) {
      return arg1_nonObfuscated.add(arg2);
    }
  }, new EncryptedToEncodedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2) {
      return arg1_obfuscated.add(arg2);
    }
  }, new EncryptedToEncodedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2) {
      return arg2.add(arg1_nonObfuscated);
    }
  }, new EncryptedToEncodedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2) {
      return arg2.add(arg1_obfuscated);
    }
  }, new EncryptedToEncodedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2) {
      return context.add(arg1_nonObfuscated, arg2);
    }
  }, new EncryptedToEncodedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2) {
      return context.add(arg1_obfuscated, arg2);
    }
  }, new EncryptedToEncodedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2) {
      return context.add(arg2, arg1_nonObfuscated);
    }
  }, new EncryptedToEncodedAdder() {
    @Override
    public EncryptedNumber eval(EncryptedNumber arg1_nonObfuscated, EncryptedNumber arg1_obfuscated,
                                EncodedNumber arg2) {
      return context.add(arg2, arg1_obfuscated);
    }
  }};

  // Every way of adding two encoded numbers: instance vs. context API, both orders.
  EncodedToEncodedAdder encodedToEncodedAdders[] = new EncodedToEncodedAdder[]{new EncodedToEncodedAdder() {
    @Override
    public EncodedNumber eval(EncodedNumber arg1, EncodedNumber arg2) {
      return arg1.add(arg2);
    }
  }, new EncodedToEncodedAdder() {
    @Override
    public EncodedNumber eval(EncodedNumber arg1, EncodedNumber arg2) {
      return arg2.add(arg1);
    }
  }, new EncodedToEncodedAdder() {
    @Override
    public EncodedNumber eval(EncodedNumber arg1, EncodedNumber arg2) {
      return context.add(arg1, arg2);
    }
  }, new EncodedToEncodedAdder() {
    @Override
    public EncodedNumber eval(EncodedNumber arg1, EncodedNumber arg2) {
      return context.add(arg2, arg1);
    }
  }};

  /**
   * Adds random finite doubles through every adder strategy and compares the
   * result against plain {@code a + b} within a magnitude-scaled tolerance.
   * Decode failures (ArithmeticException / DecodeException) are ignored, so
   * only combinations whose result is representable are actually checked.
   */
  @Test
  public void testDoubleAddition() {
    double a, b, plainResult, decodedResult, tolerance;
    EncryptedNumber cipherTextA, cipherTextA_obf, cipherTextB, cipherTextB_obf, encryptedResult;
    EncodedNumber encodedA, encodedB, encodedResult, decryptedResult;
    Random rnd = new Random();
    // Largest exponent gap (in base digits) that still fits in the modulus.
    int maxExponentDiff = (int)(0.5 * context.getPublicKey().getModulus().bitLength() / (Math.log(context.getBase()) / Math.log(2)));

    for(int i = 0; i < MAX_ITERATIONS; i++) {
      a = randomFiniteDouble();
      b = randomFiniteDouble();

      // Unsigned contexts cannot encode negatives; flip the sign instead.
      if(context.isUnsigned() && (a < 0 || b < 0)) {
        if (a < 0) {
          a = -a;
        }
        if (b < 0) {
          b = -b;
        }
      }

      encodedA = context.encode(a);
      encodedB = context.encode(b);
      //check for overflows: if the exponents are too far apart, pull b's
      //exponent closer to a's so the aligned sum still fits.
      if (Math.abs(encodedA.exponent - encodedB.exponent) > maxExponentDiff) {
        int newExp = encodedA.exponent - (int)Math.round((rnd.nextDouble()) * maxExponentDiff);
        encodedB = new EncodedNumber(context, encodedB.value, newExp);
      }
      // Re-derive b from its (possibly adjusted) encoding so the expected
      // plain result matches what the encoding can actually represent.
      b = encodedB.decodeDouble();
      encodedB = context.encode(b);
      plainResult = a + b;

      cipherTextA = context.encrypt(a);
      cipherTextB = context.encrypt(b);
      cipherTextA_obf = cipherTextA.obfuscate();
      cipherTextB_obf = cipherTextB.obfuscate();

      // Scale the tolerance with the result's binary exponent (relative error).
      double absValue = Math.abs(plainResult);
      if (absValue == 0.0 || absValue > 1.0) {
        tolerance = EPSILON * Math.pow(2.0, Math.getExponent(plainResult));
      } else {
        tolerance = EPSILON;
      }

      for (EncryptedToEncryptedAdder adder : encryptedToEncryptedAdders) {
        encryptedResult = adder.eval(cipherTextA, cipherTextA_obf, cipherTextB, cipherTextB_obf);
        decryptedResult = encryptedResult.decrypt(privateKey);
        try {
          decodedResult = decryptedResult.decodeDouble();
          assertEquals(plainResult, decodedResult, tolerance);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }

      for (EncryptedToEncodedAdder adder : encryptedToEncodedAdders) {
        encryptedResult = adder.eval(cipherTextA, cipherTextA_obf, encodedB);
        decryptedResult = encryptedResult.decrypt(privateKey);
        try {
          decodedResult = decryptedResult.decodeDouble();
          assertEquals(plainResult, decodedResult, tolerance);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }

      for (EncodedToEncodedAdder adder : encodedToEncodedAdders) {
        encodedResult = adder.eval(encodedA, encodedB);
        try {
          decodedResult = encodedResult.decodeDouble();
          assertEquals(plainResult, decodedResult, tolerance);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }
    }
  }

  /**
   * Adds random longs through every adder strategy; exact equality is expected.
   * Decode failures (e.g. overflow of the plain long sum's encoding) are skipped.
   */
  @Test
  public void testLongAddition() {
    long a, b, plainResult, decodedResult;
    EncryptedNumber cipherTextA, cipherTextA_obf, cipherTextB, cipherTextB_obf, encryptedResult;
    EncodedNumber encodedA, encodedB, encodedResult, decryptedResult;

    for(int i = 0; i < MAX_ITERATIONS; i++) {
      a = random.nextLong();
      b = random.nextLong();

      // Unsigned contexts cannot encode negatives; flip the sign instead.
      if(context.isUnsigned() && (a < 0 || b < 0)) {
        if (a < 0) {
          a = -a;
        }
        if (b < 0) {
          b = -b;
        }
      }

      plainResult = a + b;

      cipherTextA = context.encrypt(a);
      cipherTextA_obf = cipherTextA.obfuscate();
      cipherTextB = context.encrypt(b);
      cipherTextB_obf = cipherTextB.obfuscate();
      encodedA = context.encode(a);
      encodedB = context.encode(b);

      for (EncryptedToEncryptedAdder adder : encryptedToEncryptedAdders) {
        encryptedResult = adder.eval(cipherTextA, cipherTextA_obf, cipherTextB, cipherTextB_obf);
        decryptedResult = encryptedResult.decrypt(privateKey);
        try {
          decodedResult = decryptedResult.decodeLong();
          assertEquals(plainResult, decodedResult);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }

      for (EncryptedToEncodedAdder adder : encryptedToEncodedAdders) {
        encryptedResult = adder.eval(cipherTextA, cipherTextA_obf, encodedB);
        decryptedResult = encryptedResult.decrypt(privateKey);
        try {
          decodedResult = decryptedResult.decodeLong();
          assertEquals(plainResult, decodedResult);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }

      for (EncodedToEncodedAdder adder : encodedToEncodedAdders) {
        encodedResult = adder.eval(encodedA, encodedB);
        try {
          decodedResult = encodedResult.decodeLong();
          assertEquals(plainResult, decodedResult);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }
    }
  }

  /**
   * Adds random BigIntegers (drawn within the context's significand range,
   * with signs mixed in signed contexts) through every adder strategy and
   * expects exact equality. b is halved until the plain sum is valid for
   * the context, so the expected result is always representable.
   */
  @Test
  public void testBigIntegerAddition() {
    BigInteger a, b, plainResult, decodedResult;
    EncryptedNumber cipherTextA, cipherTextB, cipherTextA_obf, cipherTextB_obf, encryptedResult;
    EncodedNumber encodedA, encodedB, encodedResult, decryptedResult;

    for(int i = 0; i < MAX_ITERATIONS; i++) {
      do {
        a = new BigInteger(context.getPrecision(), random);
      } while(BigIntegerUtil.greater(a, context.getMaxSignificand()) || BigIntegerUtil.less(a, context.getMinSignificand()));

      do {
        b = new BigInteger(context.getPrecision(), random);
      } while(BigIntegerUtil.greater(b, context.getMaxSignificand()) || BigIntegerUtil.less(b, context.getMinSignificand()));

      // The random generator above only generates positive BigIntegers, the following code
      // negates some inputs.
      if(context.isSigned()) {
        if(i % 4 == 1) {
          b = b.negate();
        } else if(i % 4 == 2) {
          a = a.negate();
        } else if(i % 4 == 3) {
          a = a.negate();
          b = b.negate();
        }
      }

      plainResult = a.add(b);
      // Shrink b until the expected sum fits in the context's valid range.
      while(!isValid(context, plainResult)) {
        b = b.shiftRight(1);
        plainResult = a.add(b);
      }

      cipherTextA = context.encrypt(a);
      cipherTextB = context.encrypt(b);
      cipherTextA_obf = cipherTextA.obfuscate();
      cipherTextB_obf = cipherTextB.obfuscate();
      encodedA = context.encode(a);
      encodedB = context.encode(b);

      for (EncryptedToEncryptedAdder adder : encryptedToEncryptedAdders) {
        encryptedResult = adder.eval(cipherTextA, cipherTextA_obf, cipherTextB, cipherTextB_obf);
        decryptedResult = encryptedResult.decrypt(privateKey);
        try {
          decodedResult = decryptedResult.decodeBigInteger();
          assertEquals(plainResult, decodedResult);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }

      for (EncryptedToEncodedAdder adder : encryptedToEncodedAdders) {
        encryptedResult = adder.eval(cipherTextA, cipherTextA_obf, encodedB);
        decryptedResult = encryptedResult.decrypt(privateKey);
        try {
          decodedResult = decryptedResult.decodeBigInteger();
          assertEquals(plainResult, decodedResult);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }

      for (EncodedToEncodedAdder adder : encodedToEncodedAdders) {
        encodedResult = adder.eval(encodedA, encodedB);
        try {
          decodedResult = encodedResult.decodeBigInteger();
          assertEquals(plainResult, decodedResult);
        } catch (ArithmeticException e) {
        } catch (DecodeException e) {
        }
      }
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.nodemanager.webapp; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService; import 
org.apache.hadoop.yarn.server.nodemanager.NodeManager; import org.apache.hadoop.yarn.server.nodemanager.ResourceView; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerImpl; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService; import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class TestNMWebServer { private static final File testRootDir = new File("target", TestNMWebServer.class.getSimpleName()); private static File testLogDir = new File("target", TestNMWebServer.class.getSimpleName() + "LogDir"); @Before public void setup() { testRootDir.mkdirs(); testLogDir.mkdir(); } @After public void tearDown() { FileUtil.fullyDelete(testRootDir); FileUtil.fullyDelete(testLogDir); } private int startNMWebAppServer(String webAddr) { Context nmContext = new NodeManager.NMContext(null, null, null, null, null); ResourceView resourceView = new ResourceView() { @Override public long getVmemAllocatedForContainers() { return 0; } @Override public long getPmemAllocatedForContainers() { return 0; } @Override public long getVCoresAllocatedForContainers() { return 0; } @Override public boolean isVmemCheckEnabled() { return true; } @Override public boolean isPmemCheckEnabled() { return true; } }; Configuration conf = new Configuration(); conf.set(YarnConfiguration.NM_LOCAL_DIRS, 
testRootDir.getAbsolutePath()); conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath()); NodeHealthCheckerService healthChecker = new NodeHealthCheckerService(); healthChecker.init(conf); LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler(); conf.set(YarnConfiguration.NM_WEBAPP_ADDRESS, webAddr); WebServer server = new WebServer(nmContext, resourceView, new ApplicationACLsManager(conf), dirsHandler); try { server.init(conf); server.start(); return server.getPort(); } finally { server.stop(); healthChecker.stop(); } } @Test public void testNMWebAppWithOutPort() throws IOException { int port = startNMWebAppServer("0.0.0.0"); validatePortVal(port); } private void validatePortVal(int portVal) { Assert.assertTrue("Port is not updated", portVal > 0); Assert.assertTrue("Port is default "+ YarnConfiguration.DEFAULT_NM_PORT, portVal !=YarnConfiguration.DEFAULT_NM_PORT); } @Test public void testNMWebAppWithEphemeralPort() throws IOException { int port = startNMWebAppServer("0.0.0.0:0"); validatePortVal(port); } @Test public void testNMWebApp() throws IOException, YarnException { Context nmContext = new NodeManager.NMContext(null, null, null, null, null); ResourceView resourceView = new ResourceView() { @Override public long getVmemAllocatedForContainers() { return 0; } @Override public long getPmemAllocatedForContainers() { return 0; } @Override public long getVCoresAllocatedForContainers() { return 0; } @Override public boolean isVmemCheckEnabled() { return true; } @Override public boolean isPmemCheckEnabled() { return true; } }; Configuration conf = new Configuration(); conf.set(YarnConfiguration.NM_LOCAL_DIRS, testRootDir.getAbsolutePath()); conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath()); NodeHealthCheckerService healthChecker = new NodeHealthCheckerService(); healthChecker.init(conf); LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler(); WebServer server = new WebServer(nmContext, resourceView, new 
ApplicationACLsManager(conf), dirsHandler); server.init(conf); server.start(); // Add an application and the corresponding containers RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(conf); Dispatcher dispatcher = new AsyncDispatcher(); String user = "nobody"; long clusterTimeStamp = 1234; ApplicationId appId = BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp, 1); Application app = mock(Application.class); when(app.getUser()).thenReturn(user); when(app.getAppId()).thenReturn(appId); nmContext.getApplications().put(appId, app); ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId( appId, 1); ContainerId container1 = BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, 0); ContainerId container2 = BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, 1); NodeManagerMetrics metrics = mock(NodeManagerMetrics.class); NMStateStoreService stateStore = new NMNullStateStoreService(); for (ContainerId containerId : new ContainerId[] { container1, container2}) { // TODO: Use builder utils ContainerLaunchContext launchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); long currentTime = System.currentTimeMillis(); Token containerToken = BuilderUtils.newContainerToken(containerId, "127.0.0.1", 1234, user, BuilderUtils.newResource(1024, 1), currentTime + 10000L, 123, "password".getBytes(), currentTime); Context context = mock(Context.class); Container container = new ContainerImpl(conf, dispatcher, launchContext, null, metrics, BuilderUtils.newContainerTokenIdentifier(containerToken), context) { @Override public ContainerState getContainerState() { return ContainerState.RUNNING; }; }; nmContext.getContainers().put(containerId, container); //TODO: Gross hack. Fix in code. 
ApplicationId applicationId = containerId.getApplicationAttemptId().getApplicationId(); nmContext.getApplications().get(applicationId).getContainers() .put(containerId, container); writeContainerLogs(nmContext, containerId, dirsHandler); } // TODO: Pull logs and test contents. // Thread.sleep(1000000); } private void writeContainerLogs(Context nmContext, ContainerId containerId, LocalDirsHandlerService dirsHandler) throws IOException, YarnException { // ContainerLogDir should be created File containerLogDir = ContainerLogsUtils.getContainerLogDirs(containerId, dirsHandler).get(0); containerLogDir.mkdirs(); for (String fileType : new String[] { "stdout", "stderr", "syslog" }) { Writer writer = new FileWriter(new File(containerLogDir, fileType)); writer.write(ConverterUtils.toString(containerId) + "\n Hello " + fileType + "!"); writer.close(); } } }
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.internal.policy;

import android.test.ActivityInstrumentationTestCase2;
import android.test.UiThreadTest;
import android.test.suitebuilder.annotation.SmallTest;
import android.view.ActionMode;
import android.view.ActionMode.Callback;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SearchEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager.LayoutParams;
import android.view.accessibility.AccessibilityEvent;

import java.util.List;

/**
 * Tests {@link PhoneWindow}'s {@link ActionMode} related methods.
 *
 * <p>Each test installs a {@link MockWindowCallback} on the activity's window;
 * the callback can either hand back its own {@link MockActionMode} (so the test
 * controls the mode) or return null so PhoneWindow creates the mode itself.
 */
@SmallTest
public final class PhoneWindowActionModeTest
        extends ActivityInstrumentationTestCase2<PhoneWindowActionModeTestActivity> {

    private PhoneWindow mPhoneWindow;
    private MockWindowCallback mWindowCallback;
    private MockActionModeCallback mActionModeCallback;

    public PhoneWindowActionModeTest() {
        super(PhoneWindowActionModeTestActivity.class);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Replace the window callback so tests observe/control action-mode creation.
        mPhoneWindow = (PhoneWindow) getActivity().getWindow();
        mWindowCallback = new MockWindowCallback();
        mPhoneWindow.setCallback(mWindowCallback);
        mActionModeCallback = new MockActionModeCallback();
    }

    // The mode returned by the window callback is the one handed to the caller.
    public void testStartActionModeWithCallback() {
        mWindowCallback.mShouldReturnOwnActionMode = true;

        ActionMode mode = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_FLOATING);

        assertEquals(mWindowCallback.mLastCreatedActionMode, mode);
    }

    // Starting a second PRIMARY mode finishes the first PRIMARY mode.
    public void testStartActionModePrimaryFinishesPreviousMode() {
        // Use custom callback to control the provided ActionMode.
        mWindowCallback.mShouldReturnOwnActionMode = true;

        ActionMode mode1 = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_PRIMARY);
        ActionMode mode2 = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_PRIMARY);

        assertTrue(mode1 instanceof MockActionMode);
        assertTrue(((MockActionMode) mode1).mIsFinished);
        assertNotNull(mode2);
    }

    // Starting a second FLOATING mode finishes the first FLOATING mode.
    public void testStartActionModeFloatingFinishesPreviousMode() {
        // Use custom callback to control the provided ActionMode.
        mWindowCallback.mShouldReturnOwnActionMode = true;

        ActionMode mode1 = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_FLOATING);
        ActionMode mode2 = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_FLOATING);

        assertTrue(mode1 instanceof MockActionMode);
        assertTrue(((MockActionMode) mode1).mIsFinished);
        assertNotNull(mode2);
    }

    // A PRIMARY mode must not finish an existing FLOATING mode.
    public void testStartActionModePreservesPreviousModeOfDifferentType1() {
        // Use custom callback to control the provided ActionMode.
        mWindowCallback.mShouldReturnOwnActionMode = true;

        ActionMode mode1 = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_FLOATING);
        ActionMode mode2 = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_PRIMARY);

        assertTrue(mode1 instanceof MockActionMode);
        assertFalse(((MockActionMode) mode1).mIsFinished);
        assertNotNull(mode2);
    }

    // A FLOATING mode must not finish an existing PRIMARY mode.
    public void testStartActionModePreservesPreviousModeOfDifferentType2() {
        // Use custom callback to control the provided ActionMode.
        mWindowCallback.mShouldReturnOwnActionMode = true;

        ActionMode mode1 = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_PRIMARY);
        ActionMode mode2 = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_FLOATING);

        assertTrue(mode1 instanceof MockActionMode);
        assertFalse(((MockActionMode) mode1).mIsFinished);
        assertNotNull(mode2);
    }

    // When the window callback supplies its own mode, PhoneWindow must not drive
    // the ActionMode.Callback lifecycle, but must still notify the window callback.
    public void testWindowCallbackModesLifecycleIsNotHandled() {
        mWindowCallback.mShouldReturnOwnActionMode = true;

        ActionMode mode = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_PRIMARY);

        assertNotNull(mode);
        assertEquals(mWindowCallback.mLastCreatedActionMode, mode);
        assertFalse(mActionModeCallback.mIsCreateActionModeCalled);
        assertTrue(mWindowCallback.mIsActionModeStarted);
    }

    // PhoneWindow-created PRIMARY modes go through onCreateActionMode and
    // onActionModeStarted, and report the requested type.
    @UiThreadTest
    public void testCreatedPrimaryModeLifecycleIsHandled() {
        mWindowCallback.mShouldReturnOwnActionMode = false;

        ActionMode mode = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_PRIMARY);

        assertNotNull(mode);
        assertEquals(ActionMode.TYPE_PRIMARY, mode.getType());
        assertTrue(mActionModeCallback.mIsCreateActionModeCalled);
        assertTrue(mWindowCallback.mIsActionModeStarted);
    }

    // Same lifecycle check for PhoneWindow-created FLOATING modes.
    @UiThreadTest
    public void testCreatedFloatingModeLifecycleIsHandled() {
        mWindowCallback.mShouldReturnOwnActionMode = false;

        ActionMode mode = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_FLOATING);

        assertNotNull(mode);
        assertEquals(ActionMode.TYPE_FLOATING, mode.getType());
        assertTrue(mActionModeCallback.mIsCreateActionModeCalled);
        assertTrue(mWindowCallback.mIsActionModeStarted);
    }

    // If onCreateActionMode returns false, no mode is started or returned.
    @UiThreadTest
    public void testCreatedModeIsNotStartedIfCreateReturnsFalse() {
        mWindowCallback.mShouldReturnOwnActionMode = false;
        mActionModeCallback.mShouldCreateActionMode = false;

        ActionMode mode = mPhoneWindow.getDecorView().startActionMode(
                mActionModeCallback, ActionMode.TYPE_FLOATING);

        assertTrue(mActionModeCallback.mIsCreateActionModeCalled);
        assertFalse(mWindowCallback.mIsActionModeStarted);
        assertNull(mode);
    }

    /**
     * Window.Callback stub that records action-mode activity. When
     * {@code mShouldReturnOwnActionMode} is set, it returns its own
     * {@link MockActionMode} from onWindowStartingActionMode; otherwise it
     * returns null so PhoneWindow creates the mode. All other callbacks are
     * no-ops.
     */
    private static final class MockWindowCallback implements Window.Callback {
        private boolean mShouldReturnOwnActionMode = false;
        private MockActionMode mLastCreatedActionMode;
        private boolean mIsActionModeStarted = false;

        @Override
        public boolean dispatchKeyEvent(KeyEvent event) {
            return false;
        }

        @Override
        public boolean dispatchKeyShortcutEvent(KeyEvent event) {
            return false;
        }

        @Override
        public boolean dispatchTouchEvent(MotionEvent event) {
            return false;
        }

        @Override
        public boolean dispatchTrackballEvent(MotionEvent event) {
            return false;
        }

        @Override
        public boolean dispatchGenericMotionEvent(MotionEvent event) {
            return false;
        }

        @Override
        public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) {
            return false;
        }

        @Override
        public View onCreatePanelView(int featureId) {
            return null;
        }

        @Override
        public boolean onCreatePanelMenu(int featureId, Menu menu) {
            return false;
        }

        @Override
        public boolean onPreparePanel(int featureId, View view, Menu menu) {
            return false;
        }

        @Override
        public boolean onMenuOpened(int featureId, Menu menu) {
            return false;
        }

        @Override
        public boolean onMenuItemSelected(int featureId, MenuItem item) {
            return false;
        }

        @Override
        public void onWindowAttributesChanged(LayoutParams attrs) {}

        @Override
        public void onContentChanged() {}

        @Override
        public void onWindowFocusChanged(boolean hasFocus) {}

        @Override
        public void onAttachedToWindow() {}

        @Override
        public void onDetachedFromWindow() {}

        @Override
        public void onPanelClosed(int featureId, Menu menu) {}

        @Override
        public boolean onSearchRequested() {
            return false;
        }

        @Override
        public boolean onSearchRequested(SearchEvent searchEvent) {
            return false;
        }

        @Override
        public ActionMode onWindowStartingActionMode(Callback callback) {
            if (mShouldReturnOwnActionMode) {
                MockActionMode mode = new MockActionMode();
                mLastCreatedActionMode = mode;
                return mode;
            }
            return null;
        }

        @Override
        public ActionMode onWindowStartingActionMode(Callback callback, int type) {
            if (mShouldReturnOwnActionMode) {
                MockActionMode mode = new MockActionMode();
                mode.mActionModeType = type;
                mLastCreatedActionMode = mode;
                return mode;
            }
            return null;
        }

        @Override
        public void onActionModeStarted(ActionMode mode) {
            mIsActionModeStarted = true;
        }

        @Override
        public void onActionModeFinished(ActionMode mode) {}
    }

    /**
     * ActionMode.Callback stub that records whether onCreateActionMode was
     * called and lets tests force creation to fail via
     * {@code mShouldCreateActionMode}.
     */
    private static final class MockActionModeCallback implements ActionMode.Callback {
        private boolean mShouldCreateActionMode = true;
        private boolean mIsCreateActionModeCalled = false;

        @Override
        public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
            return true;
        }

        @Override
        public void onDestroyActionMode(ActionMode mode) {}

        @Override
        public boolean onCreateActionMode(ActionMode mode, Menu menu) {
            mIsCreateActionModeCalled = true;
            return mShouldCreateActionMode;
        }

        @Override
        public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
            return false;
        }
    }

    /**
     * Minimal ActionMode whose only observable behavior is its type and
     * whether {@link #finish()} was called; every other override is a no-op.
     */
    private static final class MockActionMode extends ActionMode {
        private int mActionModeType = ActionMode.TYPE_PRIMARY;
        private boolean mIsFinished = false;

        @Override
        public int getType() {
            return mActionModeType;
        }

        @Override
        public void setTitle(CharSequence title) {}

        @Override
        public void setTitle(int resId) {}

        @Override
        public void setSubtitle(CharSequence subtitle) {}

        @Override
        public void setSubtitle(int resId) {}

        @Override
        public void setCustomView(View view) {}

        @Override
        public void invalidate() {}

        @Override
        public void finish() {
            mIsFinished = true;
        }

        @Override
        public Menu getMenu() {
            return null;
        }

        @Override
        public CharSequence getTitle() {
            return null;
        }

        @Override
        public CharSequence getSubtitle() {
            return null;
        }

        @Override
        public View getCustomView() {
            return null;
        }

        @Override
        public MenuInflater getMenuInflater() {
            return null;
        }
    }
}
/* * Copyright 2009-2015 DigitalGlobe, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package org.mrgeo.mapalgebra; import org.apache.hadoop.conf.Configuration; import org.mrgeo.mapreduce.formats.TileClusterInfo; import org.mrgeo.mapreduce.job.JobCancelledException; import org.mrgeo.mapreduce.job.JobFailedException; import org.mrgeo.mapreduce.job.JobListener; import org.mrgeo.progress.Progress; import org.mrgeo.progress.ProgressHierarchy; import org.mrgeo.utils.Bounds; import org.mrgeo.utils.HadoopUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; import java.util.Set; import java.util.Vector; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; public class MapAlgebraExecutioner { private static final Logger _log = LoggerFactory.getLogger(MapAlgebraExecutioner.class); private JobListener jobListener = null; private static ExecutorService executorSvc = null; private final Vector<Future<RunnableMapOp>> futures = new Vector<>(); private class RunnableMapOp implements Runnable { Throwable exception = null; MapOp _op; ProgressHierarchy _progress; RunnableMapOp(Configuration conf, MapOp op, ProgressHierarchy progress) { _op = op; _progress = progress; } @Override public void run() { try { // Set up progress for execute listeners if there are any Progress opProgress = _progress; List<MapOp> 
executeListeners = _op.getExecuteListeners(); if (_progress != null) { _progress.starting(); } // Run the operation itself if (opProgress != null) { opProgress.starting(); } if (_op instanceof OutputProducer && ((OutputProducer)_op).getOutputName() == null) { ((OutputProducer) _op).resolveOutputName(); } _op.build(opProgress); if (opProgress != null) { opProgress.complete(); } // Build execute listeners if there are any if (executeListeners != null) { for (MapOp listener : executeListeners) { if (listener != null) { ProgressHierarchy listenerProgress = new ProgressHierarchy(); if (listener instanceof OutputProducer) { ((OutputProducer) listener).resolveOutputName(); } listener.build(listenerProgress); } } } if (_progress != null) { _progress.complete(); } } catch (Throwable t) { t.printStackTrace(); exception = t; } } } MapOp _root; String output = null; public void setOutputName(String p) { output = p; } public void setRoot(MapOp root) { _root = root; } public void setJobListener(JobListener jl) { jobListener = jl; } public void cancel() { synchronized (futures) { for (Future<RunnableMapOp> future : futures) { // cancel (true - may interrupt the job if running) future.cancel(true); } } } public void execute(Configuration conf, Progress p) throws JobFailedException, JobCancelledException { execute(conf, p, false); } public void execute(Configuration conf, Progress progress, boolean buildPyramid) throws JobFailedException, JobCancelledException { try { if (_root == null) { throw new IllegalArgumentException("You must specify a root node."); } if (!(_root instanceof OutputProducer)) { throw new IllegalArgumentException("The last operation in the map algebra must produce output"); } ProgressHierarchy ph = new ProgressHierarchy(progress); ph.createChild(1f); ph.createChild(4f); ph.createChild(2f); if (Thread.currentThread().isInterrupted()) { throw new InterruptedException(); } TileClusterInfo tileClusterInfo = calculateTileClusterInfo(_root); 
MapAlgebraExecutioner.setOverallTileClusterInfo(_root, tileClusterInfo); executeChildren(conf, _root, ph.getChild(0)); // If the root is deferred, then at this point it has been "prepared" // but not built. So we need to build it to get the final result. ((OutputProducer)_root).setOutputName(output); _root.build(ph.getChild(1)); _root.postBuild(ph.getChild(2), buildPyramid); } catch (JobFailedException | JobCancelledException e) { // job interrupted cancel(); throw (e); } catch (InterruptedException e) { // job interrupted cancel(); throw new JobCancelledException(e.getMessage()); } catch (Exception e) { cancel(); e.printStackTrace(); throw new JobFailedException(e.getMessage()); } finally { // Free up unneeded memory in the MapOp tree we just executed and saved _log.info("Clearing memory from the map op tree"); _root.clear(); try { _log.info("Cleaning temp files from the map op tree"); cleanupMapOp(_root); } catch (IOException e) { _log.error("Failure while deleting temporary resources", e); } } } void executeChildren(final Configuration conf, final MapOp mapOp, final ProgressHierarchy ph) throws JobFailedException, InterruptedException, ExecutionException { // Set up all of the progress hierarchy we need ahead of time ProgressHierarchy phParent = null; ProgressHierarchy phChildren = null; if (ph != null) { ph.starting(); phParent = ph.createChild(1f); phChildren = ph.createChild(1f); for (int i=0; i < mapOp.getInputs().size(); i++) { ProgressHierarchy phChild = phChildren.createChild(1f); phChild.createChild(1f); phChild.createChild(2f); } } mapOp.setJobListener(jobListener); // Store a list of the children that need to be built. After we've traversed // all the children, they are built in parallel. 
Vector<RunnableMapOp> v = new Vector<>(); int i = 0; for (MapOp child : mapOp.getInputs()) { ProgressHierarchy pc1 = null; ProgressHierarchy pc2 = null; if (ph != null) { pc1 = phChildren.getChild(i).getChild(0); pc2 = phChildren.getChild(i).getChild(1); } executeChildren(conf, child, pc2); // The build() call to the MapOp must be invoked while processing it's // parent because at the child level, there is not enough context // to decide if build() should be called. if (!(child instanceof DeferredExecutor) || !(mapOp instanceof DeferredExecutor)) { v.add(new RunnableMapOp(conf, child, pc1)); } // see if the child has any execute listeners (like a save(...)), if so, make sure we run // it here, otherwise the listeners are quietly ignored else if (child.getExecuteListeners() != null && child.getExecuteListeners().size() > 0) { v.add(new RunnableMapOp(conf, child, pc1)); } else { // If both the child and the parent are deferred executors, but the child // is a different type of deferred executor, then we need to build the // child since they can't built together. DeferredExecutor deferredChild = (DeferredExecutor) child; DeferredExecutor deferredMapOp = (DeferredExecutor) mapOp; if (!deferredChild.getOperationId().equals(deferredMapOp.getOperationId())) { v.add(new RunnableMapOp(conf, child, pc1)); } } i++; } // If there are children to be built, do so in parallel if (v.size() != 0) { runThreads(conf, v); } // The prepare() is called at the level being recursed because there is // nothing more to know than whether or not it is a deferred map op. 
if (mapOp instanceof DeferredExecutor) { try { ((DeferredExecutor)mapOp).prepare(phParent); } catch (IOException e) { _log.error("Failure running map algebra", e); throw new JobFailedException(e.getMessage()); } } if (ph != null) { ph.complete(); } } private static void cleanupMapOp(final MapOp mapOp) throws IOException { // Cleanup the entire tree of map ops for (MapOp child : mapOp.getInputs()) { cleanupMapOp(child); } mapOp.cleanup(); } private void addToFutures(Future<RunnableMapOp> f) { synchronized (futures) { futures.add(f); } } private void runThreads(Configuration conf, Vector<? extends RunnableMapOp> threads) throws JobFailedException, InterruptedException, ExecutionException { if (Thread.currentThread().isInterrupted()) { throw new InterruptedException(); } Vector<Future<RunnableMapOp>> futureList = new Vector<>(); for (RunnableMapOp r : threads) { Future<RunnableMapOp> future = submit(conf, r); addToFutures(future); futureList.add(future); } // wait for all tasks to complete before continuing for (Future<RunnableMapOp> f : futureList) { f.get(); } Throwable firstProblem = null; for (RunnableMapOp thread : threads) { if (thread.exception != null) { if (firstProblem == null) { firstProblem = thread.exception; } thread.exception.printStackTrace(); } } if (firstProblem != null) { throw new JobFailedException(firstProblem.getMessage()); } } private synchronized Future<RunnableMapOp> submit(Configuration conf, RunnableMapOp r) { if (executorSvc == null) { // we don't want to overload Hadoop w/ lots of job requests. 10 at a time // should be plenty to maximize use of the system. - TODO: move to a // config? // if we're using the local job tracker, we only want 1 thread, because // many versions of the // localJobTracker class are NOT threadsafe. 
if (HadoopUtils.isLocal(conf)) { executorSvc = Executors.newFixedThreadPool(1); } else { executorSvc = Executors.newFixedThreadPool(10); } } @SuppressWarnings("unchecked") Future<RunnableMapOp> future = (Future<RunnableMapOp>) executorSvc.submit(r); return future; } /** * Calculate the input pyramid paths from the entire tree this image. This should _not_ call * getOutput() on its input MapOps. * */ public static void calculateInputs(MapOp mapOp, Set<String> inputPyramids) { if (mapOp instanceof InputsCalculator) { inputPyramids.addAll(((InputsCalculator)mapOp).calculateInputs()); } else { for (final MapOp input : mapOp.getInputs()) { calculateInputs(input, inputPyramids); } } } /** * Calculate the combined bounds of all the inputs, which gives us the total bounds of this image. * This should _not_ call getOutput() on its input MapOps. * */ public static Bounds calculateBounds(MapOp mapOp) throws IOException { if (mapOp instanceof BoundsCalculator) { return ((BoundsCalculator)mapOp).calculateBounds(); } else { final Bounds b = new Bounds(); for (final MapOp input : mapOp.getInputs()) { if (input != null) { b.expand(calculateBounds(input)); } } return b; } } /** * Calculate the maximum (most detailed) zoom level of all the inputs below the * mapOp passed in. This should _not_ call getOutput() on its input MapOps. * */ public static int calculateMaximumZoomlevel(MapOp mapOp) throws IOException { if (mapOp instanceof MaximumZoomLevelCalculator) { return ((MaximumZoomLevelCalculator)mapOp).calculateMaximumZoomlevel(); } else { int zoom = 0; for (final MapOp input : mapOp.getInputs()) { final int z = calculateMaximumZoomlevel(input); if (z > zoom) { zoom = z; } } return zoom; } } /** * Calculates the combined neighborhood of raster tiles to input. 
* @throws IOException */ public static TileClusterInfo calculateTileClusterInfo(MapOp mapOp) throws IOException { if (mapOp instanceof TileClusterInfoCalculator) { return ((TileClusterInfoCalculator)mapOp).calculateTileClusterInfo(); } else { final TileClusterInfo tileClusterInfo = new TileClusterInfo(); if (mapOp.getInputs() != null) { for (final MapOp input : mapOp.getInputs()) { if (input != null) { tileClusterInfo.expand(calculateTileClusterInfo(input)); } } } return tileClusterInfo; } } /** * Calculate the tile size of all the inputs. The tile size is expected to be the same for all * inputs. This should _not_ call getOutput() on its input MapOps. * */ public static int calculateTileSize(MapOp mapOp) throws IOException { // Return the first tile size greater than 0. It is assumed at this point // that tile sizes of all rasters will be the same. if (mapOp instanceof TileSizeCalculator) { return ((TileSizeCalculator)mapOp).calculateTileSize(); } else { for (final MapOp input : mapOp.getInputs()) { int tileSize = calculateTileSize(input); if (tileSize > 0) { return tileSize; } } } return 0; } /** * After the tile cluster info has been computed across the entire MapOp tree, the results are set * using this method. MapOp's that require neighborhood tiles to do their job should store the * passed tileClusterInfo for use during their execution. The base MapOp implementation of this * function does not store the tile cluster info, it only passes it along to its children. * */ public static void setOverallTileClusterInfo(final MapOp mapOp, final TileClusterInfo tileClusterInfo) { if (mapOp instanceof TileClusterInfoConsumer) { ((TileClusterInfoConsumer)mapOp).setOverallTileClusterInfo(tileClusterInfo); } // Recurse through the children if (mapOp.getInputs() != null) { for (final MapOp input : mapOp.getInputs()) { setOverallTileClusterInfo(input, tileClusterInfo); } } } }
/* * Copyright (c) 2009-2013 David Soergel <dev@davidsoergel.com> * Licensed under the Apache License, Version 2.0 * http://www.apache.org/licenses/LICENSE-2.0 */ package com.davidsoergel.trees; import org.jetbrains.annotations.NotNull; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; // TODO make ListHierarchyNode, etc. extend this /** * Abstract implementation of some of the most basic HierarchyNode functionality. Concrete classes extending this need * implement only getChildren() and newChild(), because they must choose what kind of Collection to use for the * children. * * @author <a href="mailto:dev@davidsoergel.com">David Soergel</a> * @version $Id: AbstractHierarchyNode.java 524 2009-10-10 07:55:47Z soergel $ */ public abstract class AbstractHierarchyNode<KV, H extends HierarchyNode<KV, H>> implements HierarchyNode<KV, H> { // ------------------------------ FIELDS ------------------------------ protected H parent;//HierarchyNode<? extends T, I> protected KV payload; protected AbstractHierarchyNode(KV payload) { this.payload = payload; } protected AbstractHierarchyNode() { } // --------------------- GETTER / SETTER METHODS --------------------- /** * {@inheritDoc} */ public KV getPayload() { return payload; } /** * {@inheritDoc} */ public void setPayload(KV payload) { this.payload = payload; } /** * {@inheritDoc} */ public H getParent()//HierarchyNode<? 
extends T, I> { return parent; } public void setParent(H parent) { if (this.parent == parent) { return; } if (this.parent != null) { this.parent.unregisterChild((H) this); } this.parent = parent; if (this.parent != null) { this.parent.registerChild((H) this); } // parent.getChildren().add((I) this); } // ------------------------ INTERFACE METHODS ------------------------ // --------------------- Interface HierarchyNode --------------------- //private Collection<HierarchyNode<T>> children; /** * {@inheritDoc} */ @NotNull public abstract Collection<? extends H> getChildren(); // -------------------------- OTHER METHODS -------------------------- /** * {@inheritDoc} */ /* public void addChild(HierarchyNode<? extends T, I> child) { getChildren().add(child); // NO! // child.setParent(this); }*/ //public abstract I newChild(T contents); /** * {@inheritDoc} */ @NotNull public H getChildWithPayload(KV payload) throws NoSuchNodeException {// We could map the children collection as a Map; but that's some hassle, and since there are generally just 2 children anyway, this is simpler // also, the child id is often not known when it is added to the children Set, so putting the child into a children Map wouldn't work for (H child : getChildren()) { final KV cp = child.getPayload(); if ((cp == null && payload == null) || cp.equals(payload)) { return child; } } throw new NoSuchNodeException("No node found with payload " + payload); } // ------------------------ INTERFACE METHODS ------------------------ // --------------------- Interface HierarchyNode --------------------- public boolean isLeaf() { Collection<? extends H> children = getChildren(); return children == null || children.isEmpty(); } public List<HierarchyNode<KV, H>> getAncestorPath() { return getAncestorPath(true); } public List<HierarchyNode<KV, H>> getAncestorPath(boolean includeSelf) { List<HierarchyNode<KV, H>> result = new LinkedList<HierarchyNode<KV, H>>(); HierarchyNode<KV, H> trav = includeSelf ? 
this : getParent(); while (trav != null) { result.add(0, trav); trav = trav.getParent(); } return result; } public List<KV> getAncestorPathPayloads() { List<KV> result = new LinkedList<KV>(); HierarchyNode<KV, H> trav = this; while (trav != null) { result.add(0, trav.getPayload()); trav = trav.getParent(); } return result; } /** * Returns an iterator over a set of elements of type T. * * @return an Iterator. */ public Iterator<H> iterator() { return new DepthFirstTreeIteratorImpl(this); } public DepthFirstTreeIterator<KV, H> depthFirstIterator() { return new DepthFirstTreeIteratorImpl(this); } /* public HierarchyNode<T, I> getSelf() { return this; }*/ public HierarchyNode<KV, H> getSelfNode() { return this; } public int countDescendantsIncludingThis() { int result = 1; for (H c : getChildren()) { result += c.countDescendantsIncludingThis(); } return result; } @NotNull public Collection<? extends H> getDescendantLeaves() { Set<H> result = new HashSet<H>(); for (H n : this) { if (n.isLeaf()) { result.add(n); } } return result; } }
/** * @author X. Chen, Tested by Curtis Lewis * @date 22 Feb '14 * @brief Stores information about a Piece object. * @details Stores a piece object in a specific board position by having 2 identical * 2D arrays where one stores an integer which corresponds to the other having * a piece at the same index location. * */ public class C4AndOthelloBoardStore { /** Method to get the 2D array (board) of pieces */ public Piece[][] getBoard() { return m_Pieces; } public Piece getPiece(int i, int j){ return m_Pieces[i][j]; } /** * Returns the heiht of the board * @return the m_BoardHeight */ public int getBoardHeight(){ return m_BoardHeight; } /** Method to set the boards height */ public void setBoardHeight(int boardHeight){ m_BoardHeight = boardHeight; } /** * Returns the width of the board * @return the m_BoardWidth */ public int getBoardWidth(){ return m_BoardWidth; } /** Method to set the boards width */ public void setBoardWidth(int boardWidth){ m_BoardWidth = boardWidth; } /** * Method to "setBoard" which instantiate both 2D arrays * Initializes board for given board size values * @param set the the Width of the board * @param set the the height of the board */ public void setBoard(int boardWidth, int boardHeight) { setBoardHeight(boardHeight); setBoardWidth(boardWidth); m_Board = new int[boardWidth][boardHeight]; m_Pieces = new Piece[boardWidth][boardHeight]; for(int j = 0; j < boardHeight; j++){ for(int i = 0; i < boardWidth; i++){ m_Pieces[i][j] = new Piece(" "); } } } /** * Method to set a piece in both arrays * initializes piece for given position values and colour * @param set the the colour of the piece * @param set the the row number of the piece * @param set the the column number of the piece */ public void setPiece(Piece piece, int column, int row) { System.out.println("Board::setPiece()"); if(m_Board[column][row] != HAS_PIECE_OBJECT){ m_Board[column][row] = HAS_PIECE_OBJECT; m_Pieces[column][row] = piece; } } public void setPiece2(Piece piece, int column, 
int row) { System.out.println("Board::setPiece()"); m_Board[column][row] = HAS_PIECE_OBJECT; m_Pieces[column][row] = piece; } public void foobar(int i, int j){ m_Board[i][j] = 1; } /** * Method to check if a board has a piece object in it i.e. is not empty */ public boolean isEmpty(int column, int row) { return m_Board[column][row] != HAS_PIECE_OBJECT; } public static void main(String[] args) { m_BoardStore = new C4AndOthelloBoardStore(); m_BoardStore.setBoard(NORMAL_BOARD_SIZE, NORMAL_BOARD_SIZE); // get the right size values. if(m_BoardStore.getBoardWidth() == NORMAL_BOARD_SIZE && m_BoardStore.getBoardHeight() == NORMAL_BOARD_SIZE) { System.out.println("C4AndOthelloBoardStore Test One NormalSize Evaluated: Correct"); } else { System.out.println("C4AndOthelloBoardStore Test One NormalSize Evaluated: Incorrect"); } // Set board with no size. m_BoardStore = new C4AndOthelloBoardStore(); m_BoardStore.setBoard(0, 0); // get the right size values. if(m_BoardStore.getBoardWidth() == 0 && m_BoardStore.getBoardHeight() == 0) { System.out.println("C4AndOthelloBoardStore Test Two NoSize Evaluated: Correct"); } else { System.out.println("C4AndOthelloBoardStore Test Two NoSize Evaluated: Incorrect"); } // Set board with sizes very big. m_BoardStore = new C4AndOthelloBoardStore(); m_BoardStore.setBoard(LARGE_BOARD_SIZE, LARGE_BOARD_SIZE); // get the right size values. if(m_BoardStore.getBoardWidth() == LARGE_BOARD_SIZE && m_BoardStore.getBoardHeight() == LARGE_BOARD_SIZE) { System.out.println("C4AndOthelloBoardStore Test Three BigSize Evaluated: Correct"); } else { System.out.println("C4AndOthelloBoardStore Test Three BigSize Evaluated: Incorrect"); } // Set board with negative sizes which is impossible. // so there is no board here. 
try { m_BoardStore = new C4AndOthelloBoardStore(); m_BoardStore.setBoard(NEGATIVE_BOARD_SIZE, NEGATIVE_BOARD_SIZE); if(m_BoardStore.getBoardWidth() == NEGATIVE_BOARD_SIZE && m_BoardStore.getBoardHeight() == NEGATIVE_BOARD_SIZE) { System.out.println("C4AndOthelloBoardStore Test Four MinusSize Evaluated: Correct"); } } catch (Exception e) { System.out.println("C4AndOthelloBoardStore Test Four MinusSize Evaluated: Incorrect"); } try { // create a board and try to set piece outside the board. m_BoardStore = new C4AndOthelloBoardStore(); m_BoardStore.setBoard(NORMAL_BOARD_SIZE, NORMAL_BOARD_SIZE); m_Piece = new Piece("black"); m_BoardStore.setPiece(m_Piece, OUTSIDE_BOARD, OUTSIDE_BOARD); // maybe it just stores the data. no change on the board. if(m_BoardStore.getBoard()[OUTSIDE_BOARD][OUTSIDE_BOARD].getColour().equals ("black")) { System.out.println("C4AndOthelloBoardStore Test Five OutsideBoard Evaluated: Correct"); } } catch (Exception e) { System.out.println("C4AndOthelloBoardStore Test Five OutsideBoard Evaluated: Incorrect"); } try { // set piece in a negative position which is not exist. m_BoardStore = new C4AndOthelloBoardStore(); m_BoardStore.setBoard(NORMAL_BOARD_SIZE, NORMAL_BOARD_SIZE); m_Piece = new Piece("black"); m_BoardStore.setPiece(m_Piece, NEGATIVE_BOARD_SIZE, NEGATIVE_BOARD_SIZE); // maybe it just stores the data. no change on the board. if(m_BoardStore.getBoard()[-1][-1].getColour().equals ("black")) { System.out.println("C4AndOthelloBoardStore Test Six NegativeBoard Evaluated: Correct"); } } catch (Exception e) { System.out.println("C4AndOthelloBoardStore Test Six NegativeBoard Evaluated: Incorrect"); } // set a black piece on the edge of the board m_BoardStore = new C4AndOthelloBoardStore(); m_BoardStore.setBoard(NORMAL_BOARD_SIZE, NORMAL_BOARD_SIZE); m_Piece = new Piece("black"); m_BoardStore.setPiece(m_Piece, 0, ROW_SEVEN); // yeah! there is a black piece! 
if(m_BoardStore.getBoard()[0][ROW_SEVEN].getColour().equals ("black")) { System.out.println("C4AndOthelloBoardStore Test Seven BlackOnSide Evaluated: Correct"); } else { System.out.println("C4AndOthelloBoardStore Test Seven BlackOnSide Evaluated: Incorrect"); } /** set a black piece on the board * then set a white piece at the same position * please do not override it ! */ m_BoardStore = new C4AndOthelloBoardStore(); m_BoardStore.setBoard(NORMAL_BOARD_SIZE, NORMAL_BOARD_SIZE); // set a black piece on column 0, row 7. m_Piece = new Piece("black"); m_BoardStore.setPiece(m_Piece, 0, ROW_SEVEN); // set a white piece again on column 0, row 7. m_Piece2 = new Piece("white"); m_BoardStore.setPiece(m_Piece2, 0, ROW_SEVEN); // you cannot put the white piece here !!! if(m_BoardStore.getBoard()[0][ROW_SEVEN].getColour().equals ("black")) { System.out.println("C4AndOthelloBoardStore Test Eight Override Evaluated: Correct"); } else { System.out.println("C4AndOthelloBoardStore Test Eight Override Evaluated: Incorrect"); } } /** * Member variables to store both 2D arrays and the boards height and width */ private int[][] m_Board; private Piece[][] m_Pieces; private int m_BoardWidth; private int m_BoardHeight; private static C4AndOthelloBoardStore m_BoardStore; private static Piece m_Piece; private static Piece m_Piece2; private static final int ROW_SEVEN = 7; private static final int NORMAL_BOARD_SIZE = 8; private static final int OUTSIDE_BOARD = 10; private static final int LARGE_BOARD_SIZE = 800; private static final int NEGATIVE_BOARD_SIZE = -1; /** Symbolic constant */ private final int HAS_PIECE_OBJECT = 1; }
package org.aries.test; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Hashtable; import java.util.Iterator; import java.util.logging.Logger; import javax.management.MBeanServerConnection; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import junit.framework.TestCase; import org.aries.util.dom.DomWriter; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; public abstract class JaxwsTest extends TestCase { protected Logger log = Logger.getLogger(getClass().getName()); private JaxwsTestHelper delegate = new JaxwsTestHelper(); public JaxwsTest() { } public JaxwsTest(String name) { super(name); } public MBeanServerConnection getServer() throws NamingException { return JaxwsTestHelper.getServer(); } public boolean isIntegrationNative() { return delegate.isIntegrationNative(); } public boolean isIntegrationMetro() { return delegate.isIntegrationMetro(); } public boolean isIntegrationCXF() { return delegate.isIntegrationCXF(); } /** * Deploy the given archive */ public void deploy(String archive) throws Exception { delegate.deploy(archive); } public void undeploy(String archive) throws Exception { delegate.undeploy(archive); } public String getServerHost() { return JaxwsTestHelper.getServerHost(); } public File getArchiveFile(String archive) { return delegate.getArchiveFile(archive); } public URL getArchiveURL(String archive) throws MalformedURLException { return delegate.getArchiveURL(archive); } public File getResourceFile(String resource) { return delegate.getResourceFile(resource); } public URL getResourceURL(String resource) throws MalformedURLException { return delegate.getResourceURL(resource); } public File createResourceFile(String filename) { File resDir = new File(JaxwsTestHelper.getTestResourcesDir()); File file = new File(resDir.getAbsolutePath() + File.separator + 
filename); return file; } public File createResourceFile(File parent, String filename) { File file = new File(parent, filename); return file; } /** * Get the client's env context for a given name. */ @SuppressWarnings("unchecked") protected InitialContext getInitialContext(String clientName) throws NamingException { InitialContext iniCtx = new InitialContext(); Hashtable env = iniCtx.getEnvironment(); env.put(Context.URL_PKG_PREFIXES, "org.jboss.naming.client"); env.put("j2ee.clientName", clientName); return new InitialContext(env); } /** * Get the client's env context */ protected InitialContext getInitialContext() throws NamingException { return getInitialContext("jbossws-client"); } public static void assertEquals(Element expElement, Element wasElement, boolean ignoreWhitespace) { normalizeWhitespace(expElement, ignoreWhitespace); normalizeWhitespace(wasElement, ignoreWhitespace); String expStr = DomWriter.printNode(expElement, false); String wasStr = DomWriter.printNode(wasElement, false); if (expStr.equals(wasStr) == false) { System.out.println("\nExp: " + expStr + "\nWas: " + wasStr); } assertEquals(expStr, wasStr); } public static void assertEquals(Element expElement, Element wasElement) { assertEquals(expElement, wasElement, false); } public static void assertEquals(Object exp, Object was) { if (exp instanceof Object[] && was instanceof Object[]) assertEqualsArray((Object[])exp, (Object[])was); else if (exp instanceof byte[] && was instanceof byte[]) assertEqualsArray((byte[])exp, (byte[])was); else if (exp instanceof boolean[] && was instanceof boolean[]) assertEqualsArray((boolean[])exp, (boolean[])was); else if (exp instanceof short[] && was instanceof short[]) assertEqualsArray((short[])exp, (short[])was); else if (exp instanceof int[] && was instanceof int[]) assertEqualsArray((int[])exp, (int[])was); else if (exp instanceof long[] && was instanceof long[]) assertEqualsArray((long[])exp, (long[])was); else if (exp instanceof float[] && was instanceof 
float[]) assertEqualsArray((float[])exp, (float[])was); else if (exp instanceof double[] && was instanceof double[]) assertEqualsArray((double[])exp, (double[])was); else TestCase.assertEquals(exp, was); } private static void assertEqualsArray(Object[] exp, Object[] was) { if (exp == null && was == null) return; if (exp != null && was != null) { if (exp.length != was.length) { fail("Expected <" + exp.length + "> array items, but was <" + was.length + ">"); } else { for (int i = 0; i < exp.length; i++) { Object compExp = exp[i]; Object compWas = was[i]; assertEquals(compExp, compWas); } } } else if (exp == null) { fail("Expected a null array, but was: " + Arrays.asList(was)); } else if (was == null) { fail("Expected " + Arrays.asList(exp) + ", but was: null"); } } private static void assertEqualsArray(byte[] exp, byte[] was) { assertTrue("Arrays don't match", Arrays.equals(exp, was)); } private static void assertEqualsArray(boolean[] exp, boolean[] was) { assertTrue("Arrays don't match", Arrays.equals(exp, was)); } private static void assertEqualsArray(short[] exp, short[] was) { assertTrue("Arrays don't match", Arrays.equals(exp, was)); } private static void assertEqualsArray(int[] exp, int[] was) { assertTrue("Arrays don't match", Arrays.equals(exp, was)); } private static void assertEqualsArray(long[] exp, long[] was) { assertTrue("Arrays don't match", Arrays.equals(exp, was)); } private static void assertEqualsArray(float[] exp, float[] was) { assertTrue("Arrays don't match", Arrays.equals(exp, was)); } private static void assertEqualsArray(double[] exp, double[] was) { assertTrue("Arrays don't match", Arrays.equals(exp, was)); } /** * Removes whitespace text nodes if they have an element sibling. 
*/ private static void normalizeWhitespace(Element element, boolean ignoreWhitespace) { boolean hasChildElement = false; ArrayList<Node> toDetach = new ArrayList<Node>(); NodeList childNodes = element.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node node = childNodes.item(i); if (node.getNodeType() == Node.TEXT_NODE) { String nodeValue = node.getNodeValue(); if (nodeValue.trim().length() == 0) toDetach.add(node); } if (node.getNodeType() == Node.ELEMENT_NODE) { normalizeWhitespace((Element)node, ignoreWhitespace); hasChildElement = true; } } // remove whitespace nodes if (hasChildElement || ignoreWhitespace) { Iterator<Node> it = toDetach.iterator(); while (it.hasNext()) { Node whiteSpaceNode = it.next(); element.removeChild(whiteSpaceNode); } } } }
/* Copyright (c) 2015 "Naftoreiclag" https://github.com/Naftoreiclag
 *
 * Distributed under the Apache License Version 2.0 (http://www.apache.org/licenses/)
 * See accompanying file LICENSE
 */

package naftoreiclag.villagefive.console;

import com.jme3.app.Application;
import com.jme3.app.state.AbstractAppState;
import com.jme3.app.state.AppStateManager;
import com.jme3.niftygui.NiftyJmeDisplay;
import de.lessvoid.nifty.Nifty;
import de.lessvoid.nifty.NiftyEventSubscriber;
import de.lessvoid.nifty.builder.PanelBuilder;
import de.lessvoid.nifty.controls.Scrollbar;
import de.lessvoid.nifty.controls.ScrollbarChangedEvent;
import de.lessvoid.nifty.controls.TextField;
import de.lessvoid.nifty.controls.TextFieldChangedEvent;
import de.lessvoid.nifty.controls.label.builder.LabelBuilder;
import de.lessvoid.nifty.elements.Element;
import de.lessvoid.nifty.input.NiftyInputEvent;
import de.lessvoid.nifty.input.NiftyInputMapping;
import de.lessvoid.nifty.input.keyboard.KeyboardInputEvent;
import de.lessvoid.nifty.screen.KeyInputHandler;
import de.lessvoid.nifty.screen.Screen;
import de.lessvoid.nifty.screen.ScreenController;
import naftoreiclag.villagefive.Main;
import naftoreiclag.villagefive.OverworldAppState;
import naftoreiclag.villagefive.util.HistoryArray;
import naftoreiclag.villagefive.util.KeyKeys;
import naftoreiclag.villagefive.world.World;

// Attaching this AppState allows a developer console to be shown. The actual console logic is handled in Console.
public class DevConsoleAppState extends AbstractAppState implements ScreenController, KeyInputHandler {

    // Recall prepareConsole(); after resizing the screen.

    // true: Re-draws the console with every println(); call. Potentially slower, but will avoid glitchy text.
    // false: Only re-draw console at most once per tick. Definitely faster, but will cause glitchy text.
    // Togglable with the secret command "toggle instant console updates"
    protected boolean instantConsoleUpdates = true;

    // How many lines are remembered
    public static final int historyLength = 100;

    // Font
    public static final String fontName = "Interface/Fonts/Default.fnt";

    // Height of each line. Used to calculate the size of the console.
    public static final int lineHeight = 18;

    // Number of lines shown on screen
    int numLines;

    // Why do I need this?
    Main app;

    NiftyJmeDisplay niftyDisplay;
    Nifty nifty;
    Screen screen;

    // Important elements declared in the xml file
    TextField textField;
    Element outputBox;
    Scrollbar scrollBar;

    // Scrollbar
    int scrollBarLoc;

    // Reacts to the user dragging the scrollbar: remember the new integer
    // position and schedule a repaint of the visible output lines.
    @NiftyEventSubscriber(id = "scrollBar")
    public void onScrollBarChange(final String id, final ScrollbarChangedEvent event) {
        int newScrollPos = (int) Math.floor(event.getValue());
        if(newScrollPos != scrollBarLoc) {
            scrollBarLoc = newScrollPos;
            markOutputContainersForUpdate();
        }
    }

    // Fassad
    Console console;

    //
    private OverworldAppState game = null;

    // Points both this AppState and the underlying Console at the running game
    // (console may be null before bind() has run, hence the guard).
    public void setGame(OverworldAppState overworldAppState) {
        if(console != null) {
            console.game = overworldAppState;
        }
        this.game = overworldAppState;
    }

    // Getting input from the textfield.
    String textFieldContents;

    // Mirrors the textfield contents into textFieldContents on every change.
    @NiftyEventSubscriber(id = "input")
    public void onTextfieldChange(final String id, final TextFieldChangedEvent event) {
        textFieldContents = event.getText();
    }

    // Mappings for this screen. After all, I am a ScreenController. It's my job to do this.
    // Translates raw keyboard events into Nifty's abstract input events:
    // console key toggles the console, Return activates (submits), Tab cycles focus.
    NiftyInputMapping mapping = new NiftyInputMapping() {
        @Override
        public NiftyInputEvent convert(KeyboardInputEvent inputEvent) {
            boolean pressed = inputEvent.isKeyDown();
            int key = inputEvent.getKey();
            if(pressed) {
                if(key == KeyKeys.consoleKey) {
                    return NiftyInputEvent.ConsoleToggle;
                } else if(key == KeyboardInputEvent.KEY_RETURN) {
                    return NiftyInputEvent.Activate;
                } else if(key == KeyboardInputEvent.KEY_TAB) {
                    if(inputEvent.isShiftDown()) {
                        return NiftyInputEvent.PrevInputElement;
                    } else {
                        return NiftyInputEvent.NextInputElement;
                    }
                }
            }
            return null;
        }
    };

    // Called by JME3 when the AppState is intialized. However, the gui may not yet be, so all that stuff is in bind();
    @Override
    public void initialize(AppStateManager stateManager, Application application) {
        super.initialize(stateManager, application);

        // Remember the application
        app = (Main) application;

        // Begin Nifty stuff.
        niftyDisplay = new NiftyJmeDisplay(app.getAssetManager(), app.getInputManager(), app.getAudioRenderer(), app.getGuiViewPort());
        nifty = niftyDisplay.getNifty();
        // Loads the gui layout; "hide" is the initial (invisible) screen.
        nifty.fromXml("Interface/DeveloperConsole.xml", "hide", this);
        app.getGuiViewPort().addProcessor(niftyDisplay);
    }

    // Called by Nifty when the gui has started up.
    @Override
    public void bind(Nifty nifty, Screen screen) {
        screen.addPreKeyboardInputHandler(mapping, this);

        this.screen = screen;

        textField = screen.findNiftyControl("input", TextField.class);
        textField.setFocus();
        outputBox = screen.findElementByName("output");
        scrollBar = screen.findNiftyControl("scrollBar", Scrollbar.class);
        scrollBar.setWorldMax(historyLength);

        prepareConsole();

        //
        console = new Console(this);
        console.game = game;
    }

    // When the user presses "Enter"
    private void sendInput() {
        if(textFieldContents == null) {
            return;
        }

        // Remove any preceding spaces.
        for(int i = 0; i < textFieldContents.length(); ++ i) {
            if(textFieldContents.charAt(i) != ' ') {
                if(i > 0) {
                    textFieldContents = textFieldContents.substring(i);
                }
                break;
            } else if(i == textFieldContents.length() - 1) {
                // Input was all spaces: treat as empty.
                textFieldContents = "";
            }
        }

        // Ignore empty inputs
        if(!textFieldContents.equals("")) {
            printLine(textFieldContents);
            console.processRawInput(textFieldContents);
        }

        // Regardless, clear
        textField.setText("");
        textFieldContents = "";
    }

    // Each line has more information than just being a string.
    HistoryArray<ConsoleLine> outputHistory = new HistoryArray<ConsoleLine>(historyLength);

    // Wipes the remembered history and blanks every visible line.
    void clearOutput() {
        outputHistory.clear();
        for(int i = 0; i < outputContainers.length; ++ i) {
            outputContainers[i].setText("");
        }
    }

    // One remembered console line; identical consecutive lines are collapsed
    // into a single entry with a repeat counter (see printLine).
    private class ConsoleLine {
        final String message;
        int repeats = 0;

        ConsoleLine(String text) {
            this.message = text;
        }

        // Equality is on the message text only, so a repeated print matches.
        @Override
        public boolean equals(Object other) {
            if(other instanceof ConsoleLine) {
                ConsoleLine o = (ConsoleLine) other;
                return o.message.equals(this.message);
            } else {
                return false;
            }
        }

        // Why do I need this? Only dUKE knows.
        @Override
        public int hashCode() {
            int hash = 5;
            hash = 97 * hash + (this.message != null ? this.message.hashCode() : 0);
            return hash;
        }

        @Override
        public String toString() {
            String ret = message;
            ret = ret.replaceAll("\t", " ");
            if(repeats > 0) {
                ret += " (Repeated " + repeats + " times)";
            }
            return ret;
        }
    }

    // Print a line to the console. Can accept strings with \n.
    protected void printLine(String message) {
        // Repeat this method for each line
        String[] lines = message.split("\n");
        for(String line : lines) {
            ConsoleLine addMe = new ConsoleLine(line);
            ConsoleLine mostRecent = outputHistory.get(0);

            // Check if this line is a repeat
            if(addMe.equals(mostRecent)) {
                ++ mostRecent.repeats;
            } else {
                // Append this line
                outputHistory.add(addMe);
            }

            // Mark the panels for an update. This is to avoid unnecessary updates to the panels.
            markOutputContainersForUpdate();
        }

        // Update scrollbar accordingly.
        int oldSize = Math.round(scrollBar.getWorldMax());
        int newSize = outputHistory.getLength() > numLines ? outputHistory.getLength() : numLines;
        if(newSize != oldSize) {
            scrollBar.setWorldMax(newSize);
            // Shift the thumb so the view stays anchored to the same lines.
            scrollBar.setValue(scrollBar.getValue() + (newSize - oldSize));
        }
    }

    // Call this whenever the output containers need updating. See also "instantConsoleUpdates".
    private void markOutputContainersForUpdate() {
        if(instantConsoleUpdates) {
            outputContainersNeedUpdating = true;
            updateOutputContainers();
        } else {
            outputContainersNeedUpdating = true;
        }
    }

    // Update all the output panels to show a changed scrollBarLoc, output, etc...
    private boolean outputContainersNeedUpdating = false;
    private void updateOutputContainers() {
        if(outputContainersNeedUpdating) {
            int scrollDisp = calculateScrollFromBottom();
            for(int i = 0; i < outputContainers.length; ++ i) {
                ConsoleLine e = outputHistory.get(i + scrollDisp);
                if(e != null) {
                    outputContainers[i].setText(e.toString());
                } else {
                    outputContainers[i].setText("");
                }
            }
            outputContainersNeedUpdating = false;
        }
    }

    // Calculate how far up the scroll bar is. 0 means the most recent line should be viewable.
    private int calculateScrollFromBottom() {
        return (Math.round(scrollBar.getWorldMax()) - this.numLines) - scrollBarLoc;
    }

    // Deferred-update mode: repaint at most once per frame.
    @Override
    public void update(float tpf) {
        if(!instantConsoleUpdates) {
            updateOutputContainers();
        }
    }

    // Lines are shown using Nifty by making this array of panels each containing a label. I've made this class to make management simpler.
    private class ConsoleElement {
        Element panel;
        Element label;
        String lastText = "";

        ConsoleElement(Nifty nifty, Screen screen, Element outputBox) {
            // Portable javadoc v
            //new LabelBuilder()
            //new PanelBuilder()

            panel = new PanelBuilder() {{
                childLayoutHorizontal();
                height(lineHeight + "px");
            }}.build(nifty, screen, outputBox);
            label = new LabelBuilder() {{
                alignLeft();
                font(fontName);
                label("");
            }}.build(nifty, screen, panel);
        }

        // Example of Java efficiency at its finest.
        // Nifty labels are immutable here, so changing the text means tearing
        // down the old label element and building a fresh one in its place.
        void setText(String message) {
            if(message == null) {
                message = "";
            }
            if(!lastText.equals(message)) {
                label.markForRemoval();
                final String dumbJava = message;
                LabelBuilder lb = new LabelBuilder();
                lb.alignLeft();
                lb.font(fontName);
                lb.label(dumbJava);
                label = lb.build(nifty, screen, panel);
                lastText = dumbJava;
            }
        }

        String getText() {
            return lastText;
        }

        void removeSelf() {
            panel.markForRemoval();
        }
    }

    // All the line-holding containers, with zero being the lowest element.
    ConsoleElement[] outputContainers;

    // Populate the "id=output" panel with the containers.
    private void prepareConsole() {
        // Calculate how many lines we can fit in there.
        int height = outputBox.getHeight();
        numLines = height / lineHeight;

        // Update scrollbar accordingly.
        scrollBar.setWorldPageSize(numLines);
        scrollBar.setWorldMax(numLines);

        // If there are already containers in the panel, clear remove those.
        // NOTE(review): this loop bound uses the NEW numLines, not
        // outputContainers.length — after a resize that shrinks/grows the
        // console this either leaks old panels or overruns the array. Verify.
        if(outputContainers != null) {
            for(int i = 0; i < numLines; ++ i) {
                outputContainers[i].removeSelf();
            }
        }

        // Make a new array and populate it.
        outputContainers = new ConsoleElement[numLines];
        for(int i = numLines - 1; i >= 0; -- i) {
            outputContainers[i] = new ConsoleElement(nifty, screen, outputBox);
        }
    }

    // Called by JME3. When switching between enabled and disabled states, simply switch between Nifty Screens.
    @Override
    public void setEnabled(boolean enable) {
        super.setEnabled(enable);
        if(this.initialized){
            if(enable) {
                nifty.gotoScreen("show");
            } else {
                nifty.gotoScreen("hide");
            }
        }
    }

    // Disables the console and flushes input
    @Override
    public boolean keyEvent(NiftyInputEvent inputEvent) {
        if(textField != null) {
            textField.setFocus();
        }
        if(inputEvent == NiftyInputEvent.ConsoleToggle) {
            this.setEnabled(false);
            return true;
        } else if(inputEvent == NiftyInputEvent.Activate) {
            sendInput();
            // NOTE(review): falls through to return false, so the Activate
            // event is not marked consumed — confirm this is intentional.
        }
        return false;
    }

    // Unused stuff below
    @Override
    public void cleanup() {}
    @Override
    public void onStartScreen() {}
    @Override
    public void onEndScreen() {}
}
package kr.jihee.text_toolkit.lang;

import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Stream;

import kr.jihee.text_toolkit.lang.JObject.DoubleCaster;
import kr.jihee.text_toolkit.lang.JObject.IntCaster;
import kr.jihee.text_toolkit.lang.JObject.JMapCaster;
import kr.jihee.text_toolkit.lang.JObject.ListCaster;
import kr.jihee.text_toolkit.lang.JObject.LongCaster;
import kr.jihee.text_toolkit.lang.JObject.MapCaster;
import kr.jihee.text_toolkit.lang.JObject.StrCaster;

/**
 * List extension that adds type-casting element getters (delegating to the
 * JObject caster helpers) plus a family of LinkedList-backed, fluent
 * implementations (GList/AnyList/IntList/StrList/MapList/ListList).
 * All concrete classes hide their constructor behind static factories
 * (empty/of/by) and return {@code this} from append methods for chaining.
 */
public interface JList<E> extends List<E> {

    // Element accessors that cast via the project's caster helpers.
    default String getString(int index) {
        return StrCaster.to(get(index));
    }

    default Integer getInt(int index) {
        return IntCaster.to(get(index));
    }

    default Long getLong(int index) {
        return LongCaster.to(get(index));
    }

    default Double getDouble(int index) {
        return DoubleCaster.to(get(index));
    }

    default <T extends List<?>> T getList(int index) {
        return ListCaster.to(get(index));
    }

    default <T extends Map<?, ?>> T getMap(int index) {
        return MapCaster.to(get(index));
    }

    /** List of lists; append(...) wraps each varargs call in its own AnyList row. */
    public static class ListList extends LinkedList<List<Object>> {
        private static final long serialVersionUID = 674697948134483888L;

        private ListList() {
        }

        public static ListList empty() {
            return new ListList();
        }

        public ListList append(Object... objs) {
            add(AnyList.of(objs));
            return this;
        }
    }

    /** Generic fluent list; elements are stored as-is (no casting). */
    public static class GList<E> extends LinkedList<E>implements JList<E> {
        private static final long serialVersionUID = -1222837942217112751L;

        private GList() {
            super();
        }

        public static <E> GList<E> empty() {
            return new GList<>();
        }

        @SuppressWarnings("unchecked")
        public static <E> GList<E> of(E... objs) {
            return GList.<E> empty().append(objs);
        }

        public static <E> GList<E> by(Collection<E> objs) {
            return GList.<E> empty().appendAll(objs);
        }

        public static <E> GList<E> by(Iterable<E> objs) {
            return GList.<E> empty().appendAll(objs);
        }

        public static <E> GList<E> by(E[] objs) {
            return GList.<E> empty().appendAll(objs);
        }

        @SuppressWarnings("unchecked")
        public GList<E> append(E... objs) {
            for (E obj : objs)
                add(obj);
            return this;
        }

        public GList<E> appendAll(Collection<E> objs) {
            for (E obj : objs)
                add(obj);
            return this;
        }

        public GList<E> appendAll(Iterable<E> objs) {
            for (E obj : objs)
                add(obj);
            return this;
        }

        public GList<E> appendAll(E[] objs) {
            for (E obj : objs)
                add(obj);
            return this;
        }
    }

    /** Untyped fluent list of Object, convertible to StrList/IntList. */
    public static class AnyList extends LinkedList<Object>implements JList<Object> {
        private static final long serialVersionUID = -8717677411924231998L;

        private AnyList() {
            super();
        }

        public static AnyList empty() {
            return new AnyList();
        }

        public static AnyList of(Object... objs) {
            return AnyList.empty().append(objs);
        }

        public static AnyList by(Collection<?> objs) {
            return AnyList.empty().appendAll(objs);
        }

        public static AnyList by(Iterable<?> objs) {
            return AnyList.empty().appendAll(objs);
        }

        public static AnyList by(Object[] objs) {
            return AnyList.empty().appendAll(objs);
        }

        public AnyList append(Object... objs) {
            for (Object obj : objs)
                add(obj);
            return this;
        }

        public AnyList appendAll(Collection<?> objs) {
            for (Object obj : objs)
                add(obj);
            return this;
        }

        public AnyList appendAll(Iterable<?> objs) {
            for (Object obj : objs)
                add(obj);
            return this;
        }

        public AnyList appendAll(Object[] objs) {
            for (Object obj : objs)
                add(obj);
            return this;
        }

        // Conversions cast each element through the respective caster.
        public StrList toStrList() {
            return StrList.empty().appendAll(this.toArray());
        }

        public IntList toIntList() {
            return IntList.empty().appendAll(this.toArray());
        }
    }

    /** Integer list; every appended element is coerced through IntCaster. */
    public static class IntList extends LinkedList<Integer>implements JList<Integer> {
        // NOTE(review): same serialVersionUID as StrList (copy-paste) —
        // harmless across distinct classes, but worth regenerating.
        private static final long serialVersionUID = 2669870464154862034L;

        private IntList() {
            super();
        }

        public static IntList empty() {
            return new IntList();
        }

        public static IntList of(Object... objs) {
            return IntList.empty().append(objs);
        }

        public static IntList by(Collection<?> objs) {
            return IntList.empty().appendAll(objs);
        }

        public static IntList by(Iterable<?> objs) {
            return IntList.empty().appendAll(objs);
        }

        public static IntList by(Object[] objs) {
            return IntList.empty().appendAll(objs);
        }

        public static IntList by(int[] objs) {
            return IntList.empty().appendAll(objs);
        }

        public IntList append(Object... objs) {
            for (Object obj : objs)
                add(IntCaster.to(obj));
            return this;
        }

        public IntList appendAll(Collection<?> objs) {
            for (Object obj : objs)
                add(IntCaster.to(obj));
            return this;
        }

        public IntList appendAll(Iterable<?> objs) {
            for (Object obj : objs)
                add(IntCaster.to(obj));
            return this;
        }

        public IntList appendAll(Object[] objs) {
            for (Object obj : objs)
                add(IntCaster.to(obj));
            return this;
        }

        public IntList appendAll(int[] objs) {
            for (int obj : objs)
                add(obj);
            return this;
        }

        // In-place map; mutates and returns this list (not a copy).
        public IntList map(Function<? super Integer, ? extends Integer> mapper) {
            for (int i = 0; i < this.size(); i++)
                this.set(i, mapper.apply(this.get(i)));
            return this;
        }

        public Stream<JMap> toMapStream(String key) {
            return this.stream().map(x -> JMap.of(key, x));
        }

        // In-place sort; returns this list.
        public IntList sorted() {
            Collections.sort(this);
            return this;
        }

        // Aggregates; note the sentinel fallbacks for an empty list
        // (MAX_VALUE/MIN_VALUE/Double.MIN_VALUE) rather than exceptions.
        public int min() {
            return this.stream().mapToInt(x -> x).min().orElse(Integer.MAX_VALUE);
        }

        public int max() {
            return this.stream().mapToInt(x -> x).max().orElse(Integer.MIN_VALUE);
        }

        public int sum() {
            return this.stream().mapToInt(x -> x).sum();
        }

        public double avg() {
            return this.stream().mapToInt(x -> x).average().orElse(Double.MIN_VALUE);
        }
    }

    /** String list; every appended element is coerced through StrCaster. */
    public static class StrList extends LinkedList<String>implements JList<String> {
        private static final long serialVersionUID = 2669870464154862034L;

        private StrList() {
            super();
        }

        public static StrList empty() {
            return new StrList();
        }

        public static StrList of(Object... objs) {
            return StrList.empty().append(objs);
        }

        public static StrList by(Collection<?> objs) {
            return StrList.empty().appendAll(objs);
        }

        public static StrList by(Iterable<?> objs) {
            return StrList.empty().appendAll(objs);
        }

        public static StrList by(Object[] objs) {
            return StrList.empty().appendAll(objs);
        }

        public StrList append(Object... objs) {
            for (Object obj : objs)
                add(StrCaster.to(obj));
            return this;
        }

        public StrList appendAll(Collection<?> objs) {
            for (Object obj : objs)
                add(StrCaster.to(obj));
            return this;
        }

        public StrList appendAll(Iterable<?> objs) {
            for (Object obj : objs)
                add(StrCaster.to(obj));
            return this;
        }

        public StrList appendAll(Object[] objs) {
            for (Object obj : objs)
                add(StrCaster.to(obj));
            return this;
        }

        // In-place sort; returns this list.
        public StrList sorted() {
            Collections.sort(this);
            return this;
        }

        public String join() {
            return join(", ");
        }

        public String join(String delimiter) {
            return String.join(delimiter, this);
        }

        // Returns a NEW list containing this list's contents `times` times.
        public StrList repeat(int times) {
            StrList self = StrList.empty();
            for (int i = 1; i <= times; i++)
                this.forEach(str -> self.add(str));
            return self;
        }

        // Same, but each copy is transformed by function(element, passIndex);
        // passIndex runs 1..times. AtomicInteger is used only because the
        // lambda needs an effectively-final counter, not for thread safety.
        public StrList repeat(int times, BiFunction<String, Integer, String> function) {
            StrList self = StrList.empty();
            for (AtomicInteger i = new AtomicInteger(1); i.get() <= times; i.incrementAndGet())
                this.forEach(str -> self.add(function.apply(str, i.get())));
            return self;
        }
    }

    /** List of JMap; note append(Object...) builds ONE map from all varargs. */
    public static class MapList extends LinkedList<JMap>implements JList<JMap> {
        private static final long serialVersionUID = -7328438735388600593L;

        private MapList() {
            super();
        }

        public static MapList empty() {
            return new MapList();
        }

        public static MapList of(Object... objs) {
            return MapList.empty().append(objs);
        }

        public static MapList by(Collection<?> objs) {
            return MapList.empty().appendAll(objs);
        }

        public static MapList by(Iterable<?> objs) {
            return MapList.empty().appendAll(objs);
        }

        public static MapList by(Object[] objs) {
            return MapList.empty().appendAll(objs);
        }

        // Unlike the sibling classes, this adds a single JMap built from the
        // whole varargs array (key/value pairs), not one element per arg.
        public MapList append(Object... objs) {
            add(JMap.of(objs));
            return this;
        }

        public MapList appendAll(Collection<?> objs) {
            for (Object obj : objs)
                add(JMapCaster.to(obj));
            return this;
        }

        public MapList appendAll(Iterable<?> objs) {
            for (Object obj : objs)
                add(JMapCaster.to(obj));
            return this;
        }

        public MapList appendAll(Object[] objs) {
            for (Object obj : objs)
                add(JMapCaster.to(obj));
            return this;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache.query.internal.index; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.geode.cache.Region; import org.apache.geode.cache.query.Index; import org.apache.geode.cache.query.IndexExistsException; import org.apache.geode.cache.query.IndexNameConflictException; import org.apache.geode.cache.query.IndexStatistics; import org.apache.geode.cache.query.IndexType; import org.apache.geode.cache.query.QueryException; import org.apache.geode.cache.query.QueryInvocationTargetException; import org.apache.geode.cache.query.SelectResults; import org.apache.geode.cache.query.TypeMismatchException; import org.apache.geode.cache.query.internal.CompiledValue; import org.apache.geode.cache.query.internal.ExecutionContext; import org.apache.geode.cache.query.internal.RuntimeIterator; import org.apache.geode.cache.query.types.ObjectType; import org.apache.geode.internal.cache.BucketRegion; import org.apache.geode.internal.cache.InternalCache; import org.apache.geode.internal.cache.PartitionedRegion; 
import org.apache.geode.internal.cache.PartitionedRegionDataStore;
import org.apache.geode.internal.cache.RegionEntry;
import org.apache.geode.internal.cache.execute.BucketMovedException;

/**
 * This class implements a Partitioned index over a group of partitioned region buckets.
 *
 * @since GemFire 5.1
 */
public class PartitionedIndex extends AbstractIndex {

  /**
   * Contains the reference for all the local indexed buckets.
   * All access must synchronize on this map (it is also a synchronizedMap,
   * but compound operations below take the explicit lock).
   */
  private Map<Region, List<Index>> bucketIndexes =
      Collections.synchronizedMap(new HashMap<Region, List<Index>>());

  // An arbitrary bucket index from this PartiionedIndex that is used as a representative
  // index for the entire PartitionIndex. Usually used for scoring/sizing of an index when
  // selecting which index to use
  private volatile Index arbitraryBucketIndex;

  /**
   * Type on index represented by this partitioned index.
   *
   * @see IndexType#FUNCTIONAL
   * @see IndexType#PRIMARY_KEY
   * @see IndexType#HASH
   */
  private IndexType type;

  /**
   * Number of remote buckets indexed when creating an index on the partitioned region instance.
   * NOTE(review): field name carries a typo ("Bucekts"); private, so safe to rename later.
   */
  private int numRemoteBucektsIndexed;

  /**
   * String for imports if needed for index creations
   */
  private String imports;

  // Tracks distinct map-index keys seen, to drive the NumMapIndexKeys stat.
  protected Set mapIndexKeys = Collections.newSetFromMap(new ConcurrentHashMap());

  // Flag indicating that the populationg of this index is in progress
  private volatile boolean populateInProgress;

  /**
   * Constructor for partitioned indexed. Creates the partitioned index on given a partitioned
   * region. An index can be created programmatically or through cache.xml during initialization.
   */
  public PartitionedIndex(InternalCache cache, IndexType iType, String indexName, Region r,
      String indexedExpression, String fromClause, String imports) {
    super(cache, indexName, r, fromClause, indexedExpression, null, fromClause, indexedExpression,
        null, null);
    this.type = iType;
    this.imports = imports;

    if (iType == IndexType.HASH) {
      // Hash indexes require synchronous maintenance; reject otherwise.
      if (!getRegion().getAttributes().getIndexMaintenanceSynchronous()) {
        throw new UnsupportedOperationException(
            "Hash index is currently not supported for regions with Asynchronous index maintenance.");
      }
    }
  }

  /**
   * Adds an index on a bucket to the list of already indexed buckets in the partitioned region.
   *
   * @param index bucket index to be added to the list.
   */
  public void addToBucketIndexes(Region r, Index index) {
    synchronized (this.bucketIndexes) {
      // First added index becomes the representative, if none is set yet.
      setArbitraryBucketIndex(index);
      List<Index> indexes = this.bucketIndexes.get(r);
      if (indexes == null) {
        indexes = new ArrayList<Index>();
      }
      indexes.add(index);
      bucketIndexes.put(r, indexes);
    }
  }

  // Removes a bucket index; if it was the representative index, picks a new one.
  public void removeFromBucketIndexes(Region r, Index index) {
    synchronized (this.bucketIndexes) {
      List<Index> indexes = this.bucketIndexes.get(r);
      if (indexes != null) {
        indexes.remove(index);
        if (indexes.isEmpty()) {
          this.bucketIndexes.remove(r);
        }
      }
      if (index == arbitraryBucketIndex) {
        resetArbitraryBucketIndex(retrieveArbitraryBucketIndex());
      }
    }
  }

  /**
   * Returns the number of locally indexed buckets.
   *
   * @return int number of buckets.
   */
  public int getNumberOfIndexedBuckets() {
    synchronized (this.bucketIndexes) {
      int size = 0;
      for (List<Index> indexList : bucketIndexes.values()) {
        size += indexList.size();
      }
      return size;
    }
  }

  /**
   * Gets a collection of all the bucket indexes created so far.
   *
   * @return bucketIndexes collection of all the bucket indexes.
   */
  public List getBucketIndexes() {
    synchronized (this.bucketIndexes) {
      // Flatten per-region lists into one snapshot copy.
      List<Index> indexes = new ArrayList<>();
      for (List<Index> indexList : bucketIndexes.values()) {
        indexes.addAll(indexList);
      }
      return indexes;
    }
  }

  // Snapshot copy of the bucket indexes for a single bucket region.
  public List<Index> getBucketIndexes(Region r) {
    synchronized (this.bucketIndexes) {
      List<Index> indexes = new ArrayList<Index>();
      List<Index> indexList = bucketIndexes.get(r);
      if (indexList != null) {
        indexes.addAll(indexList);
      }
      return indexes;
    }
  }

  // Only sets the representative if one has not been chosen yet.
  public void setArbitraryBucketIndex(Index index) {
    if (arbitraryBucketIndex == null) {
      arbitraryBucketIndex = index;
    }
  }

  // Unconditionally replaces the representative (used after removal).
  private void resetArbitraryBucketIndex(Index index) {
    arbitraryBucketIndex = index;
  }

  // Picks any currently-registered bucket index, or null if none remain.
  public Index retrieveArbitraryBucketIndex() {
    Index index = null;
    synchronized (this.bucketIndexes) {
      if (this.bucketIndexes.size() > 0) {
        List<Index> indexList = this.bucketIndexes.values().iterator().next();
        if (indexList != null && indexList.size() > 0) {
          index = indexList.get(0);
        }
      }
    }
    return index;
  }

  public Index getBucketIndex() {
    return arbitraryBucketIndex;
  }

  // First (region, index-list) entry, or null when no buckets are indexed.
  protected Map.Entry<Region, List<Index>> getFirstBucketIndex() {
    Map.Entry<Region, List<Index>> firstIndexEntry = null;
    synchronized (this.bucketIndexes) {
      if (this.bucketIndexes.size() > 0) {
        firstIndexEntry = this.bucketIndexes.entrySet().iterator().next();
      }
    }
    return firstIndexEntry;
  }

  /**
   * Returns the type of index this partitioned index represents.
   *
   * @return indexType type of partitioned index.
   */
  @Override
  public IndexType getType() {
    return type;
  }

  /**
   * Returns the index for the bucket.
   */
  public static AbstractIndex getBucketIndex(PartitionedRegion pr, String indexName, Integer bId)
      throws QueryInvocationTargetException {
    try {
      pr.checkReadiness();
    } catch (Exception ex) {
      // Wrap readiness failures so the query layer can retry/redirect.
      throw new QueryInvocationTargetException(ex.getMessage());
    }
    PartitionedRegionDataStore prds = pr.getDataStore();
    BucketRegion bukRegion;
    bukRegion = (BucketRegion) prds.getLocalBucketById(bId);
    if (bukRegion == null) {
      // Bucket has moved to another member; caller must re-route.
      throw new BucketMovedException("Bucket not found for the id :" + bId);
    }
    AbstractIndex index = null;
    if (bukRegion.getIndexManager() != null) {
      index = (AbstractIndex) (bukRegion.getIndexManager().getIndex(indexName));
    } else {
      if (pr.getCache().getLogger().fineEnabled()) {
        pr.getCache().getLogger().fine("Index Manager not found for the bucket region "
            + bukRegion.getFullPath() + " unable to fetch the index " + indexName);
      }
      throw new QueryInvocationTargetException(
          "Index Manager not found, " + " unable to fetch the index " + indexName);
    }
    return index;
  }

  /**
   * Verify if the index is available of the buckets. If not create index on the bucket.
   */
  public void verifyAndCreateMissingIndex(List buckets) throws QueryInvocationTargetException {
    PartitionedRegion pr = (PartitionedRegion) this.getRegion();
    PartitionedRegionDataStore prds = pr.getDataStore();

    for (Object bId : buckets) {
      // create index
      BucketRegion bukRegion = (BucketRegion) prds.getLocalBucketById((Integer) bId);
      if (bukRegion == null) {
        throw new QueryInvocationTargetException("Bucket not found for the id :" + bId);
      }
      IndexManager im = IndexUtils.getIndexManager(cache, bukRegion, true);
      if (im != null && im.getIndex(indexName) == null) {
        try {
          if (pr.getCache().getLogger().fineEnabled()) {
            pr.getCache().getLogger()
                .fine("Verifying index presence on bucket region. " + " Found index "
                    + this.indexName + " not present on the bucket region "
                    + bukRegion.getFullPath() + ", index will be created on this region.");
          }

          ExecutionContext externalContext = new ExecutionContext(null, bukRegion.getCache());
          externalContext.setBucketRegion(pr, bukRegion);

          im.createIndex(this.indexName, this.type, this.originalIndexedExpression, this.fromClause,
              this.imports, externalContext, this, true);
        } catch (IndexExistsException iee) {
          // Index exists.
        } catch (IndexNameConflictException ince) {
          // ignore.
        }
      }
    }
  }

  @Override
  protected boolean isCompactRangeIndex() {
    return false;
  }

  /**
   * Set the number of remotely indexed buckets when this partitioned index was created.
   *
   * @param remoteBucketsIndexed int representing number of remote buckets.
   */
  public void setRemoteBucketesIndexed(int remoteBucketsIndexed) {
    this.numRemoteBucektsIndexed = remoteBucketsIndexed;
  }

  /**
   * Returns the number of remotely indexed buckets by this partitioned index.
   *
   * @return int number of remote indexed buckets.
   */
  public int getNumRemoteBucketsIndexed() {
    return this.numRemoteBucektsIndexed;
  }

  /**
   * The Region this index is on.
   *
   * @return the Region for this index
   */
  @Override
  public Region getRegion() {
    return super.getRegion();
  }

  /**
   * Not supported on partitioned index.
   */
  @Override
  void addMapping(RegionEntry entry) throws IMQException {
    throw new RuntimeException(
        "Not supported on partitioned index");
  }

  /**
   * Not supported on partitioned index.
   */
  @Override
  public void initializeIndex(boolean loadEntries) throws IMQException {
    throw new RuntimeException(
        "Not supported on partitioned index");
  }

  /**
   * Not supported on partitioned index.
   */
  @Override
  void lockedQuery(Object key, int operator, Collection results, CompiledValue iterOps,
      RuntimeIterator indpndntItr, ExecutionContext context, List projAttrib,
      SelectResults intermediateResults, boolean isIntersection) {
    throw new RuntimeException(
        "Not supported on partitioned index");
  }

  /**
   * Not supported on partitioned index.
   */
  @Override
  void recreateIndexData() throws IMQException {
    throw new RuntimeException(
        "Not supported on partitioned index");
  }

  /**
   * Not supported on partitioned index.
   */
  @Override
  void removeMapping(RegionEntry entry, int opCode) {
    throw new RuntimeException(
        "Not supported on partitioned index");
  }

  /**
   * Returns false, clear is not supported on partitioned index.
   */
  @Override
  public boolean clear() throws QueryException {
    return false;
  }

  /*
   * Not supported on partitioned index.
   */
  /*
   * public void destroy() { throw new
   * RuntimeException("Not supported on partitioned index".
   * toLocalizedString()); }
   */

  /**
   * Not supported on partitioned index.
   */
  @Override
  public IndexStatistics getStatistics() {
    return this.internalIndexStats;
  }

  /**
   * Returns string representing imports.
   */
  public String getImports() {
    return imports;
  }

  /**
   * String representing the state.
   *
   * @return string representing all the relevant information.
   */
  public String toString() {
    StringBuffer st = new StringBuffer();
    st.append(super.toString()).append("imports : ").append(imports);
    return st.toString();
  }

  // Lazily creates the shared stats object; reused across calls.
  @Override
  protected InternalIndexStatistics createStats(String indexName) {
    if (this.internalIndexStats == null) {
      this.internalIndexStats = new PartitionedIndexStatistics(this.indexName);
    }
    return this.internalIndexStats;
  }

  /**
   * This will create extra {@link IndexStatistics} statistics for MapType PartitionedIndex.
   *
   * @return new PartitionedIndexStatistics
   */
  protected InternalIndexStatistics createExplicitStats(String indexName) {
    return new PartitionedIndexStatistics(indexName);
  }

  /**
   * Internal class for partitioned index statistics. Statistics are not supported right now.
   * Thin delegation wrapper around the VSD-backed IndexStats.
   */
  class PartitionedIndexStatistics extends InternalIndexStatistics {
    private IndexStats vsdStats;

    public PartitionedIndexStatistics(String indexName) {
      this.vsdStats = new IndexStats(getRegion().getCache().getDistributedSystem(), indexName);
    }

    /**
     * Return the total number of times this index has been updated
     */
    @Override
    public long getNumUpdates() {
      return this.vsdStats.getNumUpdates();
    }

    @Override
    public void incNumValues(int delta) {
      this.vsdStats.incNumValues(delta);
    }

    @Override
    public void incNumUpdates() {
      this.vsdStats.incNumUpdates();
    }

    @Override
    public void incNumUpdates(int delta) {
      this.vsdStats.incNumUpdates(delta);
    }

    @Override
    public void updateNumKeys(long numKeys) {
      this.vsdStats.updateNumKeys(numKeys);
    }

    @Override
    public void incNumKeys(long numKeys) {
      this.vsdStats.incNumKeys(numKeys);
    }

    @Override
    public void incNumMapIndexKeys(long numKeys) {
      this.vsdStats.incNumMapIndexKeys(numKeys);
    }

    @Override
    public void incUpdateTime(long delta) {
      this.vsdStats.incUpdateTime(delta);
    }

    @Override
    public void incUpdatesInProgress(int delta) {
      this.vsdStats.incUpdatesInProgress(delta);
    }

    @Override
    public void incNumUses() {
      this.vsdStats.incNumUses();
    }

    @Override
    public void incUseTime(long delta) {
      this.vsdStats.incUseTime(delta);
    }

    @Override
    public void incUsesInProgress(int delta) {
      this.vsdStats.incUsesInProgress(delta);
    }

    @Override
    public void incReadLockCount(int delta) {
      this.vsdStats.incReadLockCount(delta);
    }

    @Override
    public void incNumBucketIndexes(int delta) {
      this.vsdStats.incNumBucketIndexes(delta);
    }

    /**
     * Returns the number of keys in this index at the highest level
     */
    @Override
    public long getNumberOfMapIndexKeys() {
      return this.vsdStats.getNumberOfMapIndexKeys();
    }

    /**
     * Returns the total amount of time (in nanoseconds) spent updating this index.
     */
    @Override
    public long getTotalUpdateTime() {
      return this.vsdStats.getTotalUpdateTime();
    }

    /**
     * Returns the total number of times this index has been accessed by a query.
     */
    @Override
    public long getTotalUses() {
      return this.vsdStats.getTotalUses();
    }

    /**
     * Returns the number of keys in this index.
     */
    @Override
    public long getNumberOfKeys() {
      return this.vsdStats.getNumberOfKeys();
    }

    /**
     * Returns the number of values in this index.
     */
    @Override
    public long getNumberOfValues() {
      return this.vsdStats.getNumberOfValues();
    }

    /**
     * Return the number of read locks taken on this index
     */
    @Override
    public int getReadLockCount() {
      return this.vsdStats.getReadLockCount();
    }

    @Override
    public int getNumberOfBucketIndexes() {
      return vsdStats.getNumberOfBucketIndexes();
    }

    @Override
    public void close() {
      this.vsdStats.close();
    }

    public String toString() {
      StringBuffer sb = new StringBuffer();
      sb.append("No Keys = ").append(getNumberOfKeys()).append("\n");
      sb.append("No Map Index Keys = ").append(getNumberOfMapIndexKeys()).append("\n");
      sb.append("No Values = ").append(getNumberOfValues()).append("\n");
      sb.append("No Uses = ").append(getTotalUses()).append("\n");
      sb.append("No Updates = ").append(getNumUpdates()).append("\n");
      sb.append("Total Update time = ").append(getTotalUpdateTime()).append("\n");
      return sb.toString();
    }
  }

  @Override
  void instantiateEvaluator(IndexCreationHelper indexCreationHelper) {
    throw new UnsupportedOperationException();
  }

  @Override
  public ObjectType getResultSetType() {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported on partitioned index.
   */
  @Override
  void lockedQuery(Object lowerBoundKey, int lowerBoundOperator, Object upperBoundKey,
      int upperBoundOperator, Collection results, Set keysToRemove, ExecutionContext context)
      throws TypeMismatchException {
    throw new RuntimeException(
        "Not supported on partitioned index");
  }

  @Override
  public int getSizeEstimate(Object key, int op, int matchLevel) {
    throw new UnsupportedOperationException("This method should not have been invoked");
  }

  @Override
  void lockedQuery(Object key, int operator, Collection results, Set keysToRemove,
      ExecutionContext context) throws TypeMismatchException {
    throw new RuntimeException("Not supported on partitioned index");
  }

  @Override
  void addMapping(Object key, Object value, RegionEntry entry) throws IMQException {
    throw new RuntimeException(
        "Not supported on partitioned index");
  }

  @Override
  void saveMapping(Object key, Object value, RegionEntry entry) throws IMQException {
    throw new RuntimeException(
        "Not supported on partitioned index");
  }

  // Bumps the map-index-key stat the first time a given map key is seen.
  // NOTE(review): contains()+add() is not atomic even on a concurrent set —
  // two racing threads could both increment for the same new key. Confirm
  // whether an occasional over-count is acceptable (add()'s boolean return
  // would make this race-free).
  public void incNumMapKeysStats(Object mapKey) {
    if (internalIndexStats != null) {
      if (!mapIndexKeys.contains(mapKey)) {
        mapIndexKeys.add(mapKey);
        this.internalIndexStats.incNumMapIndexKeys(1);
      }
    }
  }

  public void incNumBucketIndexes() {
    if (internalIndexStats != null) {
      this.internalIndexStats.incNumBucketIndexes(1);
    }
  }

  // Empty only when EVERY local bucket index is empty; note this also
  // returns true when there are no bucket indexes at all.
  @Override
  public boolean isEmpty() {
    boolean empty = true;
    for (Object index : getBucketIndexes()) {
      empty = ((AbstractIndex) index).isEmpty();
      if (!empty) {
        return false;
      }
    }
    return empty;
  }

  public boolean isPopulateInProgress() {
    return populateInProgress;
  }

  public void setPopulateInProgress(boolean populateInProgress) {
    this.populateInProgress = populateInProgress;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.vector.expressions; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.io.DateWritableV2; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.io.Text; import org.apache.hive.common.util.DateParser; public class VectorUDFDateAddColCol extends VectorExpression { private static final long serialVersionUID = 1L; private final int colNum1; private final int colNum2; protected boolean isPositive = true; private transient final Text text = new Text(); private transient final DateParser dateParser = new DateParser(); 
// Transient members initialized by transientInit method. private transient PrimitiveCategory primitiveCategory; public VectorUDFDateAddColCol(int colNum1, int colNum2, int outputColumnNum) { super(outputColumnNum); this.colNum1 = colNum1; this.colNum2 = colNum2; } public VectorUDFDateAddColCol() { super(); // Dummy final assignments. colNum1 = -1; colNum2 = -1; } @Override public void transientInit(Configuration conf) throws HiveException { super.transientInit(conf); primitiveCategory = ((PrimitiveTypeInfo) inputTypeInfos[0]).getPrimitiveCategory(); } @Override public void evaluate(VectorizedRowBatch batch) throws HiveException { if (childExpressions != null) { super.evaluateChildren(batch); } ColumnVector inputColVector1 = batch.cols[colNum1]; LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2]; int[] sel = batch.selected; int n = batch.size; long[] vector2 = inputColVector2.vector; LongColumnVector outV = (LongColumnVector) batch.cols[outputColumnNum]; long[] outputVector = outV.vector; if (n <= 0) { // Nothing to do return; } /* * Propagate null values for a two-input operator and set isRepeating and noNulls appropriately. */ NullUtil.propagateNullsColCol(inputColVector1, inputColVector2, outV, batch.selected, batch.size, batch.selectedInUse); switch (primitiveCategory) { case DATE: // Now disregard null in second pass. if ((inputColVector1.isRepeating) && (inputColVector2.isRepeating)) { // All must be selected otherwise size would be zero // Repeating property will not change. 
outV.isRepeating = true; outputVector[0] = evaluateDate(inputColVector1, 0, vector2[0]); } else if (inputColVector1.isRepeating) { evaluateRepeatedDate(inputColVector1, vector2, outV, batch.selectedInUse, batch.selected, n); } else if (inputColVector2.isRepeating) { final long repeatedNumDays = vector2[0]; if (batch.selectedInUse) { for (int j = 0; j != n; j++) { int i = sel[j]; outputVector[i] = evaluateDate(inputColVector1, i, repeatedNumDays); } } else { for (int i = 0; i != n; i++) { outputVector[i] = evaluateDate(inputColVector1, i, repeatedNumDays); } } } else if (batch.selectedInUse) { for (int j = 0; j != n; j++) { int i = sel[j]; outputVector[i] = evaluateDate(inputColVector1, i, vector2[i]); } } else { for (int i = 0; i != n; i++) { outputVector[i] = evaluateDate(inputColVector1, i, vector2[i]); } } break; case TIMESTAMP: // Now disregard null in second pass. if ((inputColVector1.isRepeating) && (inputColVector2.isRepeating)) { // All must be selected otherwise size would be zero // Repeating property will not change. outV.isRepeating = true; outputVector[0] = evaluateTimestamp(inputColVector1, 0, vector2[0]); } else if (inputColVector1.isRepeating) { evaluateRepeatedTimestamp(inputColVector1, vector2, outV, batch.selectedInUse, batch.selected, n); } else if (inputColVector2.isRepeating) { final long repeatedNumDays = vector2[0]; if (batch.selectedInUse) { for (int j = 0; j != n; j++) { int i = sel[j]; outputVector[i] = evaluateTimestamp(inputColVector1, i, repeatedNumDays); } } else { for (int i = 0; i != n; i++) { outputVector[i] = evaluateTimestamp(inputColVector1, i, repeatedNumDays); } } } else if (batch.selectedInUse) { for (int j = 0; j != n; j++) { int i = sel[j]; outputVector[i] = evaluateTimestamp(inputColVector1, i, vector2[i]); } } else { for (int i = 0; i != n; i++) { outputVector[i] = evaluateTimestamp(inputColVector1, i, vector2[i]); } } break; case STRING: case CHAR: case VARCHAR: // Now disregard null in second pass. 
if ((inputColVector1.isRepeating) && (inputColVector2.isRepeating)) { // All must be selected otherwise size would be zero // Repeating property will not change. outV.isRepeating = true; evaluateString((BytesColumnVector) inputColVector1, outV, 0, vector2[0]); } else if (inputColVector1.isRepeating) { evaluateRepeatedString((BytesColumnVector) inputColVector1, vector2, outV, batch.selectedInUse, batch.selected, n); } else if (inputColVector2.isRepeating) { final long repeatedNumDays = vector2[0]; if (batch.selectedInUse) { for (int j = 0; j != n; j++) { int i = sel[j]; evaluateString((BytesColumnVector) inputColVector1, outV, i, repeatedNumDays); } } else { for (int i = 0; i != n; i++) { evaluateString((BytesColumnVector) inputColVector1, outV, i, repeatedNumDays); } } } else if (batch.selectedInUse) { for (int j = 0; j != n; j++) { int i = sel[j]; evaluateString((BytesColumnVector) inputColVector1, outV, i, vector2[i]); } } else { for (int i = 0; i != n; i++) { evaluateString((BytesColumnVector) inputColVector1, outV, i, vector2[i]); } } break; default: throw new Error("Unsupported input type " + primitiveCategory.name()); } } protected void evaluateRepeatedCommon(long days, long[] vector2, LongColumnVector outputVector, boolean selectedInUse, int[] selected, int n) { if (isPositive) { if (selectedInUse) { for (int j = 0; j != n; j++) { int i = selected[j]; outputVector.vector[i] = days + vector2[i]; } } else { for (int i = 0; i != n; i++) { outputVector.vector[i] = days + vector2[i]; } } } else { if (selectedInUse) { for (int j = 0; j != n; j++) { int i = selected[j]; outputVector.vector[i] = days - vector2[i]; } } else { for (int i = 0; i != n; i++) { outputVector.vector[i] = days - vector2[i]; } } } } protected long evaluateDate(ColumnVector columnVector, int index, long numDays) { LongColumnVector lcv = (LongColumnVector) columnVector; long days = lcv.vector[index]; if (isPositive) { days += numDays; } else { days -= numDays; } return days; } protected void 
evaluateRepeatedDate(ColumnVector columnVector, long[] vector2, LongColumnVector outputVector, boolean selectedInUse, int[] selected, int n) { if (columnVector.isNull[0]) { outputVector.noNulls = false; outputVector.isNull[0] = true; outputVector.isRepeating = true; return; } LongColumnVector lcv = (LongColumnVector) columnVector; long days = lcv.vector[0]; evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n); } protected long evaluateTimestamp(ColumnVector columnVector, int index, long numDays) { TimestampColumnVector tcv = (TimestampColumnVector) columnVector; // Convert to date value (in days) long days = DateWritableV2.millisToDays(tcv.getTime(index)); if (isPositive) { days += numDays; } else { days -= numDays; } return days; } protected void evaluateRepeatedTimestamp(ColumnVector columnVector, long[] vector2, LongColumnVector outputVector, boolean selectedInUse, int[] selected, int n) { if (columnVector.isNull[0]) { outputVector.noNulls = false; outputVector.isNull[0] = true; outputVector.isRepeating = true; return; } TimestampColumnVector tcv = (TimestampColumnVector) columnVector; // Convert to date value (in days) long days = DateWritableV2.millisToDays(tcv.getTime(0)); evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n); } protected void evaluateString(BytesColumnVector inputColumnVector1, LongColumnVector outputVector, int index, long numDays) { if (inputColumnVector1.isNull[index]) { outputVector.noNulls = false; outputVector.isNull[index] = true; } else { text.set(inputColumnVector1.vector[index], inputColumnVector1.start[index], inputColumnVector1.length[index]); Date hDate = new Date(); boolean parsed = dateParser.parseDate(text.toString(), hDate); if (!parsed) { outputVector.noNulls = false; outputVector.isNull[index] = true; return; } long days = DateWritableV2.millisToDays(hDate.toEpochMilli()); if (isPositive) { days += numDays; } else { days -= numDays; } outputVector.vector[index] = days; } 
} protected void evaluateRepeatedString(BytesColumnVector inputColumnVector1, long[] vector2, LongColumnVector outputVector, boolean selectedInUse, int[] selected, int n) { if (inputColumnVector1.isNull[0]) { outputVector.noNulls = false; outputVector.isNull[0] = true; outputVector.isRepeating = true; return; } text.set( inputColumnVector1.vector[0], inputColumnVector1.start[0], inputColumnVector1.length[0]); Date date = new Date(); boolean parsed = dateParser.parseDate(text.toString(), date); if (!parsed) { outputVector.noNulls = false; outputVector.isNull[0] = true; outputVector.isRepeating = true; return; } long days = DateWritableV2.millisToDays(date.toEpochMilli()); evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n); } @Override public String vectorExpressionParameters() { return getColumnParamString(0, colNum1) + ", " + getColumnParamString(1, colNum2); } @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder(); b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(2) .setArgumentTypes( VectorExpressionDescriptor.ArgumentType.STRING_DATETIME_FAMILY, VectorExpressionDescriptor.ArgumentType.INT_FAMILY) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN, VectorExpressionDescriptor.InputExpressionType.COLUMN); return b.build(); } }
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.host.dao; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.TimeZone; import javax.annotation.PostConstruct; import javax.inject.Inject; import javax.persistence.TableGenerator; import org.apache.log4j.Logger; import com.cloud.agent.api.VgpuTypesInfo; import com.cloud.cluster.agentlb.HostTransferMapVO; import com.cloud.cluster.agentlb.dao.HostTransferMapDao; import com.cloud.configuration.ManagementServiceConfiguration; import com.cloud.dc.ClusterVO; import com.cloud.dc.dao.ClusterDao; import com.cloud.gpu.dao.HostGpuGroupsDao; import com.cloud.gpu.dao.VGPUTypesDao; import com.cloud.host.Host; import com.cloud.host.Host.Type; import com.cloud.host.HostTagVO; import com.cloud.host.HostVO; import com.cloud.host.Status; import com.cloud.host.Status.Event; import com.cloud.hypervisor.Hypervisor; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.info.RunningHostCountInfo; import com.cloud.org.Grouping; import com.cloud.org.Managed; import 
com.cloud.resource.ResourceState; import com.cloud.utils.DateUtil; import com.cloud.utils.db.Attribute; import com.cloud.utils.db.DB; import com.cloud.utils.db.Filter; import com.cloud.utils.db.GenericDaoBase; import com.cloud.utils.db.GenericSearchBuilder; import com.cloud.utils.db.JoinBuilder; import com.cloud.utils.db.JoinBuilder.JoinType; import com.cloud.utils.db.SearchBuilder; import com.cloud.utils.db.SearchCriteria; import com.cloud.utils.db.SearchCriteria.Func; import com.cloud.utils.db.SearchCriteria.Op; import com.cloud.utils.db.TransactionLegacy; import com.cloud.utils.db.UpdateBuilder; import com.cloud.utils.exception.CloudRuntimeException; @DB @TableGenerator(name = "host_req_sq", table = "op_host", pkColumnName = "id", valueColumnName = "sequence", allocationSize = 1) public class HostDaoImpl extends GenericDaoBase<HostVO, Long> implements HostDao { //FIXME: , ExternalIdDao { private static final Logger s_logger = Logger.getLogger(HostDaoImpl.class); private static final Logger status_logger = Logger.getLogger(Status.class); private static final Logger state_logger = Logger.getLogger(ResourceState.class); private static final String LIST_CLUSTERID_FOR_HOST_TAG = "select distinct cluster_id from host join host_tags on host.id = host_tags.host_id and host_tags.tag = ?"; protected SearchBuilder<HostVO> TypePodDcStatusSearch; protected SearchBuilder<HostVO> IdStatusSearch; protected SearchBuilder<HostVO> TypeDcSearch; protected SearchBuilder<HostVO> TypeDcStatusSearch; protected SearchBuilder<HostVO> TypeClusterStatusSearch; protected SearchBuilder<HostVO> MsStatusSearch; protected SearchBuilder<HostVO> DcPrivateIpAddressSearch; protected SearchBuilder<HostVO> DcStorageIpAddressSearch; protected SearchBuilder<HostVO> PublicIpAddressSearch; protected SearchBuilder<HostVO> AnyIpAddressSearch; protected SearchBuilder<HostVO> GuidSearch; protected SearchBuilder<HostVO> DcSearch; protected SearchBuilder<HostVO> PodSearch; protected SearchBuilder<HostVO> 
ClusterSearch; protected SearchBuilder<HostVO> TypeSearch; protected SearchBuilder<HostVO> StatusSearch; protected SearchBuilder<HostVO> ResourceStateSearch; protected SearchBuilder<HostVO> NameLikeSearch; protected SearchBuilder<HostVO> NameSearch; protected SearchBuilder<HostVO> SequenceSearch; protected SearchBuilder<HostVO> DirectlyConnectedSearch; protected SearchBuilder<HostVO> UnmanagedDirectConnectSearch; protected SearchBuilder<HostVO> UnmanagedApplianceSearch; protected SearchBuilder<HostVO> MaintenanceCountSearch; protected SearchBuilder<HostVO> ClusterStatusSearch; protected SearchBuilder<HostVO> TypeNameZoneSearch; protected SearchBuilder<HostVO> AvailHypevisorInZone; protected SearchBuilder<HostVO> DirectConnectSearch; protected SearchBuilder<HostVO> ManagedDirectConnectSearch; protected SearchBuilder<HostVO> ManagedRoutingServersSearch; protected SearchBuilder<HostVO> SecondaryStorageVMSearch; protected GenericSearchBuilder<HostVO, Long> HostIdSearch; protected GenericSearchBuilder<HostVO, Long> HostsInStatusSearch; protected GenericSearchBuilder<HostVO, Long> CountRoutingByDc; protected SearchBuilder<HostTransferMapVO> HostTransferSearch; protected SearchBuilder<ClusterVO> ClusterManagedSearch; protected SearchBuilder<HostVO> RoutingSearch; protected SearchBuilder<HostVO> HostsForReconnectSearch; protected GenericSearchBuilder<HostVO, Long> ClustersOwnedByMSSearch; protected GenericSearchBuilder<HostVO, Long> ClustersForHostsNotOwnedByAnyMSSearch; protected GenericSearchBuilder<ClusterVO, Long> AllClustersSearch; protected SearchBuilder<HostVO> HostsInClusterSearch; protected Attribute _statusAttr; protected Attribute _resourceStateAttr; protected Attribute _msIdAttr; protected Attribute _pingTimeAttr; @Inject protected HostDetailsDao _detailsDao; @Inject protected HostGpuGroupsDao _hostGpuGroupsDao; @Inject protected VGPUTypesDao _vgpuTypesDao; @Inject protected HostTagsDao _hostTagsDao; @Inject protected HostTransferMapDao _hostTransferDao; 
@Inject protected ClusterDao _clusterDao; @Inject ManagementServiceConfiguration mgmtServiceConf; public HostDaoImpl() { super(); } @PostConstruct public void init() { MaintenanceCountSearch = createSearchBuilder(); MaintenanceCountSearch.and("cluster", MaintenanceCountSearch.entity().getClusterId(), SearchCriteria.Op.EQ); MaintenanceCountSearch.and("resourceState", MaintenanceCountSearch.entity().getResourceState(), SearchCriteria.Op.IN); MaintenanceCountSearch.done(); TypePodDcStatusSearch = createSearchBuilder(); HostVO entity = TypePodDcStatusSearch.entity(); TypePodDcStatusSearch.and("type", entity.getType(), SearchCriteria.Op.EQ); TypePodDcStatusSearch.and("pod", entity.getPodId(), SearchCriteria.Op.EQ); TypePodDcStatusSearch.and("dc", entity.getDataCenterId(), SearchCriteria.Op.EQ); TypePodDcStatusSearch.and("cluster", entity.getClusterId(), SearchCriteria.Op.EQ); TypePodDcStatusSearch.and("status", entity.getStatus(), SearchCriteria.Op.EQ); TypePodDcStatusSearch.and("resourceState", entity.getResourceState(), SearchCriteria.Op.EQ); TypePodDcStatusSearch.done(); MsStatusSearch = createSearchBuilder(); MsStatusSearch.and("ms", MsStatusSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ); MsStatusSearch.and("type", MsStatusSearch.entity().getType(), SearchCriteria.Op.EQ); MsStatusSearch.and("resourceState", MsStatusSearch.entity().getResourceState(), SearchCriteria.Op.NIN); MsStatusSearch.done(); TypeDcSearch = createSearchBuilder(); TypeDcSearch.and("type", TypeDcSearch.entity().getType(), SearchCriteria.Op.EQ); TypeDcSearch.and("dc", TypeDcSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ); TypeDcSearch.done(); SecondaryStorageVMSearch = createSearchBuilder(); SecondaryStorageVMSearch.and("type", SecondaryStorageVMSearch.entity().getType(), SearchCriteria.Op.EQ); SecondaryStorageVMSearch.and("dc", SecondaryStorageVMSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ); SecondaryStorageVMSearch.and("status", 
SecondaryStorageVMSearch.entity().getStatus(), SearchCriteria.Op.EQ); SecondaryStorageVMSearch.done(); TypeDcStatusSearch = createSearchBuilder(); TypeDcStatusSearch.and("type", TypeDcStatusSearch.entity().getType(), SearchCriteria.Op.EQ); TypeDcStatusSearch.and("dc", TypeDcStatusSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ); TypeDcStatusSearch.and("status", TypeDcStatusSearch.entity().getStatus(), SearchCriteria.Op.EQ); TypeDcStatusSearch.and("resourceState", TypeDcStatusSearch.entity().getResourceState(), SearchCriteria.Op.EQ); TypeDcStatusSearch.done(); TypeClusterStatusSearch = createSearchBuilder(); TypeClusterStatusSearch.and("type", TypeClusterStatusSearch.entity().getType(), SearchCriteria.Op.EQ); TypeClusterStatusSearch.and("cluster", TypeClusterStatusSearch.entity().getClusterId(), SearchCriteria.Op.EQ); TypeClusterStatusSearch.and("status", TypeClusterStatusSearch.entity().getStatus(), SearchCriteria.Op.EQ); TypeClusterStatusSearch.and("resourceState", TypeClusterStatusSearch.entity().getResourceState(), SearchCriteria.Op.EQ); TypeClusterStatusSearch.done(); IdStatusSearch = createSearchBuilder(); IdStatusSearch.and("id", IdStatusSearch.entity().getId(), SearchCriteria.Op.EQ); IdStatusSearch.and("states", IdStatusSearch.entity().getStatus(), SearchCriteria.Op.IN); IdStatusSearch.done(); DcPrivateIpAddressSearch = createSearchBuilder(); DcPrivateIpAddressSearch.and("privateIpAddress", DcPrivateIpAddressSearch.entity().getPrivateIpAddress(), SearchCriteria.Op.EQ); DcPrivateIpAddressSearch.and("dc", DcPrivateIpAddressSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ); DcPrivateIpAddressSearch.done(); DcStorageIpAddressSearch = createSearchBuilder(); DcStorageIpAddressSearch.and("storageIpAddress", DcStorageIpAddressSearch.entity().getStorageIpAddress(), SearchCriteria.Op.EQ); DcStorageIpAddressSearch.and("dc", DcStorageIpAddressSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ); DcStorageIpAddressSearch.done(); PublicIpAddressSearch = 
createSearchBuilder(); PublicIpAddressSearch.and("publicIpAddress", PublicIpAddressSearch.entity().getPublicIpAddress(), SearchCriteria.Op.EQ); PublicIpAddressSearch.done(); AnyIpAddressSearch = createSearchBuilder(); AnyIpAddressSearch.or("publicIpAddress", AnyIpAddressSearch.entity().getPublicIpAddress(), SearchCriteria.Op.EQ); AnyIpAddressSearch.or("privateIpAddress", AnyIpAddressSearch.entity().getPrivateIpAddress(), SearchCriteria.Op.EQ); AnyIpAddressSearch.done(); GuidSearch = createSearchBuilder(); GuidSearch.and("guid", GuidSearch.entity().getGuid(), SearchCriteria.Op.EQ); GuidSearch.done(); DcSearch = createSearchBuilder(); DcSearch.and("dc", DcSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ); DcSearch.and("type", DcSearch.entity().getType(), Op.EQ); DcSearch.and("status", DcSearch.entity().getStatus(), Op.EQ); DcSearch.and("resourceState", DcSearch.entity().getResourceState(), Op.EQ); DcSearch.done(); ClusterStatusSearch = createSearchBuilder(); ClusterStatusSearch.and("cluster", ClusterStatusSearch.entity().getClusterId(), SearchCriteria.Op.EQ); ClusterStatusSearch.and("status", ClusterStatusSearch.entity().getStatus(), SearchCriteria.Op.EQ); ClusterStatusSearch.done(); TypeNameZoneSearch = createSearchBuilder(); TypeNameZoneSearch.and("name", TypeNameZoneSearch.entity().getName(), SearchCriteria.Op.EQ); TypeNameZoneSearch.and("type", TypeNameZoneSearch.entity().getType(), SearchCriteria.Op.EQ); TypeNameZoneSearch.and("zoneId", TypeNameZoneSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ); TypeNameZoneSearch.done(); PodSearch = createSearchBuilder(); PodSearch.and("podId", PodSearch.entity().getPodId(), SearchCriteria.Op.EQ); PodSearch.done(); ClusterSearch = createSearchBuilder(); ClusterSearch.and("clusterId", ClusterSearch.entity().getClusterId(), SearchCriteria.Op.EQ); ClusterSearch.done(); TypeSearch = createSearchBuilder(); TypeSearch.and("type", TypeSearch.entity().getType(), SearchCriteria.Op.EQ); TypeSearch.done(); StatusSearch = 
createSearchBuilder(); StatusSearch.and("status", StatusSearch.entity().getStatus(), SearchCriteria.Op.IN); StatusSearch.done(); ResourceStateSearch = createSearchBuilder(); ResourceStateSearch.and("resourceState", ResourceStateSearch.entity().getResourceState(), SearchCriteria.Op.IN); ResourceStateSearch.done(); NameLikeSearch = createSearchBuilder(); NameLikeSearch.and("name", NameLikeSearch.entity().getName(), SearchCriteria.Op.LIKE); NameLikeSearch.done(); NameSearch = createSearchBuilder(); NameSearch.and("name", NameSearch.entity().getName(), SearchCriteria.Op.EQ); NameSearch.done(); SequenceSearch = createSearchBuilder(); SequenceSearch.and("id", SequenceSearch.entity().getId(), SearchCriteria.Op.EQ); // SequenceSearch.addRetrieve("sequence", SequenceSearch.entity().getSequence()); SequenceSearch.done(); DirectlyConnectedSearch = createSearchBuilder(); DirectlyConnectedSearch.and("resource", DirectlyConnectedSearch.entity().getResource(), SearchCriteria.Op.NNULL); DirectlyConnectedSearch.and("ms", DirectlyConnectedSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ); DirectlyConnectedSearch.and("statuses", DirectlyConnectedSearch.entity().getStatus(), SearchCriteria.Op.EQ); DirectlyConnectedSearch.and("resourceState", DirectlyConnectedSearch.entity().getResourceState(), SearchCriteria.Op.NOTIN); DirectlyConnectedSearch.done(); UnmanagedDirectConnectSearch = createSearchBuilder(); UnmanagedDirectConnectSearch.and("resource", UnmanagedDirectConnectSearch.entity().getResource(), SearchCriteria.Op.NNULL); UnmanagedDirectConnectSearch.and("server", UnmanagedDirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL); UnmanagedDirectConnectSearch.and("lastPinged", UnmanagedDirectConnectSearch.entity().getLastPinged(), SearchCriteria.Op.LTEQ); UnmanagedDirectConnectSearch.and("resourceStates", UnmanagedDirectConnectSearch.entity().getResourceState(), SearchCriteria.Op.NIN); UnmanagedDirectConnectSearch.and("clusterIn", 
UnmanagedDirectConnectSearch.entity().getClusterId(), SearchCriteria.Op.IN); /* * UnmanagedDirectConnectSearch.op(SearchCriteria.Op.OR, "managementServerId", * UnmanagedDirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ); * UnmanagedDirectConnectSearch.and("lastPinged", UnmanagedDirectConnectSearch.entity().getLastPinged(), * SearchCriteria.Op.LTEQ); UnmanagedDirectConnectSearch.cp(); UnmanagedDirectConnectSearch.cp(); */ try { HostTransferSearch = _hostTransferDao.createSearchBuilder(); } catch (Throwable e) { s_logger.debug("error", e); } HostTransferSearch.and("id", HostTransferSearch.entity().getId(), SearchCriteria.Op.NULL); UnmanagedDirectConnectSearch.join("hostTransferSearch", HostTransferSearch, HostTransferSearch.entity().getId(), UnmanagedDirectConnectSearch.entity().getId(), JoinType.LEFTOUTER); ClusterManagedSearch = _clusterDao.createSearchBuilder(); ClusterManagedSearch.and("managed", ClusterManagedSearch.entity().getManagedState(), SearchCriteria.Op.EQ); UnmanagedDirectConnectSearch.join("ClusterManagedSearch", ClusterManagedSearch, ClusterManagedSearch.entity().getId(), UnmanagedDirectConnectSearch.entity().getClusterId(), JoinType.INNER); UnmanagedDirectConnectSearch.done(); DirectConnectSearch = createSearchBuilder(); DirectConnectSearch.and("resource", DirectConnectSearch.entity().getResource(), SearchCriteria.Op.NNULL); DirectConnectSearch.and("id", DirectConnectSearch.entity().getId(), SearchCriteria.Op.EQ); DirectConnectSearch.and().op("nullserver", DirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL); DirectConnectSearch.or("server", DirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ); DirectConnectSearch.cp(); DirectConnectSearch.done(); UnmanagedApplianceSearch = createSearchBuilder(); UnmanagedApplianceSearch.and("resource", UnmanagedApplianceSearch.entity().getResource(), SearchCriteria.Op.NNULL); UnmanagedApplianceSearch.and("server", 
UnmanagedApplianceSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL); UnmanagedApplianceSearch.and("types", UnmanagedApplianceSearch.entity().getType(), SearchCriteria.Op.IN); UnmanagedApplianceSearch.and("lastPinged", UnmanagedApplianceSearch.entity().getLastPinged(), SearchCriteria.Op.LTEQ); UnmanagedApplianceSearch.done(); AvailHypevisorInZone = createSearchBuilder(); AvailHypevisorInZone.and("zoneId", AvailHypevisorInZone.entity().getDataCenterId(), SearchCriteria.Op.EQ); AvailHypevisorInZone.and("hostId", AvailHypevisorInZone.entity().getId(), SearchCriteria.Op.NEQ); AvailHypevisorInZone.and("type", AvailHypevisorInZone.entity().getType(), SearchCriteria.Op.EQ); AvailHypevisorInZone.groupBy(AvailHypevisorInZone.entity().getHypervisorType()); AvailHypevisorInZone.done(); HostsInStatusSearch = createSearchBuilder(Long.class); HostsInStatusSearch.selectFields(HostsInStatusSearch.entity().getId()); HostsInStatusSearch.and("dc", HostsInStatusSearch.entity().getDataCenterId(), Op.EQ); HostsInStatusSearch.and("pod", HostsInStatusSearch.entity().getPodId(), Op.EQ); HostsInStatusSearch.and("cluster", HostsInStatusSearch.entity().getClusterId(), Op.EQ); HostsInStatusSearch.and("type", HostsInStatusSearch.entity().getType(), Op.EQ); HostsInStatusSearch.and("statuses", HostsInStatusSearch.entity().getStatus(), Op.IN); HostsInStatusSearch.done(); CountRoutingByDc = createSearchBuilder(Long.class); CountRoutingByDc.select(null, Func.COUNT, null); CountRoutingByDc.and("dc", CountRoutingByDc.entity().getDataCenterId(), SearchCriteria.Op.EQ); CountRoutingByDc.and("type", CountRoutingByDc.entity().getType(), SearchCriteria.Op.EQ); CountRoutingByDc.and("status", CountRoutingByDc.entity().getStatus(), SearchCriteria.Op.EQ); CountRoutingByDc.done(); ManagedDirectConnectSearch = createSearchBuilder(); ManagedDirectConnectSearch.and("resource", ManagedDirectConnectSearch.entity().getResource(), SearchCriteria.Op.NNULL); ManagedDirectConnectSearch.and("server", 
ManagedDirectConnectSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL); ManagedDirectConnectSearch.done(); ManagedRoutingServersSearch = createSearchBuilder(); ManagedRoutingServersSearch.and("server", ManagedRoutingServersSearch.entity().getManagementServerId(), SearchCriteria.Op.NNULL); ManagedRoutingServersSearch.and("type", ManagedRoutingServersSearch.entity().getType(), SearchCriteria.Op.EQ); ManagedRoutingServersSearch.done(); RoutingSearch = createSearchBuilder(); RoutingSearch.and("type", RoutingSearch.entity().getType(), SearchCriteria.Op.EQ); RoutingSearch.done(); HostsForReconnectSearch = createSearchBuilder(); HostsForReconnectSearch.and("resource", HostsForReconnectSearch.entity().getResource(), SearchCriteria.Op.NNULL); HostsForReconnectSearch.and("server", HostsForReconnectSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ); HostsForReconnectSearch.and("lastPinged", HostsForReconnectSearch.entity().getLastPinged(), SearchCriteria.Op.LTEQ); HostsForReconnectSearch.and("resourceStates", HostsForReconnectSearch.entity().getResourceState(), SearchCriteria.Op.NIN); HostsForReconnectSearch.and("cluster", HostsForReconnectSearch.entity().getClusterId(), SearchCriteria.Op.NNULL); HostsForReconnectSearch.and("status", HostsForReconnectSearch.entity().getStatus(), SearchCriteria.Op.IN); HostsForReconnectSearch.done(); ClustersOwnedByMSSearch = createSearchBuilder(Long.class); ClustersOwnedByMSSearch.select(null, Func.DISTINCT, ClustersOwnedByMSSearch.entity().getClusterId()); ClustersOwnedByMSSearch.and("resource", ClustersOwnedByMSSearch.entity().getResource(), SearchCriteria.Op.NNULL); ClustersOwnedByMSSearch.and("cluster", ClustersOwnedByMSSearch.entity().getClusterId(), SearchCriteria.Op.NNULL); ClustersOwnedByMSSearch.and("server", ClustersOwnedByMSSearch.entity().getManagementServerId(), SearchCriteria.Op.EQ); ClustersOwnedByMSSearch.done(); ClustersForHostsNotOwnedByAnyMSSearch = createSearchBuilder(Long.class); 
ClustersForHostsNotOwnedByAnyMSSearch.select(null, Func.DISTINCT, ClustersForHostsNotOwnedByAnyMSSearch.entity().getClusterId()); ClustersForHostsNotOwnedByAnyMSSearch.and("resource", ClustersForHostsNotOwnedByAnyMSSearch.entity().getResource(), SearchCriteria.Op.NNULL); ClustersForHostsNotOwnedByAnyMSSearch.and("cluster", ClustersForHostsNotOwnedByAnyMSSearch.entity().getClusterId(), SearchCriteria.Op.NNULL); ClustersForHostsNotOwnedByAnyMSSearch.and("server", ClustersForHostsNotOwnedByAnyMSSearch.entity().getManagementServerId(), SearchCriteria.Op.NULL); ClusterManagedSearch = _clusterDao.createSearchBuilder(); ClusterManagedSearch.and("managed", ClusterManagedSearch.entity().getManagedState(), SearchCriteria.Op.EQ); ClustersForHostsNotOwnedByAnyMSSearch.join("ClusterManagedSearch", ClusterManagedSearch, ClusterManagedSearch.entity().getId(), ClustersForHostsNotOwnedByAnyMSSearch.entity().getClusterId(), JoinType.INNER); ClustersForHostsNotOwnedByAnyMSSearch.done(); AllClustersSearch = _clusterDao.createSearchBuilder(Long.class); AllClustersSearch.select(null, Func.NATIVE, AllClustersSearch.entity().getId()); AllClustersSearch.and("managed", AllClustersSearch.entity().getManagedState(), SearchCriteria.Op.EQ); AllClustersSearch.done(); HostsInClusterSearch = createSearchBuilder(); HostsInClusterSearch.and("resource", HostsInClusterSearch.entity().getResource(), SearchCriteria.Op.NNULL); HostsInClusterSearch.and("cluster", HostsInClusterSearch.entity().getClusterId(), SearchCriteria.Op.EQ); HostsInClusterSearch.and("server", HostsInClusterSearch.entity().getManagementServerId(), SearchCriteria.Op.NNULL); HostsInClusterSearch.done(); HostIdSearch = createSearchBuilder(Long.class); HostIdSearch.selectFields(HostIdSearch.entity().getId()); HostIdSearch.and("dataCenterId", HostIdSearch.entity().getDataCenterId(), Op.EQ); HostIdSearch.done(); _statusAttr = _allAttributes.get("status"); _msIdAttr = _allAttributes.get("managementServerId"); _pingTimeAttr = 
_allAttributes.get("lastPinged");
_resourceStateAttr = _allAttributes.get("resourceState");
// Fail fast (under -ea) if the reflected host attributes ever drift from the VO mapping.
assert (_statusAttr != null && _msIdAttr != null && _pingTimeAttr != null) : "Couldn't find one of these attributes";
}

/**
 * Counts hosts in the given cluster whose resource state matches any of {@code states}.
 * NOTE(review): implemented by fetching the matching rows and returning the list size,
 * not via a SQL COUNT — consider a count query if clusters can be large.
 */
@Override
public long countBy(long clusterId, ResourceState... states) {
    SearchCriteria<HostVO> sc = MaintenanceCountSearch.create();
    sc.setParameters("resourceState", (Object[])states);
    sc.setParameters("cluster", clusterId);
    List<HostVO> hosts = listBy(sc);
    return hosts.size();
}

/**
 * Lists routing hosts in the given zone that are Up and in resource state Enabled.
 */
@Override
public List<HostVO> listByDataCenterId(long id) {
    SearchCriteria<HostVO> sc = DcSearch.create();
    sc.setParameters("dc", id);
    sc.setParameters("status", Status.Up);
    sc.setParameters("type", Host.Type.Routing);
    sc.setParameters("resourceState", ResourceState.Enabled);
    return listBy(sc);
}

/**
 * Lists Up + Enabled routing hosts of the given hypervisor type in the given zone,
 * restricted (via INNER JOIN on cluster) to clusters whose allocation state is Enabled.
 */
@Override
public List<HostVO> listByDataCenterIdAndHypervisorType(long zoneId, Hypervisor.HypervisorType hypervisorType) {
    // Join against the cluster table so hosts in Disabled clusters are filtered out.
    SearchBuilder<ClusterVO> clusterSearch = _clusterDao.createSearchBuilder();
    clusterSearch.and("allocationState", clusterSearch.entity().getAllocationState(), SearchCriteria.Op.EQ);
    clusterSearch.and("hypervisorType", clusterSearch.entity().getHypervisorType(), SearchCriteria.Op.EQ);

    SearchBuilder<HostVO> hostSearch = createSearchBuilder();
    hostSearch.and("dc", hostSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    hostSearch.and("type", hostSearch.entity().getType(), Op.EQ);
    hostSearch.and("status", hostSearch.entity().getStatus(), Op.EQ);
    hostSearch.and("resourceState", hostSearch.entity().getResourceState(), Op.EQ);
    hostSearch.join("clusterSearch", clusterSearch, hostSearch.entity().getClusterId(), clusterSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    hostSearch.done();

    SearchCriteria<HostVO> sc = hostSearch.create();
    sc.setParameters("dc", zoneId);
    sc.setParameters("type", Host.Type.Routing);
    sc.setParameters("status", Status.Up);
    sc.setParameters("resourceState", ResourceState.Enabled);
    sc.setJoinParameters("clusterSearch", "allocationState", Grouping.AllocationState.Enabled);
    sc.setJoinParameters("clusterSearch", "hypervisorType", hypervisorType.toString());
    return listBy(sc);
}

/** Looks up a single host by its GUID. */
@Override
public HostVO findByGuid(String guid) {
    SearchCriteria<HostVO> sc = GuidSearch.create("guid", guid);
    return findOneBy(sc);
}

/*
 * Finds hosts that are Disconnected, Down or Alert, have timed out their ping, and are
 * still owned by the given management server; clears their owner (mgmt_server_id) so they
 * become candidates for reconnect. Rows are updated under an exclusive lock.
 */
private void resetHosts(long managementServerId, long lastPingSecondsAfter) {
    SearchCriteria<HostVO> sc = HostsForReconnectSearch.create();
    sc.setParameters("server", managementServerId);
    sc.setParameters("lastPinged", lastPingSecondsAfter);
    sc.setParameters("status", Status.Disconnected, Status.Down, Status.Alert);

    StringBuilder sb = new StringBuilder();
    List<HostVO> hosts = lockRows(sc, null, true); // exclusive lock
    for (HostVO host : hosts) {
        host.setManagementServerId(null);
        update(host.getId(), host);
        sb.append(host.getId());
        sb.append(" ");
    }

    if (s_logger.isTraceEnabled()) {
        s_logger.trace("Following hosts got reset: " + sb.toString());
    }
}

/*
 * Returns the ids of clusters owned by @managementServerId, i.e. clusters containing at
 * least one host whose mgmt_server_id equals it.
 */
private List<Long> findClustersOwnedByManagementServer(long managementServerId) {
    SearchCriteria<Long> sc = ClustersOwnedByMSSearch.create();
    sc.setParameters("server", managementServerId);
    List<Long> clusters = customSearch(sc, null);
    return clusters;
}

/*
 * Returns the ids of managed clusters that contain hosts not owned by any MS
 * (mgmt_server_id is null).
 */
private List<Long> findClustersForHostsNotOwnedByAnyManagementServer() {
    SearchCriteria<Long> sc = ClustersForHostsNotOwnedByAnyMSSearch.create();
    sc.setJoinParameters("ClusterManagedSearch", "managed", Managed.ManagedState.Managed);
    List<Long> clusters = customSearch(sc, null);
    return clusters;
}

/**
 * This determines if hosts belonging to cluster(@clusterId) are up for grabs
 *
 * This is used for handling following cases:
 * 1. First host added in cluster
 * 2.
During MS restart all hosts in a cluster are without any MS */
private boolean canOwnCluster(long clusterId) {
    // HostsInClusterSearch matches hosts in this cluster already owned by some MS
    // (mgmt_server_id NNULL); the cluster is "up for grabs" only if no such host exists.
    SearchCriteria<HostVO> sc = HostsInClusterSearch.create();
    sc.setParameters("cluster", clusterId);
    List<HostVO> hosts = search(sc, null);
    boolean ownCluster = (hosts == null || hosts.size() == 0);
    return ownCluster;
}

/**
 * Acquires direct-agent hosts for this management server to load, in two phases:
 * first resets stale hosts owned by this MS, then (in one transaction) claims unmanaged
 * hosts in clusters this MS already owns, and finally claims whole clusters not owned by
 * any MS, up to roughly {@code limit} hosts.
 * NOTE(review): {@code limit} is a boxed Long and is unboxed in comparisons below —
 * a null limit would NPE; confirm callers always pass a non-null value.
 */
@Override
@DB
public List<HostVO> findAndUpdateDirectAgentToLoad(long lastPingSecondsAfter, Long limit, long managementServerId) {
    TransactionLegacy txn = TransactionLegacy.currentTxn();

    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Resetting hosts suitable for reconnect");
    }
    // reset hosts that are suitable candidates for reconnect
    resetHosts(managementServerId, lastPingSecondsAfter);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Completed resetting hosts suitable for reconnect");
    }

    List<HostVO> assignedHosts = new ArrayList<HostVO>();

    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Acquiring hosts for clusters already owned by this management server");
    }
    List<Long> clusters = findClustersOwnedByManagementServer(managementServerId);
    txn.start();
    if (clusters.size() > 0) {
        // handle clusters already owned by @managementServerId
        SearchCriteria<HostVO> sc = UnmanagedDirectConnectSearch.create();
        sc.setParameters("lastPinged", lastPingSecondsAfter);
        sc.setJoinParameters("ClusterManagedSearch", "managed", Managed.ManagedState.Managed);
        sc.setParameters("clusterIn", clusters.toArray());
        List<HostVO> unmanagedHosts = lockRows(sc, new Filter(HostVO.class, "clusterId", true, 0L, limit), true); // host belongs to clusters owned by @managementServerId
        StringBuilder sb = new StringBuilder();
        for (HostVO host : unmanagedHosts) {
            host.setManagementServerId(managementServerId);
            update(host.getId(), host);
            assignedHosts.add(host);
            sb.append(host.getId());
            sb.append(" ");
        }
        if (s_logger.isTraceEnabled()) {
            s_logger.trace("Following hosts got acquired for clusters already owned: " + sb.toString());
        }
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Completed acquiring hosts for clusters already owned by this management server");
    }

    if (assignedHosts.size() < limit) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Acquiring hosts for clusters not owned by any management server");
        }
        // for remaining hosts not owned by any MS check if they can be owned (by owning full cluster)
        clusters = findClustersForHostsNotOwnedByAnyManagementServer();
        List<Long> updatedClusters = clusters;
        if (clusters.size() > limit) {
            updatedClusters = clusters.subList(0, limit.intValue());
        }
        if (updatedClusters.size() > 0) {
            SearchCriteria<HostVO> sc = UnmanagedDirectConnectSearch.create();
            sc.setParameters("lastPinged", lastPingSecondsAfter);
            sc.setJoinParameters("ClusterManagedSearch", "managed", Managed.ManagedState.Managed);
            sc.setParameters("clusterIn", updatedClusters.toArray());
            List<HostVO> unmanagedHosts = lockRows(sc, null, true);

            // group hosts based on cluster
            Map<Long, List<HostVO>> hostMap = new HashMap<Long, List<HostVO>>();
            for (HostVO host : unmanagedHosts) {
                if (hostMap.get(host.getClusterId()) == null) {
                    hostMap.put(host.getClusterId(), new ArrayList<HostVO>());
                }
                hostMap.get(host.getClusterId()).add(host);
            }

            StringBuilder sb = new StringBuilder();
            for (Long clusterId : hostMap.keySet()) {
                if (canOwnCluster(clusterId)) {
                    // cluster is not owned by any other MS, so @managementServerId can own it
                    List<HostVO> hostList = hostMap.get(clusterId);
                    for (HostVO host : hostList) {
                        host.setManagementServerId(managementServerId);
                        update(host.getId(), host);
                        assignedHosts.add(host);
                        sb.append(host.getId());
                        sb.append(" ");
                    }
                }
                if (assignedHosts.size() > limit) {
                    break;
                }
            }
            if (s_logger.isTraceEnabled()) {
                s_logger.trace("Following hosts got acquired from newly owned clusters: " + sb.toString());
            }
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Completed acquiring hosts for clusters not owned by any management server");
        }
    }
    txn.commit();

    return assignedHosts;
}

@Override
@DB
public List<HostVO>
findAndUpdateApplianceToLoad(long lastPingSecondsAfter, long managementServerId) {
    // Claim unmanaged appliance-type hosts (ping timed out, no owning MS) for this MS,
    // inside one transaction with the rows exclusively locked.
    TransactionLegacy txn = TransactionLegacy.currentTxn();
    txn.start();

    SearchCriteria<HostVO> sc = UnmanagedApplianceSearch.create();
    sc.setParameters("lastPinged", lastPingSecondsAfter);
    sc.setParameters("types", Type.ExternalDhcp, Type.ExternalFirewall, Type.ExternalLoadBalancer, Type.BaremetalDhcp, Type.BaremetalPxe, Type.TrafficMonitor, Type.L2Networking, Type.NetScalerControlCenter);
    List<HostVO> hosts = lockRows(sc, null, true);

    for (HostVO host : hosts) {
        host.setManagementServerId(managementServerId);
        update(host.getId(), host);
    }

    txn.commit();
    return hosts;
}

/**
 * Marks every host owned by the given management server as Disconnected and releases its
 * ownership. Two bulk updates: first status/lastPinged/disconnectedOn, then the owner
 * (mgmt_server_id) is nulled out.
 */
@Override
public void markHostsAsDisconnected(long msId, long lastPing) {
    SearchCriteria<HostVO> sc = MsStatusSearch.create();
    sc.setParameters("ms", msId);

    HostVO host = createForUpdate();
    host.setLastPinged(lastPing);
    host.setDisconnectedOn(new Date());
    UpdateBuilder ub = getUpdateBuilder(host);
    ub.set(host, "status", Status.Disconnected);

    update(ub, sc, null);

    sc = MsStatusSearch.create();
    sc.setParameters("ms", msId);

    host = createForUpdate();
    host.setManagementServerId(null);
    host.setLastPinged(lastPing);
    host.setDisconnectedOn(new Date());
    ub = getUpdateBuilder(host);
    update(ub, sc, null);
}

/**
 * Lists Up + Enabled hosts of the given type carrying the given host tag, optionally
 * narrowed by pod and cluster (null pod/cluster means "any").
 */
@Override
public List<HostVO> listByHostTag(Host.Type type, Long clusterId, Long podId, long dcId, String hostTag) {
    SearchBuilder<HostTagVO> hostTagSearch = _hostTagsDao.createSearchBuilder();
    HostTagVO tagEntity = hostTagSearch.entity();
    hostTagSearch.and("tag", tagEntity.getTag(), SearchCriteria.Op.EQ);

    SearchBuilder<HostVO> hostSearch = createSearchBuilder();
    HostVO entity = hostSearch.entity();
    hostSearch.and("type", entity.getType(), SearchCriteria.Op.EQ);
    hostSearch.and("pod", entity.getPodId(), SearchCriteria.Op.EQ);
    hostSearch.and("dc", entity.getDataCenterId(), SearchCriteria.Op.EQ);
    hostSearch.and("cluster", entity.getClusterId(), SearchCriteria.Op.EQ);
    hostSearch.and("status", entity.getStatus(), SearchCriteria.Op.EQ);
    hostSearch.and("resourceState", entity.getResourceState(), SearchCriteria.Op.EQ);
    hostSearch.join("hostTagSearch", hostTagSearch, entity.getId(), tagEntity.getHostId(), JoinBuilder.JoinType.INNER);

    SearchCriteria<HostVO> sc = hostSearch.create();
    sc.setJoinParameters("hostTagSearch", "tag", hostTag);
    sc.setParameters("type", type.toString());
    if (podId != null) {
        sc.setParameters("pod", podId);
    }
    if (clusterId != null) {
        sc.setParameters("cluster", clusterId);
    }
    sc.setParameters("dc", dcId);
    sc.setParameters("status", Status.Up.toString());
    sc.setParameters("resourceState", ResourceState.Enabled.toString());

    return listBy(sc);
}

/**
 * Lists Up + Enabled hosts that do NOT carry the given HA tag (including hosts with no
 * tag at all, via the LEFT OUTER join). When {@code haTag} is empty/null the tag filter
 * is skipped entirely.
 */
@Override
public List<HostVO> listAllUpAndEnabledNonHAHosts(Type type, Long clusterId, Long podId, long dcId, String haTag) {
    SearchBuilder<HostTagVO> hostTagSearch = null;
    if (haTag != null && !haTag.isEmpty()) {
        // tag != haTag OR tag IS NULL
        hostTagSearch = _hostTagsDao.createSearchBuilder();
        hostTagSearch.and().op("tag", hostTagSearch.entity().getTag(), SearchCriteria.Op.NEQ);
        hostTagSearch.or("tagNull", hostTagSearch.entity().getTag(), SearchCriteria.Op.NULL);
        hostTagSearch.cp();
    }

    SearchBuilder<HostVO> hostSearch = createSearchBuilder();
    hostSearch.and("type", hostSearch.entity().getType(), SearchCriteria.Op.EQ);
    hostSearch.and("clusterId", hostSearch.entity().getClusterId(), SearchCriteria.Op.EQ);
    hostSearch.and("podId", hostSearch.entity().getPodId(), SearchCriteria.Op.EQ);
    hostSearch.and("zoneId", hostSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    hostSearch.and("status", hostSearch.entity().getStatus(), SearchCriteria.Op.EQ);
    hostSearch.and("resourceState", hostSearch.entity().getResourceState(), SearchCriteria.Op.EQ);

    if (haTag != null && !haTag.isEmpty()) {
        hostSearch.join("hostTagSearch", hostTagSearch, hostSearch.entity().getId(), hostTagSearch.entity().getHostId(), JoinBuilder.JoinType.LEFTOUTER);
    }

    SearchCriteria<HostVO> sc = hostSearch.create();

    if (haTag != null && !haTag.isEmpty()) {
        sc.setJoinParameters("hostTagSearch", "tag", haTag);
    }

    if (type != null) {
        sc.setParameters("type", type);
    }

    if (clusterId != null) {
        sc.setParameters("clusterId", clusterId);
    }

    if (podId != null) {
        sc.setParameters("podId", podId);
    }

    sc.setParameters("zoneId", dcId);
    sc.setParameters("status", Status.Up);
    sc.setParameters("resourceState", ResourceState.Enabled);

    return listBy(sc);
}

/** Populates the detail key/value map on the given host VO from host_details. */
@Override
public void loadDetails(HostVO host) {
    Map<String, String> details = _detailsDao.findDetails(host.getId());
    host.setDetails(details);
}

/** Populates the host-tag list on the given host VO. */
@Override
public void loadHostTags(HostVO host) {
    List<String> hostTags = _hostTagsDao.gethostTags(host.getId());
    host.setHostTags(hostTags);
}

/**
 * Finds hosts considered lost: still owned by an MS, last ping older than {@code timeout},
 * in a connectable status, of a type that is expected to ping, and in a managed cluster
 * (or in no cluster at all).
 */
@DB
@Override
public List<HostVO> findLostHosts(long timeout) {
    List<HostVO> result = new ArrayList<HostVO>();
    String sql = "select h.id from host h left join cluster c on h.cluster_id=c.id where h.mgmt_server_id is not null and h.last_ping < ? and h.status in ('Up', 'Updating', 'Disconnected', 'Connecting') and h.type not in ('ExternalFirewall', 'ExternalLoadBalancer', 'TrafficMonitor', 'SecondaryStorage', 'LocalSecondaryStorage', 'L2Networking') and (h.cluster_id is null or c.managed_state = 'Managed') ;";
    try (TransactionLegacy txn = TransactionLegacy.currentTxn(); PreparedStatement pstmt = txn.prepareStatement(sql);) {
        pstmt.setLong(1, timeout);
        try (ResultSet rs = pstmt.executeQuery();) {
            while (rs.next()) {
                long id = rs.getLong(1); //ID column
                result.add(findById(id));
            }
        }
    } catch (SQLException e) {
        // best-effort: a DB hiccup here just yields an empty/partial result
        s_logger.warn("Exception: ", e);
    }
    return result;
}

/** Persists the host's detail map, if any. */
@Override
public void saveDetails(HostVO host) {
    Map<String, String> details = host.getDetails();
    if (details == null) {
        return;
    }
    _detailsDao.persist(host.getId(), details);
}

/** Persists the host's tag list, if present and non-empty. */
protected void saveHostTags(HostVO host) {
    List<String> hostTags = host.getHostTags();
    if (hostTags == null || (hostTags != null && hostTags.isEmpty())) {
        return;
    }
    _hostTagsDao.persist(host.getId(), hostTags);
}

/** Persists GPU group and VGPU type records for the host, if GPU details are present. */
protected void saveGpuRecords(HostVO host) {
    HashMap<String,
HashMap<String, VgpuTypesInfo>> groupDetails = host.getGpuGroupDetails(); if (groupDetails != null) { // Create/Update GPU group entries _hostGpuGroupsDao.persist(host.getId(), new ArrayList<String>(groupDetails.keySet())); // Create/Update VGPU types entries _vgpuTypesDao.persist(host.getId(), groupDetails); } } @Override @DB public HostVO persist(HostVO host) { final String InsertSequenceSql = "INSERT INTO op_host(id) VALUES(?)"; TransactionLegacy txn = TransactionLegacy.currentTxn(); txn.start(); HostVO dbHost = super.persist(host); try { PreparedStatement pstmt = txn.prepareAutoCloseStatement(InsertSequenceSql); pstmt.setLong(1, dbHost.getId()); pstmt.executeUpdate(); } catch (SQLException e) { throw new CloudRuntimeException("Unable to persist the sequence number for this host"); } saveDetails(host); loadDetails(dbHost); saveHostTags(host); loadHostTags(dbHost); saveGpuRecords(host); txn.commit(); return dbHost; } @Override @DB public boolean update(Long hostId, HostVO host) { TransactionLegacy txn = TransactionLegacy.currentTxn(); txn.start(); boolean persisted = super.update(hostId, host); if (!persisted) { return persisted; } saveDetails(host); saveHostTags(host); saveGpuRecords(host); txn.commit(); return persisted; } @Override @DB public List<RunningHostCountInfo> getRunningHostCounts(Date cutTime) { String sql = "select * from (" + "select h.data_center_id, h.type, count(*) as count from host as h INNER JOIN mshost as m ON h.mgmt_server_id=m.msid " + "where h.status='Up' and h.type='SecondaryStorage' and m.last_update > ? " + "group by h.data_center_id, h.type " + "UNION ALL " + "select h.data_center_id, h.type, count(*) as count from host as h INNER JOIN mshost as m ON h.mgmt_server_id=m.msid " + "where h.status='Up' and h.type='Routing' and m.last_update > ? 
" + "group by h.data_center_id, h.type) as t " + "ORDER by t.data_center_id, t.type"; ArrayList<RunningHostCountInfo> l = new ArrayList<RunningHostCountInfo>(); TransactionLegacy txn = TransactionLegacy.currentTxn(); ; PreparedStatement pstmt = null; try { pstmt = txn.prepareAutoCloseStatement(sql); String gmtCutTime = DateUtil.getDateDisplayString(TimeZone.getTimeZone("GMT"), cutTime); pstmt.setString(1, gmtCutTime); pstmt.setString(2, gmtCutTime); ResultSet rs = pstmt.executeQuery(); while (rs.next()) { RunningHostCountInfo info = new RunningHostCountInfo(); info.setDcId(rs.getLong(1)); info.setHostType(rs.getString(2)); info.setCount(rs.getInt(3)); l.add(info); } } catch (SQLException e) { s_logger.debug("SQLException caught", e); } return l; } @Override public long getNextSequence(long hostId) { if (s_logger.isTraceEnabled()) { s_logger.trace("getNextSequence(), hostId: " + hostId); } TableGenerator tg = _tgs.get("host_req_sq"); assert tg != null : "how can this be wrong!"; return s_seqFetcher.getNextSequence(Long.class, tg, hostId); } @Override public boolean updateState(Status oldStatus, Event event, Status newStatus, Host vo, Object data) { // lock target row from beginning to avoid lock-promotion caused deadlock HostVO host = lockRow(vo.getId(), true); if (host == null) { if (event == Event.Remove && newStatus == Status.Removed) { host = findByIdIncludingRemoved(vo.getId()); } } if (host == null) { return false; } long oldPingTime = host.getLastPinged(); SearchBuilder<HostVO> sb = createSearchBuilder(); sb.and("status", sb.entity().getStatus(), SearchCriteria.Op.EQ); sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ); sb.and("update", sb.entity().getUpdated(), SearchCriteria.Op.EQ); if (newStatus.checkManagementServer()) { sb.and("ping", sb.entity().getLastPinged(), SearchCriteria.Op.EQ); sb.and().op("nullmsid", sb.entity().getManagementServerId(), SearchCriteria.Op.NULL); sb.or("msid", sb.entity().getManagementServerId(), SearchCriteria.Op.EQ); 
sb.cp();
    }
    sb.done();

    // All guard columns must still hold their observed values for the UPDATE to hit.
    SearchCriteria<HostVO> sc = sb.create();
    sc.setParameters("status", oldStatus);
    sc.setParameters("id", host.getId());
    sc.setParameters("update", host.getUpdated());
    long oldUpdateCount = host.getUpdated();
    if (newStatus.checkManagementServer()) {
        sc.setParameters("ping", oldPingTime);
        sc.setParameters("msid", host.getManagementServerId());
    }

    // Bump the optimistic-lock counter and build the new column values.
    long newUpdateCount = host.incrUpdated();
    UpdateBuilder ub = getUpdateBuilder(host);
    ub.set(host, _statusAttr, newStatus);
    if (newStatus.updateManagementServer()) {
        if (newStatus.lostConnection()) {
            ub.set(host, _msIdAttr, null);
        } else {
            ub.set(host, _msIdAttr, host.getManagementServerId());
        }
        if (event.equals(Event.Ping) || event.equals(Event.AgentConnected)) {
            // >> 10 converts milliseconds to ~seconds (divide by 1024)
            ub.set(host, _pingTimeAttr, System.currentTimeMillis() >> 10);
        }
    }
    if (event.equals(Event.ManagementServerDown)) {
        ub.set(host, _pingTimeAttr, ((System.currentTimeMillis() >> 10) - mgmtServiceConf.getTimeout()));
    }
    int result = update(ub, sc, null);
    assert result <= 1 : "How can this update " + result + " rows? ";

    if (result == 0) {
        // CAS failed: a concurrent transition won; log the divergence for diagnosis.
        HostVO ho = findById(host.getId());
        assert ho != null : "How how how? : " + host.getId();
        // NOTE(review): the else branch below still calls debug() when debug is disabled —
        // looks unintentional but preserved as-is.
        if (status_logger.isDebugEnabled()) {
            StringBuilder str = new StringBuilder("Unable to update host for event:").append(event.toString());
            str.append(". Name=").append(host.getName());
            str.append("; New=[status=").append(newStatus.toString()).append(":msid=").append(newStatus.lostConnection() ? "null" : host.getManagementServerId()) .append(":lastpinged=").append(host.getLastPinged()).append("]");
            str.append("; Old=[status=").append(oldStatus.toString()).append(":msid=").append(host.getManagementServerId()).append(":lastpinged=").append(oldPingTime) .append("]");
            str.append("; DB=[status=").append(vo.getStatus().toString()).append(":msid=").append(vo.getManagementServerId()).append(":lastpinged=").append(vo.getLastPinged()) .append(":old update count=").append(oldUpdateCount).append("]");
            status_logger.debug(str.toString());
        } else {
            StringBuilder msg = new StringBuilder("Agent status update: [");
            msg.append("id = " + host.getId());
            msg.append("; name = " + host.getName());
            msg.append("; old status = " + oldStatus);
            msg.append("; event = " + event);
            msg.append("; new status = " + newStatus);
            msg.append("; old update count = " + oldUpdateCount);
            msg.append("; new update count = " + newUpdateCount + "]");
            status_logger.debug(msg.toString());
        }
        // Treat "someone else already moved it to the target status" as success.
        if (ho.getState() == newStatus) {
            status_logger.debug("Host " + ho.getName() + " state has already been updated to " + newStatus);
            return true;
        }
    }

    return result > 0;
}

/**
 * Compare-and-swap transition of the host's resource state, guarded by the old state.
 * Returns true iff exactly this call performed the transition.
 */
@Override
public boolean updateResourceState(ResourceState oldState, ResourceState.Event event, ResourceState newState, Host vo) {
    HostVO host = (HostVO)vo;
    SearchBuilder<HostVO> sb = createSearchBuilder();
    sb.and("resource_state", sb.entity().getResourceState(), SearchCriteria.Op.EQ);
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.done();

    SearchCriteria<HostVO> sc = sb.create();
    sc.setParameters("resource_state", oldState);
    sc.setParameters("id", host.getId());

    UpdateBuilder ub = getUpdateBuilder(host);
    ub.set(host, _resourceStateAttr, newState);
    int result = update(ub, sc, null);
    assert result <= 1 : "How can this update " + result + " rows? ";

    // NOTE(review): the else branch also runs on success or when debug is disabled and
    // still calls debug(); preserved as-is.
    if (state_logger.isDebugEnabled() && result == 0) {
        HostVO ho = findById(host.getId());
        assert ho != null : "How how how? : " + host.getId();
        StringBuilder str = new StringBuilder("Unable to update resource state: [");
        str.append("m = " + host.getId());
        str.append("; name = " + host.getName());
        str.append("; old state = " + oldState);
        str.append("; event = " + event);
        str.append("; new state = " + newState + "]");
        state_logger.debug(str.toString());
    } else {
        StringBuilder msg = new StringBuilder("Resource state update: [");
        msg.append("id = " + host.getId());
        msg.append("; name = " + host.getName());
        msg.append("; old state = " + oldState);
        msg.append("; event = " + event);
        msg.append("; new state = " + newState + "]");
        state_logger.debug(msg.toString());
    }

    return result > 0;
}

/** Finds the single host with the given type, name and zone. */
@Override
public HostVO findByTypeNameAndZoneId(long zoneId, String name, Host.Type type) {
    SearchCriteria<HostVO> sc = TypeNameZoneSearch.create();
    sc.setParameters("type", type);
    sc.setParameters("name", name);
    sc.setParameters("zoneId", zoneId);
    return findOneBy(sc);
}

/** Lists routing hosts in the given zone (no status/state filter applied here). */
@Override
public List<HostVO> findByDataCenterId(Long zoneId) {
    SearchCriteria<HostVO> sc = DcSearch.create();
    sc.setParameters("dc", zoneId);
    sc.setParameters("type", Type.Routing);
    return listBy(sc);
}

/** Lists hosts in the given pod. */
@Override
public List<HostVO> findByPodId(Long podId) {
    SearchCriteria<HostVO> sc = PodSearch.create();
    sc.setParameters("podId", podId);
    return listBy(sc);
}

/** Lists hosts in the given cluster. */
@Override
public List<HostVO> findByClusterId(Long clusterId) {
    SearchCriteria<HostVO> sc = ClusterSearch.create();
    sc.setParameters("clusterId", clusterId);
    return listBy(sc);
}

/** Finds the host bound to the given public IP address. */
@Override
public HostVO findByPublicIp(String publicIp) {
    SearchCriteria<HostVO> sc = PublicIpAddressSearch.create();
    sc.setParameters("publicIpAddress", publicIp);
    return findOneBy(sc);
}

/** Finds a host whose public or private IP matches the given address. */
@Override
public HostVO findByIp(final String ipAddress) {
    SearchCriteria<HostVO> sc = AnyIpAddressSearch.create();
    sc.setParameters("publicIpAddress", ipAddress);
    sc.setParameters("privateIpAddress", ipAddress);
    return findOneBy(sc);
}

/** Lists Up + Enabled routing (hypervisor) hosts in the given cluster. */
@Override
public List<HostVO> findHypervisorHostInCluster(long clusterId) {
SearchCriteria<HostVO> sc = TypeClusterStatusSearch.create();
    sc.setParameters("type", Host.Type.Routing);
    sc.setParameters("cluster", clusterId);
    sc.setParameters("status", Status.Up);
    sc.setParameters("resourceState", ResourceState.Enabled);

    return listBy(sc);
}

/** Returns the ids of all hosts in the given zone. */
@Override
public List<Long> listAllHosts(long zoneId) {
    SearchCriteria<Long> sc = HostIdSearch.create();
    sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    return customSearch(sc, null);
}

/** Returns the ids of clusters containing hosts tagged with the given offering host tag. */
@Override
public List<Long> listClustersByHostTag(String hostTagOnOffering) {
    TransactionLegacy txn = TransactionLegacy.currentTxn();
    PreparedStatement pstmt = null;
    List<Long> result = new ArrayList<Long>();
    // LIST_CLUSTERID_FOR_HOST_TAG is a prepared-statement template declared elsewhere in this DAO.
    StringBuilder sql = new StringBuilder(LIST_CLUSTERID_FOR_HOST_TAG);
    // during listing the clusters that cross the threshold
    // we need to check with disabled thresholds of each cluster if not defined at cluster consider the global value
    try {
        pstmt = txn.prepareAutoCloseStatement(sql.toString());
        pstmt.setString(1, hostTagOnOffering);
        ResultSet rs = pstmt.executeQuery();
        while (rs.next()) {
            result.add(rs.getLong(1));
        }
        return result;
    } catch (SQLException e) {
        throw new CloudRuntimeException("DB Exception on: " + sql, e);
    } catch (Throwable e) {
        throw new CloudRuntimeException("Caught: " + sql, e);
    }
}

/** Lists Enabled hosts of the given type (any status). */
@Override
public List<HostVO> listAllHostsByType(Host.Type type) {
    SearchCriteria<HostVO> sc = TypeSearch.create();
    sc.setParameters("type", type);
    sc.setParameters("resourceState", ResourceState.Enabled);
    return listBy(sc);
}

/** Lists hosts of the given type regardless of status or resource state. */
@Override
public List<HostVO> listByType(Host.Type type) {
    SearchCriteria<HostVO> sc = TypeSearch.create();
    sc.setParameters("type", type);
    return listBy(sc);
}

// %s placeholder is substituted with the desired resource_state by createSqlFindHostToExecuteCommand().
String sqlFindHostInZoneToExecuteCommand = "Select id from host " + " where type = 'Routing' and hypervisor_type = ? and data_center_id = ? and status = 'Up' " + " and resource_state = '%s' " + " ORDER by rand() limit 1";

/**
 * Picks a random Up routing host of the given hypervisor type in the zone to run a
 * command on, preferring Enabled hosts and falling back to Disabled ones.
 *
 * @throws CloudRuntimeException if no suitable host exists or on SQL failure
 */
@Override
public HostVO findHostInZoneToExecuteCommand(long zoneId, HypervisorType hypervisorType) {
    try (TransactionLegacy tx = TransactionLegacy.currentTxn()) {
        // First pass: Enabled hosts only.
        String sql = createSqlFindHostToExecuteCommand(false);
        ResultSet rs = executeSqlGetResultsetForMethodFindHostInZoneToExecuteCommand(hypervisorType, zoneId, tx, sql);
        if (rs.next()) {
            return findById(rs.getLong("id"));
        }
        // Fallback: Disabled hosts.
        sql = createSqlFindHostToExecuteCommand(true);
        rs = executeSqlGetResultsetForMethodFindHostInZoneToExecuteCommand(hypervisorType, zoneId, tx, sql);
        if (!rs.next()) {
            throw new CloudRuntimeException(String.format("Could not find a host in zone [zoneId=%d] to operate on. ", zoneId));
        }
        return findById(rs.getLong("id"));
    } catch (SQLException e) {
        throw new CloudRuntimeException(e);
    }
}

/** Runs the host-selection SQL with the hypervisor type and zone id bound as parameters. */
private ResultSet executeSqlGetResultsetForMethodFindHostInZoneToExecuteCommand(HypervisorType hypervisorType, long zoneId, TransactionLegacy tx, String sql) throws SQLException {
    PreparedStatement pstmt = tx.prepareAutoCloseStatement(sql);
    // NOTE(review): Objects.toString yields the string "null" for a null hypervisorType — confirm callers never pass null.
    pstmt.setString(1, Objects.toString(hypervisorType));
    pstmt.setLong(2, zoneId);
    return pstmt.executeQuery();
}

/** Formats the host-selection SQL for either Enabled or Disabled resource state. */
private String createSqlFindHostToExecuteCommand(boolean useDisabledHosts) {
    String hostResourceStatus = "Enabled";
    if (useDisabledHosts) {
        hostResourceStatus = "Disabled";
    }
    return String.format(sqlFindHostInZoneToExecuteCommand, hostResourceStatus);
}
}
//================================================================================ //Copyright (c) 2012, David Yu //All rights reserved. //-------------------------------------------------------------------------------- // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // 3. Neither the name of protostuff nor the names of its contributors may be used // to endorse or promote products derived from this software without // specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
//================================================================================ package io.protostuff.runtime; import static io.protostuff.runtime.RuntimeFieldFactory.ID_COLLECTION; import static io.protostuff.runtime.RuntimeFieldFactory.ID_ENUM; import static io.protostuff.runtime.RuntimeFieldFactory.ID_ENUM_SET; import static io.protostuff.runtime.RuntimeFieldFactory.STR_COLLECTION; import static io.protostuff.runtime.RuntimeFieldFactory.STR_ENUM; import static io.protostuff.runtime.RuntimeFieldFactory.STR_ENUM_SET; import java.io.IOException; import java.lang.reflect.Field; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import io.protostuff.GraphInput; import io.protostuff.Input; import io.protostuff.Output; import io.protostuff.Pipe; import io.protostuff.ProtostuffException; import io.protostuff.Schema; import io.protostuff.StatefulOutput; import io.protostuff.runtime.IdStrategy.Wrapper; import io.protostuff.runtime.RuntimeEnv.Instantiator; /** * Used when the type is an interface (Collection/List/Set/SortedSet). 
 *
 * @author David Yu
 * @created Apr 24, 2012
 */
public abstract class PolymorphicCollectionSchema extends PolymorphicSchema
{

    // Wire-format field numbers for the non-public collection wrappers
    // (the package-private classes nested inside java.util.Collections).
    static final int ID_EMPTY_SET = 1, ID_EMPTY_LIST = 2,
            ID_SINGLETON_SET = 3, ID_SINGLETON_LIST = 4,
            ID_SET_FROM_MAP = 5, ID_COPIES_LIST = 6,
            ID_UNMODIFIABLE_COLLECTION = 7, ID_UNMODIFIABLE_SET = 8,
            ID_UNMODIFIABLE_SORTED_SET = 9, ID_UNMODIFIABLE_LIST = 10,
            ID_UNMODIFIABLE_RANDOM_ACCESS_LIST = 11,
            ID_SYNCHRONIZED_COLLECTION = 12, ID_SYNCHRONIZED_SET = 13,
            ID_SYNCHRONIZED_SORTED_SET = 14, ID_SYNCHRONIZED_LIST = 15,
            ID_SYNCHRONIZED_RANDOM_ACCESS_LIST = 16,
            ID_CHECKED_COLLECTION = 17, ID_CHECKED_SET = 18,
            ID_CHECKED_SORTED_SET = 19, ID_CHECKED_LIST = 20,
            ID_CHECKED_RANDOM_ACCESS_LIST = 21;

    // Single-letter field names, mapping 1:1 to the numeric ids above
    // (used by formats that write field names instead of numbers).
    static final String STR_EMPTY_SET = "a", STR_EMPTY_LIST = "b",
            STR_SINGLETON_SET = "c", STR_SINGLETON_LIST = "d",
            STR_SET_FROM_MAP = "e", STR_COPIES_LIST = "f",
            STR_UNMODIFIABLE_COLLECTION = "g", STR_UNMODIFIABLE_SET = "h",
            STR_UNMODIFIABLE_SORTED_SET = "i", STR_UNMODIFIABLE_LIST = "j",
            STR_UNMODIFIABLE_RANDOM_ACCESS_LIST = "k",
            STR_SYNCHRONIZED_COLLECTION = "l", STR_SYNCHRONIZED_SET = "m",
            STR_SYNCHRONIZED_SORTED_SET = "n", STR_SYNCHRONIZED_LIST = "o",
            STR_SYNCHRONIZED_RANDOM_ACCESS_LIST = "p",
            STR_CHECKED_COLLECTION = "q", STR_CHECKED_SET = "r",
            STR_CHECKED_SORTED_SET = "s", STR_CHECKED_LIST = "t",
            STR_CHECKED_RANDOM_ACCESS_LIST = "u";

    // Maps each non-public wrapper class to its wire id. Identity lookup is
    // safe: every class is loaded exactly once by the static initializer.
    static final IdentityHashMap<Class<?>, Integer> __nonPublicCollections =
            new IdentityHashMap<Class<?>, Integer>();

    // Reflection handles to the private fields of the JDK wrapper classes,
    // resolved once below.
    // NOTE(review): this relies on JDK-internal field names ("element", "c",
    // "ss", "list", "mutex", "type", "m", "s", "n") — confirm they still
    // exist when moving to a newer JDK.
    static final Field fSingletonSet_element, fSingletonList_element,
            fUnmodifiableCollection_c, fUnmodifiableSortedSet_ss,
            fUnmodifiableList_list,
            fSynchronizedCollection_c, fSynchronizedSortedSet_ss,
            fSynchronizedList_list, fSynchronizedCollection_mutex,
            fCheckedCollection_c, fCheckedSortedSet_ss, fCheckedList_list,
            fCheckedCollection_type,
            fSetFromMap_m, fSetFromMap_s,
            fCopiesList_n, fCopiesList_element;

    // Instantiators create wrapper instances without calling a constructor
    // (these wrapper classes expose no usable public constructor).
    static final Instantiator<?> iSingletonSet, iSingletonList,
            iUnmodifiableCollection, iUnmodifiableSet,
            iUnmodifiableSortedSet, iUnmodifiableList,
            iUnmodifiableRandomAccessList,
            iSynchronizedCollection, iSynchronizedSet,
            iSynchronizedSortedSet, iSynchronizedList,
            iSynchronizedRandomAccessList,
            iCheckedCollection, iCheckedSet, iCheckedSortedSet,
            iCheckedList, iCheckedRandomAccessList,
            iSetFromMap, iCopiesList;

    static
    {
        // Register every supported java.util.Collections wrapper and keep the
        // Class references needed for reflective field access below.
        map("java.util.Collections$EmptySet", ID_EMPTY_SET);
        map("java.util.Collections$EmptyList", ID_EMPTY_LIST);

        Class<?> cSingletonSet = map("java.util.Collections$SingletonSet",
                ID_SINGLETON_SET);

        Class<?> cSingletonList = map("java.util.Collections$SingletonList",
                ID_SINGLETON_LIST);

        Class<?> cSetFromMap = map("java.util.Collections$SetFromMap",
                ID_SET_FROM_MAP);

        Class<?> cCopiesList = map("java.util.Collections$CopiesList",
                ID_COPIES_LIST);

        Class<?> cUnmodifiableCollection = map(
                "java.util.Collections$UnmodifiableCollection",
                ID_UNMODIFIABLE_COLLECTION);

        Class<?> cUnmodifiableSet = map(
                "java.util.Collections$UnmodifiableSet", ID_UNMODIFIABLE_SET);

        Class<?> cUnmodifiableSortedSet = map(
                "java.util.Collections$UnmodifiableSortedSet",
                ID_UNMODIFIABLE_SORTED_SET);

        Class<?> cUnmodifiableList = map(
                "java.util.Collections$UnmodifiableList",
                ID_UNMODIFIABLE_LIST);

        Class<?> cUnmodifiableRandomAccessList = map(
                "java.util.Collections$UnmodifiableRandomAccessList",
                ID_UNMODIFIABLE_RANDOM_ACCESS_LIST);

        Class<?> cSynchronizedCollection = map(
                "java.util.Collections$SynchronizedCollection",
                ID_SYNCHRONIZED_COLLECTION);

        Class<?> cSynchronizedSet = map(
                "java.util.Collections$SynchronizedSet", ID_SYNCHRONIZED_SET);

        Class<?> cSynchronizedSortedSet = map(
                "java.util.Collections$SynchronizedSortedSet",
                ID_SYNCHRONIZED_SORTED_SET);

        Class<?> cSynchronizedList = map(
                "java.util.Collections$SynchronizedList",
                ID_SYNCHRONIZED_LIST);

        Class<?> cSynchronizedRandomAccessList = map(
                "java.util.Collections$SynchronizedRandomAccessList",
                ID_SYNCHRONIZED_RANDOM_ACCESS_LIST);

        Class<?> cCheckedCollection = map(
                "java.util.Collections$CheckedCollection",
                ID_CHECKED_COLLECTION);

        Class<?> cCheckedSet = map("java.util.Collections$CheckedSet",
                ID_CHECKED_SET);

        Class<?> cCheckedSortedSet = map(
                "java.util.Collections$CheckedSortedSet",
                ID_CHECKED_SORTED_SET);

        Class<?> cCheckedList = map("java.util.Collections$CheckedList",
                ID_CHECKED_LIST);

        Class<?> cCheckedRandomAccessList = map(
                "java.util.Collections$CheckedRandomAccessList",
                ID_CHECKED_RANDOM_ACCESS_LIST);

        try
        {
            fSingletonSet_element = cSingletonSet.getDeclaredField("element");

            fSingletonList_element = cSingletonList
                    .getDeclaredField("element");

            fSetFromMap_m = cSetFromMap.getDeclaredField("m");
            fSetFromMap_s = cSetFromMap.getDeclaredField("s");

            fCopiesList_n = cCopiesList.getDeclaredField("n");
            fCopiesList_element = cCopiesList.getDeclaredField("element");

            fUnmodifiableCollection_c = cUnmodifiableCollection
                    .getDeclaredField("c");
            fUnmodifiableSortedSet_ss = cUnmodifiableSortedSet
                    .getDeclaredField("ss");
            fUnmodifiableList_list = cUnmodifiableList
                    .getDeclaredField("list");

            fSynchronizedCollection_c = cSynchronizedCollection
                    .getDeclaredField("c");
            fSynchronizedCollection_mutex = cSynchronizedCollection
                    .getDeclaredField("mutex");
            fSynchronizedSortedSet_ss = cSynchronizedSortedSet
                    .getDeclaredField("ss");
            fSynchronizedList_list = cSynchronizedList
                    .getDeclaredField("list");

            fCheckedCollection_c = cCheckedCollection.getDeclaredField("c");
            fCheckedCollection_type = cCheckedCollection
                    .getDeclaredField("type");
            fCheckedSortedSet_ss = cCheckedSortedSet.getDeclaredField("ss");
            fCheckedList_list = cCheckedList.getDeclaredField("list");
        }
        catch (Exception e)
        {
            // Fail class initialization loudly; nothing works without these.
            throw new RuntimeException(e);
        }

        // field accessors
        fSingletonSet_element.setAccessible(true);
        fSingletonList_element.setAccessible(true);

        fSetFromMap_m.setAccessible(true);
        fSetFromMap_s.setAccessible(true);

        fCopiesList_n.setAccessible(true);
        fCopiesList_element.setAccessible(true);

        fUnmodifiableCollection_c.setAccessible(true);
        fUnmodifiableSortedSet_ss.setAccessible(true);
        fUnmodifiableList_list.setAccessible(true);

        fSynchronizedCollection_c.setAccessible(true);
        fSynchronizedCollection_mutex.setAccessible(true);
        fSynchronizedSortedSet_ss.setAccessible(true);
        fSynchronizedList_list.setAccessible(true);

        fCheckedCollection_c.setAccessible(true);
        fCheckedCollection_type.setAccessible(true);
        fCheckedSortedSet_ss.setAccessible(true);
        fCheckedList_list.setAccessible(true);

        // instantiators
        iSingletonSet = RuntimeEnv.newInstantiator(cSingletonSet);
        iSingletonList = RuntimeEnv.newInstantiator(cSingletonList);

        iSetFromMap = RuntimeEnv.newInstantiator(cSetFromMap);
        iCopiesList = RuntimeEnv.newInstantiator(cCopiesList);

        iUnmodifiableCollection = RuntimeEnv
                .newInstantiator(cUnmodifiableCollection);
        iUnmodifiableSet = RuntimeEnv.newInstantiator(cUnmodifiableSet);
        iUnmodifiableSortedSet = RuntimeEnv
                .newInstantiator(cUnmodifiableSortedSet);
        iUnmodifiableList = RuntimeEnv.newInstantiator(cUnmodifiableList);
        iUnmodifiableRandomAccessList = RuntimeEnv
                .newInstantiator(cUnmodifiableRandomAccessList);

        iSynchronizedCollection = RuntimeEnv
                .newInstantiator(cSynchronizedCollection);
        iSynchronizedSet = RuntimeEnv.newInstantiator(cSynchronizedSet);
        iSynchronizedSortedSet = RuntimeEnv
                .newInstantiator(cSynchronizedSortedSet);
        iSynchronizedList = RuntimeEnv.newInstantiator(cSynchronizedList);
        iSynchronizedRandomAccessList = RuntimeEnv
                .newInstantiator(cSynchronizedRandomAccessList);

        iCheckedCollection = RuntimeEnv.newInstantiator(cCheckedCollection);
        iCheckedSet = RuntimeEnv.newInstantiator(cCheckedSet);
        iCheckedSortedSet = RuntimeEnv.newInstantiator(cCheckedSortedSet);
        iCheckedList = RuntimeEnv.newInstantiator(cCheckedList);
        iCheckedRandomAccessList = RuntimeEnv
                .newInstantiator(cCheckedRandomAccessList);
    }

    /**
     * Loads {@code className} and registers it under the given wire id.
     *
     * @return the loaded class (kept by the static initializer for the
     *         reflective field lookups).
     */
    private static Class<?> map(String className, int id)
    {
        Class<?> clazz = RuntimeEnv.loadClass(className);
        __nonPublicCollections.put(clazz, id);
        return clazz;
    }

    /**
     * Returns the single-letter field name for a wire id, or {@code null}
     * for an unknown id. Also covers the shared ids imported from
     * {@link RuntimeFieldFactory} (enum set / enum / plain collection).
     */
    static String name(int number)
    {
        switch (number)
        {
            case ID_EMPTY_SET:
                return STR_EMPTY_SET;
            case ID_EMPTY_LIST:
                return STR_EMPTY_LIST;
            case ID_SINGLETON_SET:
                return STR_SINGLETON_SET;
            case ID_SINGLETON_LIST:
                return STR_SINGLETON_LIST;
            case ID_SET_FROM_MAP:
                return STR_SET_FROM_MAP;
            case ID_COPIES_LIST:
                return STR_COPIES_LIST;
            case ID_UNMODIFIABLE_COLLECTION:
                return STR_UNMODIFIABLE_COLLECTION;
            case ID_UNMODIFIABLE_SET:
                return STR_UNMODIFIABLE_SET;
            case ID_UNMODIFIABLE_SORTED_SET:
                return STR_UNMODIFIABLE_SORTED_SET;
            case ID_UNMODIFIABLE_LIST:
                return STR_UNMODIFIABLE_LIST;
            case ID_UNMODIFIABLE_RANDOM_ACCESS_LIST:
                return STR_UNMODIFIABLE_RANDOM_ACCESS_LIST;
            case ID_SYNCHRONIZED_COLLECTION:
                return STR_SYNCHRONIZED_COLLECTION;
            case ID_SYNCHRONIZED_SET:
                return STR_SYNCHRONIZED_SET;
            case ID_SYNCHRONIZED_SORTED_SET:
                return STR_SYNCHRONIZED_SORTED_SET;
            case ID_SYNCHRONIZED_LIST:
                return STR_SYNCHRONIZED_LIST;
            case ID_SYNCHRONIZED_RANDOM_ACCESS_LIST:
                return STR_SYNCHRONIZED_RANDOM_ACCESS_LIST;
            case ID_CHECKED_COLLECTION:
                return STR_CHECKED_COLLECTION;
            case ID_CHECKED_SET:
                return STR_CHECKED_SET;
            case ID_CHECKED_SORTED_SET:
                return STR_CHECKED_SORTED_SET;
            case ID_CHECKED_LIST:
                return STR_CHECKED_LIST;
            case ID_CHECKED_RANDOM_ACCESS_LIST:
                return STR_CHECKED_RANDOM_ACCESS_LIST;
            case ID_ENUM_SET:
                return STR_ENUM_SET;
            case ID_ENUM:
                return STR_ENUM;
            case ID_COLLECTION:
                return STR_COLLECTION;
            default:
                return null;
        }
    }

    /**
     * Inverse of {@link #name(int)}: maps a single-letter field name back to
     * its wire id; returns 0 for anything that is not a known 1-char name.
     */
    static int number(String name)
    {
        return name.length() != 1 ? 0 : number(name.charAt(0));
    }

    // 'a'..'u' map to ids 1..21; 'v', 'x', 'y' are the shared ids from
    // RuntimeFieldFactory ('w' is intentionally unused here).
    static int number(char c)
    {
        switch (c)
        {
            case 'a':
                return 1;
            case 'b':
                return 2;
            case 'c':
                return 3;
            case 'd':
                return 4;
            case 'e':
                return 5;
            case 'f':
                return 6;
            case 'g':
                return 7;
            case 'h':
                return 8;
            case 'i':
                return 9;
            case 'j':
                return 10;
            case 'k':
                return 11;
            case 'l':
                return 12;
            case 'm':
                return 13;
            case 'n':
                return 14;
            case 'o':
                return 15;
            case 'p':
                return 16;
            case 'q':
                return 17;
            case 'r':
                return 18;
            case 's':
                return 19;
            case 't':
                return 20;
            case 'u':
                return 21;
            case 'v':
                return ID_ENUM_SET;
            case 'x':
                return ID_ENUM;
            case 'y':
                return ID_COLLECTION;
            default:
                return 0;
        }
    }

    // Pipe schema: streams a serialized collection from input to output
    // without materializing it.
    protected final Pipe.Schema<Object> pipeSchema = new Pipe.Schema<Object>(
            this)
    {
        @Override
        protected void transfer(Pipe pipe, Input input, Output output)
                throws IOException
        {
            transferObject(this, pipe, input, output, strategy);
        }
    };

    public PolymorphicCollectionSchema(IdStrategy strategy)
    {
        super(strategy);
    }

    @Override
    public Pipe.Schema<Object> getPipeSchema()
    {
        return pipeSchema;
    }

    @Override
    public String getFieldName(int number)
    {
        return name(number);
    }

    @Override
    public int getFieldNumber(String name)
    {
        return number(name);
    }

    @Override
    public String messageFullName()
    {
        return Collection.class.getName();
    }

    @Override
    public String messageName()
    {
        return Collection.class.getSimpleName();
    }

    @Override
    public void mergeFrom(Input input, Object owner) throws IOException
    {
        setValue(readObjectFrom(input, this, owner, strategy), owner);
    }

    @Override
    public void writeTo(Output output, Object value) throws IOException
    {
        writeObjectTo(output, value, this, strategy);
    }

    /**
     * Looks up the wire id of a registered non-public collection class.
     *
     * @throws RuntimeException if the class was never registered.
     */
    static int idFrom(Class<?> clazz)
    {
        final Integer id = __nonPublicCollections.get(clazz);
        if (id == null)
            throw new RuntimeException("Unknown collection: " + clazz);

        return id.intValue();
    }

    /**
     * Creates an (uninitialized) instance for the given wire id; the empty
     * set/list are returned as the shared JDK singletons.
     */
    static Object instanceFrom(final int id)
    {
        switch (id)
        {
            case ID_EMPTY_SET:
                return Collections.EMPTY_SET;
            case ID_EMPTY_LIST:
                return Collections.EMPTY_LIST;

            case ID_SINGLETON_SET:
                return iSingletonSet.newInstance();
            case ID_SINGLETON_LIST:
                return iSingletonList.newInstance();

            case ID_SET_FROM_MAP:
                return iSetFromMap.newInstance();
            case ID_COPIES_LIST:
                return iCopiesList.newInstance();

            case ID_UNMODIFIABLE_COLLECTION:
                return iUnmodifiableCollection.newInstance();
            case ID_UNMODIFIABLE_SET:
                return iUnmodifiableSet.newInstance();
            case ID_UNMODIFIABLE_SORTED_SET:
                return iUnmodifiableSortedSet.newInstance();
            case ID_UNMODIFIABLE_LIST:
                return iUnmodifiableList.newInstance();
            case ID_UNMODIFIABLE_RANDOM_ACCESS_LIST:
                return iUnmodifiableRandomAccessList.newInstance();

            case ID_SYNCHRONIZED_COLLECTION:
                return iSynchronizedCollection.newInstance();
            case ID_SYNCHRONIZED_SET:
                return iSynchronizedSet.newInstance();
            case ID_SYNCHRONIZED_SORTED_SET:
                return iSynchronizedSortedSet.newInstance();
            case ID_SYNCHRONIZED_LIST:
                return iSynchronizedList.newInstance();
            case ID_SYNCHRONIZED_RANDOM_ACCESS_LIST:
                return iSynchronizedRandomAccessList.newInstance();

            case ID_CHECKED_COLLECTION:
                return iCheckedCollection.newInstance();
            case ID_CHECKED_SET:
                return iCheckedSet.newInstance();
            case ID_CHECKED_SORTED_SET:
                return iCheckedSortedSet.newInstance();
            case ID_CHECKED_LIST:
                return iCheckedList.newInstance();
            case ID_CHECKED_RANDOM_ACCESS_LIST:
                return iCheckedRandomAccessList.newInstance();

            default:
                throw new RuntimeException("Unknown id: " + id);
        }
    }

    /**
     * Serializes any Collection value: JDK Collections wrappers get their
     * dedicated encoding; EnumSet and ordinary collections are written with
     * their type id followed by the generic collection schema.
     */
    @SuppressWarnings("unchecked")
    static void writeObjectTo(Output output, Object value,
            Schema<?> currentSchema, IdStrategy strategy) throws IOException
    {
        // Wrappers declared inside java.util.Collections take the
        // non-public-collection path.
        if (Collections.class == value.getClass().getDeclaringClass())
        {
            writeNonPublicCollectionTo(output, value, currentSchema, strategy);
            return;
        }

        if (EnumSet.class.isAssignableFrom(value.getClass()))
        {
            strategy.writeEnumIdTo(output, ID_ENUM_SET,
                    EnumIO.getElementTypeFromEnumSet(value));

            // TODO optimize
        }
        else
        {
            strategy.writeCollectionIdTo(output, ID_COLLECTION,
                    value.getClass());
        }

        if (output instanceof StatefulOutput)
        {
            // update using the derived schema.
            ((StatefulOutput) output).updateLast(strategy.COLLECTION_SCHEMA,
                    currentSchema);
        }

        strategy.COLLECTION_SCHEMA.writeTo(output,
                (Collection<Object>) value);
    }

    /**
     * Serializes one of the java.util.Collections wrappers. Each case writes
     * the wire id first, then the wrapper's state (delegate collection,
     * singleton element, copies count, etc.) read via the cached Fields.
     */
    static void writeNonPublicCollectionTo(Output output, Object value,
            Schema<?> currentSchema, IdStrategy strategy) throws IOException
    {
        final Integer num = __nonPublicCollections.get(value.getClass());
        if (num == null)
            throw new RuntimeException("Unknown collection: "
                    + value.getClass());

        final int id = num.intValue();
        switch (id)
        {
            case ID_EMPTY_SET:
                output.writeUInt32(id, 0, false);
                break;

            case ID_EMPTY_LIST:
                output.writeUInt32(id, 0, false);
                break;

            case ID_SINGLETON_SET:
            {
                output.writeUInt32(id, 0, false);

                final Object element;
                try
                {
                    element = fSingletonSet_element.get(value);
                }
                catch (Exception e)
                {
                    throw new RuntimeException(e);
                }

                // a null element is encoded by simply omitting field 1.
                if (element != null)
                    output.writeObject(1, element, strategy.OBJECT_SCHEMA,
                            false);
                break;
            }

            case ID_SINGLETON_LIST:
            {
                output.writeUInt32(id, 0, false);

                // faster path (reflection not needed to get the single element).
                final Object element = ((List<?>) value).get(0);

                if (element != null)
                    output.writeObject(1, element, strategy.OBJECT_SCHEMA,
                            false);
                break;
            }

            case ID_SET_FROM_MAP:
            {
                final Object m;
                try
                {
                    m = fSetFromMap_m.get(value);
                }
                catch (Exception e)
                {
                    throw new RuntimeException(e);
                }

                // only the backing map is written; the key-set view is
                // re-derived from it on read.
                output.writeObject(id, m, strategy.POLYMORPHIC_MAP_SCHEMA,
                        false);
                break;
            }

            case ID_COPIES_LIST:
            {
                output.writeUInt32(id, 0, false);

                final int n = ((List<?>) value).size();
                final Object element;
                try
                {
                    element = fCopiesList_element.get(value);
                }
                catch (Exception e)
                {
                    throw new RuntimeException(e);
                }

                output.writeUInt32(1, n, false);

                if (element != null)
                    output.writeObject(2, element, strategy.OBJECT_SCHEMA,
                            false);

                break;
            }

            case ID_UNMODIFIABLE_COLLECTION:
                writeUnmodifiableCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_UNMODIFIABLE_SET:
                writeUnmodifiableCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_UNMODIFIABLE_SORTED_SET:
                writeUnmodifiableCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_UNMODIFIABLE_LIST:
                writeUnmodifiableCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_UNMODIFIABLE_RANDOM_ACCESS_LIST:
                writeUnmodifiableCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;

            case ID_SYNCHRONIZED_COLLECTION:
                writeSynchronizedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_SYNCHRONIZED_SET:
                writeSynchronizedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_SYNCHRONIZED_SORTED_SET:
                writeSynchronizedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_SYNCHRONIZED_LIST:
                writeSynchronizedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_SYNCHRONIZED_RANDOM_ACCESS_LIST:
                writeSynchronizedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;

            case ID_CHECKED_COLLECTION:
                writeCheckedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_CHECKED_SET:
                writeCheckedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_CHECKED_SORTED_SET:
                writeCheckedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_CHECKED_LIST:
                writeCheckedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;
            case ID_CHECKED_RANDOM_ACCESS_LIST:
                writeCheckedCollectionTo(output, value, currentSchema,
                        strategy, id);
                break;

            default:
                throw new RuntimeException("Should not happen.");
        }
    }

    // Writes the wrapped delegate collection of an unmodifiable* wrapper.
    private static void writeUnmodifiableCollectionTo(Output output,
            Object value, Schema<?> currentSchema, IdStrategy strategy, int id)
            throws IOException
    {
        final Object c;
        try
        {
            c = fUnmodifiableCollection_c.get(value);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }

        output.writeObject(id, c, strategy.POLYMORPHIC_COLLECTION_SCHEMA,
                false);
    }

    // Writes the wrapped delegate of a synchronized* wrapper. Only supports
    // wrappers whose mutex is the wrapper itself (the common case); a custom
    // mutex cannot be re-linked on deserialization without graph references.
    private static void writeSynchronizedCollectionTo(Output output,
            Object value, Schema<?> currentSchema, IdStrategy strategy, int id)
            throws IOException
    {
        final Object c, mutex;
        try
        {
            c = fSynchronizedCollection_c.get(value);
            mutex = fSynchronizedCollection_mutex.get(value);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }

        if (mutex != value)
        {
            // TODO for future release, introduce an interface(GraphOutput) so
            // we
            // can check whether the output can retain references.
            throw new RuntimeException(
                    "This exception is thrown to fail fast. "
                            + "Synchronized collections with a different mutex would only "
                            + "work if graph format is used, since the reference is retained.");
        }

        output.writeObject(id, c, strategy.POLYMORPHIC_COLLECTION_SCHEMA,
                false);
    }

    // Writes the wrapped delegate and the element type of a checked* wrapper.
    private static void writeCheckedCollectionTo(Output output, Object value,
            Schema<?> currentSchema, IdStrategy strategy, int id)
            throws IOException
    {
        final Object c, type;
        try
        {
            c = fCheckedCollection_c.get(value);
            type = fCheckedCollection_type.get(value);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }

        output.writeObject(id, c, strategy.POLYMORPHIC_COLLECTION_SCHEMA,
                false);
        output.writeObject(1, type, strategy.CLASS_SCHEMA, false);
    }

    /**
     * Reads the next field number and dispatches to the 5-arg overload.
     */
    static Object readObjectFrom(Input input, Schema<?> schema, Object owner,
            IdStrategy strategy) throws IOException
    {
        return readObjectFrom(input, schema, owner, strategy,
                input.readFieldNumber(schema));
    }

    /**
     * Deserializes a collection whose wire id (field number) has already been
     * read. For graph inputs the freshly created instance is registered as
     * the owner's reference before its contents are merged, so cyclic graphs
     * resolve correctly.
     */
    @SuppressWarnings("unchecked")
    static Object readObjectFrom(Input input, Schema<?> schema, Object owner,
            IdStrategy strategy, final int number) throws IOException
    {
        final boolean graph = input instanceof GraphInput;
        Object ret = null;
        switch (number)
        {
            case ID_EMPTY_SET:
                if (0 != input.readUInt32())
                    throw new ProtostuffException("Corrupt input.");

                if (graph)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(Collections.EMPTY_SET,
                            owner);
                }

                ret = Collections.EMPTY_SET;
                break;

            case ID_EMPTY_LIST:
                if (0 != input.readUInt32())
                    throw new ProtostuffException("Corrupt input.");

                if (graph)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(Collections.EMPTY_LIST,
                            owner);
                }

                ret = Collections.EMPTY_LIST;
                break;

            case ID_SINGLETON_SET:
            {
                if (0 != input.readUInt32())
                    throw new ProtostuffException("Corrupt input.");

                final Object collection = iSingletonSet.newInstance();
                if (graph)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(collection, owner);
                }

                final int next = input.readFieldNumber(schema);
                if (next == 0)
                {
                    // null element
                    return collection;
                }

                if (next != 1)
                    throw new ProtostuffException("Corrupt input");

                final Wrapper wrapper = new Wrapper();
                Object element = input.mergeObject(wrapper,
                        strategy.OBJECT_SCHEMA);
                if (!graph || !((GraphInput) input).isCurrentMessageReference())
                    element = wrapper.value;

                try
                {
                    fSingletonSet_element.set(collection, element);
                }
                catch (Exception e)
                {
                    throw new RuntimeException(e);
                }

                ret = collection;
                break;
            }

            case ID_SINGLETON_LIST:
            {
                if (0 != input.readUInt32())
                    throw new ProtostuffException("Corrupt input.");

                final Object collection = iSingletonList.newInstance();
                if (graph)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(collection, owner);
                }

                final int next = input.readFieldNumber(schema);
                if (next == 0)
                {
                    // null element
                    return collection;
                }

                if (next != 1)
                    throw new ProtostuffException("Corrupt input.");

                final Wrapper wrapper = new Wrapper();
                Object element = input.mergeObject(wrapper,
                        strategy.OBJECT_SCHEMA);
                if (!graph || !((GraphInput) input).isCurrentMessageReference())
                    element = wrapper.value;

                try
                {
                    fSingletonList_element.set(collection, element);
                }
                catch (Exception e)
                {
                    throw new RuntimeException(e);
                }

                ret = collection;
                break;
            }

            case ID_SET_FROM_MAP:
            {
                final Object collection = iSetFromMap.newInstance();
                if (graph)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(collection, owner);
                }

                final Wrapper wrapper = new Wrapper();
                Object m = input.mergeObject(wrapper,
                        strategy.POLYMORPHIC_MAP_SCHEMA);
                if (!graph || !((GraphInput) input).isCurrentMessageReference())
                    m = wrapper.value;

                try
                {
                    // restore both the backing map and its key-set view.
                    fSetFromMap_m.set(collection, m);
                    fSetFromMap_s.set(collection, ((Map<?, ?>) m).keySet());
                }
                catch (Exception e)
                {
                    throw new RuntimeException(e);
                }

                ret = collection;
                break;
            }

            case ID_COPIES_LIST:
            {
                if (0 != input.readUInt32())
                    throw new ProtostuffException("Corrupt input.");

                final Object collection = iCopiesList.newInstance();
                if (graph)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(collection, owner);
                }

                if (1 != input.readFieldNumber(schema))
                    throw new ProtostuffException("Corrupt input.");

                final int n = input.readUInt32(), next = input
                        .readFieldNumber(schema);

                if (next == 0)
                {
                    // null element
                    try
                    {
                        fCopiesList_n.setInt(collection, n);
                    }
                    catch (Exception e)
                    {
                        throw new RuntimeException(e);
                    }

                    return collection;
                }

                if (next != 2)
                    throw new ProtostuffException("Corrupt input.");

                final Wrapper wrapper = new Wrapper();
                Object element = input.mergeObject(wrapper,
                        strategy.OBJECT_SCHEMA);
                if (!graph || !((GraphInput) input).isCurrentMessageReference())
                    element = wrapper.value;

                try
                {
                    fCopiesList_n.setInt(collection, n);
                    fCopiesList_element.set(collection, element);
                }
                catch (Exception e)
                {
                    throw new RuntimeException(e);
                }

                ret = collection;
                break;
            }

            case ID_UNMODIFIABLE_COLLECTION:
                ret = readUnmodifiableCollectionFrom(input, schema, owner,
                        strategy, graph, iUnmodifiableCollection.newInstance(),
                        false, false);
                break;
            case ID_UNMODIFIABLE_SET:
                ret = readUnmodifiableCollectionFrom(input, schema, owner,
                        strategy, graph, iUnmodifiableSet.newInstance(),
                        false, false);
                break;
            case ID_UNMODIFIABLE_SORTED_SET:
                ret = readUnmodifiableCollectionFrom(input, schema, owner,
                        strategy, graph, iUnmodifiableSortedSet.newInstance(),
                        true, false);
                break;
            case ID_UNMODIFIABLE_LIST:
                ret = readUnmodifiableCollectionFrom(input, schema, owner,
                        strategy, graph, iUnmodifiableList.newInstance(),
                        false, true);
                break;
            case ID_UNMODIFIABLE_RANDOM_ACCESS_LIST:
                ret = readUnmodifiableCollectionFrom(input, schema, owner,
                        strategy, graph,
                        iUnmodifiableRandomAccessList.newInstance(), false,
                        true);
                break;

            case ID_SYNCHRONIZED_COLLECTION:
                ret = readSynchronizedCollectionFrom(input, schema, owner,
                        strategy, graph, iSynchronizedCollection.newInstance(),
                        false, false);
                break;
            case ID_SYNCHRONIZED_SET:
                ret = readSynchronizedCollectionFrom(input, schema, owner,
                        strategy, graph, iSynchronizedSet.newInstance(),
                        false, false);
                break;
            case ID_SYNCHRONIZED_SORTED_SET:
                ret = readSynchronizedCollectionFrom(input, schema, owner,
                        strategy, graph, iSynchronizedSortedSet.newInstance(),
                        true, false);
                break;
            case ID_SYNCHRONIZED_LIST:
                ret = readSynchronizedCollectionFrom(input, schema, owner,
                        strategy, graph, iSynchronizedList.newInstance(),
                        false, true);
                break;
            case ID_SYNCHRONIZED_RANDOM_ACCESS_LIST:
                ret = readSynchronizedCollectionFrom(input, schema, owner,
                        strategy, graph,
                        iSynchronizedRandomAccessList.newInstance(), false,
                        true);
                break;

            case ID_CHECKED_COLLECTION:
                ret = readCheckedCollectionFrom(input, schema, owner,
                        strategy, graph, iCheckedCollection.newInstance(),
                        false, false);
                break;
            case ID_CHECKED_SET:
                ret = readCheckedCollectionFrom(input, schema, owner,
                        strategy, graph, iCheckedSet.newInstance(), false,
                        false);
                break;
            case ID_CHECKED_SORTED_SET:
                ret = readCheckedCollectionFrom(input, schema, owner,
                        strategy, graph, iCheckedSortedSet.newInstance(),
                        true, false);
                break;
            case ID_CHECKED_LIST:
                ret = readCheckedCollectionFrom(input, schema, owner,
                        strategy, graph, iCheckedList.newInstance(), false,
                        true);
                break;
            case ID_CHECKED_RANDOM_ACCESS_LIST:
                ret = readCheckedCollectionFrom(input, schema, owner,
                        strategy, graph,
                        iCheckedRandomAccessList.newInstance(), false, true);
                break;

            case ID_ENUM_SET:
            {
                final Collection<?> es = strategy.resolveEnumFrom(input)
                        .newEnumSet();
                if (graph)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(es, owner);
                }

                // TODO enum schema
                strategy.COLLECTION_SCHEMA
                        .mergeFrom(input, (Collection<Object>) es);
                return es;
            }

            case ID_COLLECTION:
            {
                final Collection<Object> collection = strategy
                        .resolveCollectionFrom(input).newMessage();
                if (graph)
                {
                    // update the actual reference.
                    ((GraphInput) input).updateLast(collection, owner);
                }

                strategy.COLLECTION_SCHEMA.mergeFrom(input, collection);
                return collection;
            }

            default:
                throw new ProtostuffException("Corrupt input.");
        }

        // the message must terminate with field number 0.
        if (0 != input.readFieldNumber(schema))
            throw new ProtostuffException("Corrupt input.");

        return ret;
    }

    // Restores an unmodifiable* wrapper: merges the delegate collection and
    // reflectively links it into the wrapper's "c" (plus "ss"/"list" for the
    // sorted-set/list variants).
    private static Object readUnmodifiableCollectionFrom(Input input,
            Schema<?> schema, Object owner, IdStrategy strategy,
            boolean graph, Object collection, boolean ss, boolean list)
            throws IOException
    {
        if (graph)
        {
            // update the actual reference.
            ((GraphInput) input).updateLast(collection, owner);
        }

        final Wrapper wrapper = new Wrapper();
        Object c = input.mergeObject(wrapper,
                strategy.POLYMORPHIC_COLLECTION_SCHEMA);
        if (!graph || !((GraphInput) input).isCurrentMessageReference())
            c = wrapper.value;
        try
        {
            fUnmodifiableCollection_c.set(collection, c);
            if (ss)
                fUnmodifiableSortedSet_ss.set(collection, c);
            if (list)
                fUnmodifiableList_list.set(collection, c);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }

        return collection;
    }

    // Restores a synchronized* wrapper; the mutex is always re-pointed at
    // the wrapper itself (custom mutexes are rejected at write time).
    private static Object readSynchronizedCollectionFrom(Input input,
            Schema<?> schema, Object owner, IdStrategy strategy,
            boolean graph, Object collection, boolean ss, boolean list)
            throws IOException
    {
        if (graph)
        {
            // update the actual reference.
            ((GraphInput) input).updateLast(collection, owner);
        }

        final Wrapper wrapper = new Wrapper();
        Object c = input.mergeObject(wrapper,
                strategy.POLYMORPHIC_COLLECTION_SCHEMA);
        if (!graph || !((GraphInput) input).isCurrentMessageReference())
            c = wrapper.value;
        try
        {
            fSynchronizedCollection_c.set(collection, c);
            // mutex is the object itself.
            fSynchronizedCollection_mutex.set(collection, collection);
            if (ss)
                fSynchronizedSortedSet_ss.set(collection, c);
            if (list)
                fSynchronizedList_list.set(collection, c);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }

        return collection;
    }

    // Restores a checked* wrapper: merges the delegate collection and the
    // element type (field 1), then links both reflectively.
    private static Object readCheckedCollectionFrom(Input input,
            Schema<?> schema, Object owner, IdStrategy strategy,
            boolean graph, Object collection, boolean ss, boolean list)
            throws IOException
    {
        if (graph)
        {
            // update the actual reference.
            ((GraphInput) input).updateLast(collection, owner);
        }

        final Wrapper wrapper = new Wrapper();
        Object c = input.mergeObject(wrapper,
                strategy.POLYMORPHIC_COLLECTION_SCHEMA);
        if (!graph || !((GraphInput) input).isCurrentMessageReference())
            c = wrapper.value;

        if (1 != input.readFieldNumber(schema))
            throw new ProtostuffException("Corrupt input.");

        Object type = input.mergeObject(wrapper, strategy.CLASS_SCHEMA);
        if (!graph || !((GraphInput) input).isCurrentMessageReference())
            type = wrapper.value;

        try
        {
            fCheckedCollection_c.set(collection, c);
            fCheckedCollection_type.set(collection, type);
            if (ss)
                fCheckedSortedSet_ss.set(collection, c);
            if (list)
                fCheckedList_list.set(collection, c);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }

        return collection;
    }

    /**
     * Reads the next field number and dispatches to the 6-arg overload.
     */
    static void transferObject(Pipe.Schema<Object> pipeSchema, Pipe pipe,
            Input input, Output output, IdStrategy strategy)
            throws IOException
    {
        transferObject(pipeSchema, pipe, input, output, strategy,
                input.readFieldNumber(pipeSchema.wrappedSchema));
    }

    /**
     * Streams a serialized collection from input to output without
     * materializing it; mirrors the field layout produced by
     * {@link #writeObjectTo} / {@link #writeNonPublicCollectionTo}.
     */
    static void transferObject(Pipe.Schema<Object> pipeSchema, Pipe pipe,
            Input input, Output output, IdStrategy strategy, final int number)
            throws IOException
    {
        switch (number)
        {
            case ID_EMPTY_SET:
                output.writeUInt32(number, input.readUInt32(), false);
                break;

            case ID_EMPTY_LIST:
                output.writeUInt32(number, input.readUInt32(), false);
                break;

            case ID_SINGLETON_SET:
            case ID_SINGLETON_LIST:
            {
                output.writeUInt32(number, input.readUInt32(), false);

                final int next = input
                        .readFieldNumber(pipeSchema.wrappedSchema);
                if (next == 0)
                {
                    // null element
                    return;
                }

                if (next != 1)
                    throw new ProtostuffException("Corrupt input.");

                output.writeObject(1, pipe, strategy.OBJECT_PIPE_SCHEMA,
                        false);

                break;
            }

            case ID_SET_FROM_MAP:
                output.writeObject(number, pipe,
                        strategy.POLYMORPHIC_MAP_PIPE_SCHEMA, false);
                break;

            case ID_COPIES_LIST:
            {
                output.writeUInt32(number, input.readUInt32(), false);

                if (1 != input.readFieldNumber(pipeSchema.wrappedSchema))
                    throw new ProtostuffException("Corrupt input.");

                // size
                output.writeUInt32(1, input.readUInt32(), false);

                final int next = input
                        .readFieldNumber(pipeSchema.wrappedSchema);
                if (next == 0)
                {
                    // null element
                    return;
                }

                if (next != 2)
                    throw new ProtostuffException("Corrupt input.");

                output.writeObject(2, pipe, strategy.OBJECT_PIPE_SCHEMA,
                        false);

                break;
            }

            case ID_UNMODIFIABLE_COLLECTION:
            case ID_UNMODIFIABLE_SET:
            case ID_UNMODIFIABLE_SORTED_SET:
            case ID_UNMODIFIABLE_LIST:
            case ID_UNMODIFIABLE_RANDOM_ACCESS_LIST:
                output.writeObject(number, pipe,
                        strategy.POLYMORPHIC_COLLECTION_PIPE_SCHEMA, false);
                break;

            case ID_SYNCHRONIZED_COLLECTION:
            case ID_SYNCHRONIZED_SET:
            case ID_SYNCHRONIZED_SORTED_SET:
            case ID_SYNCHRONIZED_LIST:
            case ID_SYNCHRONIZED_RANDOM_ACCESS_LIST:
                output.writeObject(number, pipe,
                        strategy.POLYMORPHIC_COLLECTION_PIPE_SCHEMA, false);
                break;

            case ID_CHECKED_COLLECTION:
            case ID_CHECKED_SET:
            case ID_CHECKED_SORTED_SET:
            case ID_CHECKED_LIST:
            case ID_CHECKED_RANDOM_ACCESS_LIST:
                output.writeObject(number, pipe,
                        strategy.POLYMORPHIC_COLLECTION_PIPE_SCHEMA, false);

                // checked wrappers carry the element type as field 1.
                if (1 != input.readFieldNumber(pipeSchema.wrappedSchema))
                    throw new ProtostuffException("Corrupt input.");

                output.writeObject(1, pipe, strategy.CLASS_PIPE_SCHEMA, false);
                break;

            case ID_ENUM_SET:
                strategy.transferEnumId(input, output, number);

                if (output instanceof StatefulOutput)
                {
                    // update using the derived schema.
                    ((StatefulOutput) output).updateLast(
                            strategy.COLLECTION_PIPE_SCHEMA, pipeSchema);
                }

                // TODO use enum schema
                Pipe.transferDirect(strategy.COLLECTION_PIPE_SCHEMA, pipe,
                        input, output);
                return;

            case ID_COLLECTION:
                strategy.transferCollectionId(input, output, number);

                if (output instanceof StatefulOutput)
                {
                    // update using the derived schema.
                    ((StatefulOutput) output).updateLast(
                            strategy.COLLECTION_PIPE_SCHEMA, pipeSchema);
                }

                Pipe.transferDirect(strategy.COLLECTION_PIPE_SCHEMA, pipe,
                        input, output);
                return;

            default:
                throw new ProtostuffException("Corrupt input.");
        }

        // the message must terminate with field number 0.
        if (0 != input.readFieldNumber(pipeSchema.wrappedSchema))
            throw new ProtostuffException("Corrupt input.");
    }
}
/*
 * #%L
 * Commons utilities
 * %%
 * Copyright (C) 2017 Kiril Arabadzhiyski
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.a9ski.utils;

import java.util.Collection;

/**
 * Null-tolerant utility methods for numbers: defaults, arithmetic, sign,
 * min/max, range checks, parsing and primitive conversions.
 * <p>
 * Unless stated otherwise, methods accept {@code null} arguments and treat
 * them as "absent" rather than throwing.
 *
 * @author Kiril Arabadzhiyski
 */
public class NumberUtils extends org.apache.commons.lang3.math.NumberUtils {

	/**
	 * Returns a default value if the object passed is {@code null}.
	 *
	 * @param value
	 *            the value to test, may be {@code null}
	 * @param defaultValue
	 *            the default value to return, may be {@code null}
	 * @param <N>
	 *            number type
	 * @return value if it is not {@code null}, defaultValue otherwise
	 */
	public static <N extends Number> N defaultValue(final N value, final N defaultValue) {
		return value != null ? value : defaultValue;
	}

	/**
	 * Returns {@code value.longValue()} or a default if the value is {@code null}.
	 *
	 * @param value
	 *            the value to test, may be {@code null}
	 * @param defaultValue
	 *            the default value to return
	 * @return {@code value.longValue()} if value is not {@code null}, defaultValue otherwise
	 */
	public static long defaultLong(final Number value, final long defaultValue) {
		return value != null ? value.longValue() : defaultValue;
	}

	/**
	 * Returns {@code value.intValue()} or a default if the value is {@code null}.
	 *
	 * @param value
	 *            the value to test, may be {@code null}
	 * @param defaultValue
	 *            the default value to return
	 * @return {@code value.intValue()} if value is not {@code null}, defaultValue otherwise
	 */
	public static int defaultInt(final Number value, final int defaultValue) {
		return value != null ? value.intValue() : defaultValue;
	}

	/**
	 * Returns {@code value.doubleValue()} or a default if the value is {@code null}.
	 *
	 * @param value
	 *            the value to test, may be {@code null}
	 * @param defaultValue
	 *            the default value to return
	 * @return {@code value.doubleValue()} if value is not {@code null}, defaultValue otherwise
	 */
	public static double defaultDouble(final Number value, final double defaultValue) {
		return value != null ? value.doubleValue() : defaultValue;
	}

	/**
	 * Checks if a number is {@code null}.
	 *
	 * @param number
	 *            the number to be checked
	 * @return true if the number is {@code null}
	 */
	public static boolean isNull(final Number number) {
		return number == null;
	}

	/**
	 * Sums two {@link Long}s. If either of the numbers is {@code null}, the other one is
	 * returned. If both are {@code null}, returns {@code null}.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @return a + b
	 */
	public static Long add(final Long a, final Long b) {
		if (a != null && b != null) {
			return a + b;
		}
		return a != null ? a : b;
	}

	/**
	 * Sums two {@link Double}s. If either of the numbers is {@code null}, the other one is
	 * returned. If both are {@code null}, returns {@code null}.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @return a + b
	 */
	public static Double add(final Double a, final Double b) {
		if (a != null && b != null) {
			return a + b;
		}
		return a != null ? a : b;
	}

	/**
	 * Sums two {@link Integer}s. If either of the numbers is {@code null}, the other one is
	 * returned. If both are {@code null}, returns {@code null}.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @return a + b
	 */
	public static Integer add(final Integer a, final Integer b) {
		if (a != null && b != null) {
			return a + b;
		}
		return a != null ? a : b;
	}

	/**
	 * Subtracts two numbers. If <tt>a</tt> is {@code null} the result is -b. If both are
	 * {@code null}, returns {@code null}.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @return a - b
	 */
	public static Long subtract(final Long a, final Long b) {
		if (a != null && b != null) {
			return a - b;
		} else if (a != null) {
			return a;
		} else if (b != null) {
			return -b;
		} else {
			return null;
		}
	}

	/**
	 * Subtracts two numbers. If <tt>a</tt> is {@code null} the result is -b. If both are
	 * {@code null}, returns {@code null}.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @return a - b
	 */
	public static Double subtract(final Double a, final Double b) {
		if (a != null && b != null) {
			return a - b;
		} else if (a != null) {
			return a;
		} else if (b != null) {
			return -b;
		} else {
			return null;
		}
	}

	/**
	 * Subtracts two numbers. If <tt>a</tt> is {@code null} the result is -b. If both are
	 * {@code null}, returns {@code null}.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @return a - b
	 */
	public static Integer subtract(final Integer a, final Integer b) {
		if (a != null && b != null) {
			return a - b;
		} else if (a != null) {
			return a;
		} else if (b != null) {
			return -b;
		} else {
			return null;
		}
	}

	/**
	 * Returns the sign of the number. The return value is either -1, 0 or 1.
	 *
	 * @param x
	 *            the number
	 * @return One of the values -1, 0, 1
	 */
	public static int sign(final long x) {
		return Long.signum(x);
	}

	/**
	 * Returns the sign of the number. The return value is either -1, 0 or 1.
	 *
	 * @param x
	 *            the number
	 * @return One of the values -1, 0, 1
	 */
	public static int sign(final int x) {
		return Integer.signum(x);
	}

	/**
	 * Returns the sign of the number. The return value is either -1, 0 or 1.
	 * <p>
	 * Follows {@link Double#compare(double, double)} semantics: {@code -0.0d} yields -1 and
	 * {@code NaN} yields 1, matching the behaviour of the original boxed comparison.
	 *
	 * @param x
	 *            the number
	 * @return One of the values -1, 0, 1
	 */
	public static int sign(final double x) {
		return sign(Double.compare(x, 0.0d));
	}

	/**
	 * Returns the max value of two numbers. If one of them is {@code null}, the other one is
	 * returned. Returns {@code null} if both of them are {@code null}.
	 *
	 * @param a
	 *            first number
	 * @param b
	 *            second number
	 * @param <N>
	 *            number type
	 * @return the max value
	 */
	public static <N extends Number & Comparable<N>> N max(final N a, final N b) {
		if (a == null) {
			return b;
		} else if (b == null) {
			return a;
		} else {
			return (a.compareTo(b) > 0 ? a : b);
		}
	}

	/**
	 * Returns the min value of two numbers. If one of them is {@code null}, the other one is
	 * returned. Returns {@code null} if both of them are {@code null}.
	 *
	 * @param a
	 *            first number
	 * @param b
	 *            second number
	 * @param <N>
	 *            number type
	 * @return the min value
	 */
	public static <N extends Number & Comparable<N>> N min(final N a, final N b) {
		if (a == null) {
			return b;
		} else if (b == null) {
			return a;
		} else {
			return (a.compareTo(b) < 0 ? a : b);
		}
	}

	/**
	 * Calculates the average of a collection of numbers. {@code null} elements and
	 * non-finite values (NaN, infinity) are ignored. Returns 0 for a {@code null} or
	 * effectively-empty collection.
	 *
	 * @param values
	 *            the collection
	 * @param <N>
	 *            number type
	 * @return the average: (values[0] + values[1] + ... values[N]) / N
	 */
	public static <N extends Number> double average(final Collection<N> values) {
		if (values == null) {
			return 0;
		}
		//@formatter:off
		return values.stream()
			.filter(val -> val != null)
			.filter(val -> isValid(val.doubleValue()))
			.mapToDouble(Number::doubleValue)
			.average()
			.orElse(0D);
		//@formatter:on
	}

	/**
	 * Converts a string to {@link Long}. If the value cannot be parsed, returns defaultValue.
	 *
	 * @param str
	 *            string to be converted
	 * @param defaultValue
	 *            the defaultValue
	 * @return {@link Long} parsed value or default value if string cannot be parsed
	 */
	public static Long parseLong(final String str, final Long defaultValue) {
		if (str == null) {
			return defaultValue;
		}
		try {
			return Long.parseLong(str);
		} catch (final NumberFormatException nfe) {
			return defaultValue;
		}
	}

	/**
	 * Converts a number to {@link Long}. If the value is {@code null}, returns {@code null}.
	 *
	 * @param n
	 *            number value
	 * @return {@link Long} value or {@code null}
	 */
	public static Long toLong(final Number n) {
		return n != null ? Long.valueOf(n.longValue()) : null;
	}

	/**
	 * Converts double to long (truncating towards zero, as {@link Double#longValue()}).
	 *
	 * @param d
	 *            number value
	 * @return long value
	 */
	public static long toLongPrimitive(final double d) {
		return (long) d;
	}

	/**
	 * Converts int to long.
	 *
	 * @param i
	 *            number value
	 * @return long value
	 */
	public static long toLongPrimitive(final int i) {
		return i;
	}

	/**
	 * Converts a number to long. If the number is {@code null}, returns the <tt>defaultVal</tt>.
	 *
	 * @param n
	 *            the number
	 * @param defaultVal
	 *            the value returned in case <tt>n</tt> is {@code null}
	 * @return n converted to long, or <tt>defaultVal</tt> if n is {@code null}
	 */
	public static long toLongPrimitive(final Number n, final long defaultVal) {
		return n != null ? n.longValue() : defaultVal;
	}

	/**
	 * Converts a string to {@link Integer}. If the value cannot be parsed, returns defaultValue.
	 *
	 * @param str
	 *            string to be converted
	 * @param defaultValue
	 *            the defaultValue
	 * @return {@link Integer} parsed value or default value if string cannot be parsed
	 */
	public static Integer parseInt(final String str, final Integer defaultValue) {
		if (str == null) {
			return defaultValue;
		}
		try {
			return Integer.parseInt(str);
		} catch (final NumberFormatException nfe) {
			return defaultValue;
		}
	}

	/**
	 * Converts a number to {@link Integer}. If the value is {@code null}, returns {@code null}.
	 *
	 * @param n
	 *            number value
	 * @return {@link Integer} value or {@code null}
	 */
	public static Integer toInt(final Number n) {
		return n != null ? Integer.valueOf(n.intValue()) : null;
	}

	/**
	 * Converts double to int (truncating towards zero, as {@link Double#intValue()}).
	 *
	 * @param d
	 *            number value
	 * @return int value
	 */
	public static int toIntPrimitive(final double d) {
		return (int) d;
	}

	/**
	 * Converts long to int (narrowing conversion; high bits are discarded).
	 *
	 * @param l
	 *            number value
	 * @return int value
	 */
	public static int toIntPrimitive(final long l) {
		return (int) l;
	}

	/**
	 * Converts a number to int. If the number is {@code null}, returns the <tt>defaultVal</tt>.
	 *
	 * @param n
	 *            the number
	 * @param defaultVal
	 *            the value returned in case <tt>n</tt> is {@code null}
	 * @return n converted to int, or <tt>defaultVal</tt> if n is {@code null}
	 */
	public static int toIntPrimitive(final Number n, final int defaultVal) {
		return n != null ? n.intValue() : defaultVal;
	}

	/**
	 * Converts a string to {@link Double}. If the value cannot be parsed, returns defaultValue.
	 *
	 * @param str
	 *            string to be converted
	 * @param defaultValue
	 *            the defaultValue
	 * @return {@link Double} parsed value or default value if string cannot be parsed
	 */
	public static Double parseDouble(final String str, final Double defaultValue) {
		if (str == null) {
			return defaultValue;
		}
		try {
			return Double.parseDouble(str);
		} catch (final NumberFormatException nfe) {
			return defaultValue;
		}
	}

	/**
	 * Converts a number to {@link Double}. If the value is {@code null}, returns {@code null}.
	 *
	 * @param n
	 *            number value
	 * @return {@link Double} value or {@code null}
	 */
	public static Double toDouble(final Number n) {
		return n != null ? Double.valueOf(n.doubleValue()) : null;
	}

	/**
	 * Converts long to double.
	 *
	 * @param l
	 *            number value
	 * @return double value
	 */
	public static double toDoublePrimitive(final long l) {
		return l;
	}

	/**
	 * Converts int to double.
	 *
	 * @param i
	 *            number value
	 * @return double value
	 */
	public static double toDoublePrimitive(final int i) {
		return i;
	}

	/**
	 * Converts a number to double. If the number is {@code null}, returns the <tt>defaultVal</tt>.
	 *
	 * @param n
	 *            the number
	 * @param defaultVal
	 *            the value returned in case <tt>n</tt> is {@code null}
	 * @return n converted to double, or <tt>defaultVal</tt> if n is {@code null}
	 */
	public static double toDoublePrimitive(final Number n, final double defaultVal) {
		return n != null ? n.doubleValue() : defaultVal;
	}

	/**
	 * Checks if a double is a valid number (not {@code null}, not infinite, not NaN).
	 *
	 * @param d
	 *            the double
	 * @return true if d is not {@code null}, d is not infinite and d is not NaN
	 */
	public static boolean isValid(final Double d) {
		return d != null && !d.isInfinite() && !d.isNaN();
	}

	/**
	 * Checks if x is in the range of [a,b] (inclusive). The bounds may be supplied in either
	 * order; a {@code null} bound means that side of the range is unbounded.
	 *
	 * @param x
	 *            the number. If {@code null}, returns false
	 * @param a
	 *            the range start value.
	 * @param b
	 *            the range end value.
	 * @param <N>
	 *            number type
	 * @return true if x is in the range of [a,b] (inclusive)
	 */
	public static <N extends Number & Comparable<N>> boolean isInRange(final N x, N a, N b) {
		if (x == null) {
			return false;
		} else if (a != null && b != null) {
			// normalize so that a <= b before testing
			if (a.compareTo(b) > 0) {
				final N t = a;
				a = b;
				b = t;
			}
			return a.compareTo(x) <= 0 && x.compareTo(b) <= 0;
		} else {
			if (a != null && a.compareTo(x) > 0) {
				return false;
			}
			if (b != null && b.compareTo(x) < 0) {
				return false;
			}
			return true;
		}
	}

	/**
	 * Checks if x is not in the range of [a,b] (inclusive).
	 *
	 * @param x
	 *            the number. If {@code null}, returns true
	 * @param a
	 *            the range start value.
	 * @param b
	 *            the range end value.
	 * @param <N>
	 *            number type
	 * @return true if x is not in the range of [a,b] (inclusive)
	 */
	public static <N extends Number & Comparable<N>> boolean isNotInRange(final N x, final N a, final N b) {
		return !isInRange(x, a, b);
	}

	/**
	 * Checks if two numbers are equal.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @return true if a is equal to b
	 */
	public static boolean equals(final double a, final double b) { // NOSONAR
		return Double.compare(a, b) == 0;
	}

	/**
	 * Checks if two numbers are equal. Two {@code null} values are considered equal.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @param <N>
	 *            number type
	 * @return true if a is equal to b
	 */
	public static <N extends Number & Comparable<N>> boolean equals(final N a, final N b) { // NOSONAR
		return (a != null && b != null && a.compareTo(b) == 0) || (a == null && b == null);
	}

	/**
	 * Checks if two numbers are not equal.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @return true if a is not equal to b
	 */
	public static boolean notEquals(final double a, final double b) {
		return !equals(a, b);
	}

	/**
	 * Checks if two numbers are not equal.
	 *
	 * @param a
	 *            the first number
	 * @param b
	 *            the second number
	 * @param <N>
	 *            number type
	 * @return true if a is not equal to b
	 */
	public static <N extends Number & Comparable<N>> boolean notEquals(final N a, final N b) {
		return !equals(a, b);
	}
}
/*
 * Copyright (c) 2007 BUSINESS OBJECTS SOFTWARE LIMITED
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright notice,
 *       this list of conditions and the following disclaimer.
 *
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *
 *     * Neither the name of Business Objects nor the names of its contributors
 *       may be used to endorse or promote products derived from this software
 *       without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * ResourceJarStore.java
 * Creation date: Nov 23, 2004.
 * By: Edward Lam
 */
package org.openquark.cal.services;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

import org.openquark.cal.compiler.ModuleName;

/**
 * Warning- this class should only be used by the CAL services implementation. It is not part of the
 * external API of the CAL platform.
 * <p>
 * A read-only resource store based on compressed resources in a jar file.
 * All mutation operations (remove, rename, write) fail by reporting an error status.
 * @author Edward Lam
 */
public abstract class ResourceJarStore extends AbstractResourcePathStore {

    /** Error message reported by every attempted mutation of this read-only store. */
    private static final String NOT_MODIFIABLE_MESSAGE = "Jar Store not modifiable.";

    /** The manager for the jar file on which this store is based. */
    private final JarFileManager jarFileManager;

    /**
     * Constructor for a ResourceJarStore.
     * @param jarFileManager the manager for the jar file on which this store is based.
     * @param resourceType the resource type for the associated resource.
     * @param pathMapper the path mapper for this store.
     */
    public ResourceJarStore(JarFileManager jarFileManager, String resourceType, ResourcePathMapper pathMapper) {
        super(resourceType, pathMapper);
        this.jarFileManager = jarFileManager;
    }

    /**
     * Adds the standard "not modifiable" error to the given status.
     * @param status the status tracker to which the error is added.
     */
    private static void addNotModifiableError(Status status) {
        status.add(new Status(Status.Severity.ERROR, NOT_MODIFIABLE_MESSAGE, null));
    }

    /**
     * @param resourceName the name of the resource.
     * @return the JarEntry for the feature, or null if the feature could not be found
     */
    private JarEntry getJarEntry(ResourceName resourceName) {
        ResourcePath.FilePath resourceFile = getResourcePath(resourceName);
        return jarFileManager.getJarFile().getJarEntry(resourceFile.getPathStringMinusSlash());
    }

    /**
     * {@inheritDoc}
     */
    public List<ResourceName> getFolderResourceNames(ResourcePath.Folder folder) {
        return ModulePackager.getFolderResourceNames(jarFileManager, folder, getPathMapper());
    }

    /**
     * {@inheritDoc}
     */
    public List<ResourceName> getFilteredFolderResourceNames(ResourcePath.Folder folder, ResourceName.Filter filter) {
        return ModulePackager.getFilteredFolderResourceNames(jarFileManager, folder, getPathMapper(), filter);
    }

    /**
     * {@inheritDoc}
     * Always fails: this store is read-only.
     */
    public void removeResource(ResourceName resourceName, Status removeStatus) {
        addNotModifiableError(removeStatus);
    }

    /**
     * {@inheritDoc}
     * Always fails: this store is read-only.
     */
    public void removeAllResources(Status removeStatus) {
        addNotModifiableError(removeStatus);
    }

    /**
     * {@inheritDoc}
     * Always fails: this store is read-only.
     */
    public boolean renameResource(ResourceName oldResourceName, ResourceName newResourceName,
                                  ResourceStore newResourceStore, Status renameStatus) {
        addNotModifiableError(renameStatus);
        return false;
    }

    /**
     * {@inheritDoc}
     */
    public boolean hasFeature(ResourceName resourceName) {
        return getJarEntry(resourceName) != null;
    }

    /**
     * {@inheritDoc}
     * Returns null both when the resource does not exist and when the jar
     * entry exists but cannot be read.
     */
    public InputStream getInputStream(ResourceName resourceName) {
        JarEntry jarEntry = getJarEntry(resourceName);
        if (jarEntry != null) {
            try {
                return jarFileManager.getJarFile().getInputStream(jarEntry);
            } catch (IOException ignored) {
                // Best-effort read: an I/O failure on the jar is reported to the
                // caller as a missing resource (null stream) rather than an exception.
            }
        }
        return null;
    }

    /**
     * {@inheritDoc}
     * Always fails: this store is read-only.
     */
    public OutputStream getOutputStream(ResourceName resourceName, Status saveStatus) {
        addNotModifiableError(saveStatus);
        return null;
    }

    /**
     * {@inheritDoc}
     */
    public String getDebugInfo(ResourceName resourceName) {
        JarEntry jarEntry = getJarEntry(resourceName);
        if (jarEntry == null) {
            return null;
        }

        String jarFileName = jarFileManager.getJarFile().getName();

        // Distinguish Car-jars and Cars from plain jars by their file extension.
        String type;
        if (jarFileName.endsWith(CarBuilder.DOT_CAR_DOT_JAR)) {
            type = "Car-jar";
        } else if (jarFileName.endsWith("." + CarPathMapper.INSTANCE.getFileExtension())) {
            type = "Car";
        } else {
            type = "jar";
        }
        return "from " + type + ": " + jarFileName + ", entry: " + jarEntry;
    }

    /**
     * {@inheritDoc}
     * Returns 0 when the entry is missing or carries no modification time
     * (JarEntry.getTime() returns -1 in that case).
     */
    public long getTimeStamp(ResourceName resourceName) {
        JarEntry jarEntry = getJarEntry(resourceName);
        if (jarEntry == null) {
            return 0L;
        }
        long entryTime = jarEntry.getTime();
        return (entryTime == -1) ? 0 : entryTime;
    }

    /**
     * {@inheritDoc}
     */
    public boolean isWriteable() {
        return false;
    }

    /**
     * {@inheritDoc}
     */
    public boolean isWriteable(ResourceName resourceName) {
        return false;
    }

    /**
     * {@inheritDoc}
     */
    public boolean isRemovable(ResourceName resourceName) {
        return false;
    }

    /**
     * @return the jar file on which this store is based.
     */
    public JarFile getJarFile() {
        return jarFileManager.getJarFile();
    }

    /**
     * @return the jar file manager associated with this store.
     */
    public JarFileManager getJarFileManager() {
        return jarFileManager;
    }

    /**
     * A ResourceJarStore for module resources.
     * @author Edward Lam
     */
    public static abstract class Module extends ResourceJarStore implements ResourceStore.Module {

        /**
         * Constructor for a ResourceJarStore.Module.
         * @param jarFileManager the manager for the jar file on which this store is based.
         * @param resourceType the resource type for the associated resource.
         * @param pathMapper the path mapper for this store.
         */
        public Module(JarFileManager jarFileManager, String resourceType, ResourcePathMapper pathMapper) {
            super(jarFileManager, resourceType, pathMapper);
        }

        /**
         * {@inheritDoc}
         * Always fails: this store is read-only.
         */
        public void removeModuleResources(ModuleName moduleName, Status removeStatus) {
            addNotModifiableError(removeStatus);
        }

        /**
         * {@inheritDoc}
         */
        public final Iterator<WorkspaceResource> getResourceIterator() {
            return ModuleResourceStoreHelper.getResourceIterator(this);
        }

        /**
         * {@inheritDoc}
         */
        public final Iterator<WorkspaceResource> getResourceIterator(ModuleName moduleName) {
            return ModuleResourceStoreHelper.getResourceIterator(this, moduleName);
        }
    }
}
package nz.ac.auckland.application;

import java.io.File;
import java.io.IOException;
import java.util.Optional;

import org.apache.log4j.Logger;

import javafx.application.Application;
import javafx.application.Platform;
import javafx.event.Event;
import javafx.event.EventDispatchChain;
import javafx.event.EventDispatcher;
import javafx.event.EventHandler;
import javafx.fxml.FXMLLoader;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.ButtonBar.ButtonData;
import javafx.scene.control.ButtonType;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.VBox;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.stage.WindowEvent;
import nz.ac.auckland.model.ModelHelper;
import nz.ac.auckland.model.Project;
import nz.ac.auckland.model.VidevoxException;
import nz.ac.auckland.view.PlayerViewController;
import nz.ac.auckland.view.RootLayoutController;
import nz.ac.auckland.view.TTSViewController;

/**
 * JavaFX entry point for the VIDEVOX video editor. Builds the root layout,
 * swaps views in and out of its center pane, and owns the save/save-as/export
 * dialogs.
 *
 * @author Fraser
 */
public class VidevoxApplication extends Application {

	private static final Logger logger = Logger.getLogger(VidevoxApplication.class);

	/**
	 * The window for the main part of the app to be loaded into
	 */
	private Stage _primaryStage;

	/**
	 * The shell layout with menu bars etc.
	 */
	private BorderPane _rootLayout;

	/**
	 * Controller for the root layout
	 */
	private RootLayoutController _controller;

	/** View shown when the application (re)loads; defaults to the preview player. */
	private ViewType _viewOnShow = ViewType.PREVIEW;

	public enum ViewType {
		PREVIEW, EDIT
	}

	/**
	 * Loads the player (preview) view into the center of the root layout and
	 * wires its controller back to this application.
	 */
	public void showPlayerView() {
		try {
			// Load PlayerView
			FXMLLoader loader = new FXMLLoader();
			loader.setLocation(VidevoxApplication.class.getResource("/nz/ac/auckland/view/PlayerView.fxml"));
			AnchorPane playerView = (AnchorPane) loader.load();
			// Place it inside the root layout
			_rootLayout.setCenter(playerView);
			// Give the controller class the references it wants
			PlayerViewController controller = loader.getController();
			controller.setMainApp(this);
			// Set view toggle buttons
			_controller.setViewToggle(RootLayoutController.PREVIEW);
		} catch (IOException e) {
			// There is not much use trying to recover at this point
			logger.error("showPlayerView()", e);
		}
	}

	/**
	 * Loads the shell layout (menu bars etc.), shows the primary stage, and
	 * installs a close handler that prompts to save unsaved changes.
	 */
	private void initRootLayout() {
		try {
			// Load RootLayout
			FXMLLoader loader = new FXMLLoader();
			loader.setLocation(this.getClass().getClassLoader().getResource("nz/ac/auckland/view/RootLayout.fxml"));
			_rootLayout = (BorderPane) loader.load();
			// Show on stage
			Scene scene = new Scene(_rootLayout);
			_primaryStage.setScene(scene);
			_primaryStage.show();
			_primaryStage.setMinHeight(550);
			_primaryStage.setMinWidth(750);
			// Set event handler on the window. Do not let it close without
			// prompting to save if unsaved
			scene.getWindow().setOnCloseRequest(new EventHandler<WindowEvent>() {
				public void handle(WindowEvent ev) {
					if (!Project.getProject().isSaved()) {
						ev.consume();
						saveAndClose();
					}
				}
			});
			// Give controller access to main app
			_controller = loader.getController();
			_controller.setMainApp(this);
		} catch (IOException e) {
			logger.error("initRootLayout()", e);
		}
	}

	/**
	 * Exits the application, first prompting the user to save, discard, or
	 * cancel if there are unsaved changes. Dismissing the dialog (e.g. via the
	 * window close button) is treated as cancel.
	 */
	public void saveAndClose() {
		if (!Project.getProject().isSaved()) {
			// Ask to save, exit without saving, or cancel
			Alert alert = new Alert(AlertType.WARNING);
			alert.setTitle("Save Changes Before Exit");
			alert.setHeaderText("You Have Unsaved Changes");
			alert.setContentText("You have unsaved changes, do you want to save them now?");
			ButtonType saveButton = new ButtonType("Save");
			ButtonType discardButton = new ButtonType("Discard");
			ButtonType cancel = new ButtonType("Cancel", ButtonData.CANCEL_CLOSE);
			alert.getButtonTypes().setAll(saveButton, discardButton, cancel);
			Optional<ButtonType> result = alert.showAndWait();
			// A dismissed dialog yields an empty Optional; treat it as cancel
			// rather than calling result.get() and risking NoSuchElementException.
			ButtonType choice = result.orElse(cancel);
			if (choice == saveButton) {
				try {
					save();
				} catch (IOException e) {
					logger.error("saveAndClose(): failed to save project before exit", e);
					System.exit(1);
				}
				Platform.exit();
			} else if (choice == discardButton) {
				Platform.exit();
			} else {
				return;
			}
		} else {
			Platform.exit();
		}
	}

	/**
	 * Shows a generic error dialog carrying the exception's message.
	 *
	 * @param e the exception to present to the user
	 */
	public static void showExceptionDialog(VidevoxException e) {
		// Show a generic dialog with the exception message
		Alert alert = new Alert(AlertType.ERROR);
		alert.setTitle("ERROR");
		alert.setHeaderText("An Error has Occurred");
		alert.setContentText(e.getMessage());
		alert.showAndWait();
	}

	/**
	 * Saves the current project to its known location, or delegates to
	 * {@link #saveAs()} if it has never been saved.
	 *
	 * @throws IOException if writing the project file fails
	 */
	public void save() throws IOException {
		Project project = Project.getProject();
		if (project.getLocation() != null) {
			try {
				project.toFile(project.getLocation());
			} catch (VidevoxException e) {
				showExceptionDialog(e);
			}
		} else {
			saveAs();
		}
	}

	/**
	 * Prompts for a destination and saves the project there, enforcing the
	 * .vvox extension. Does nothing if the user cancels the chooser.
	 */
	public void saveAs() {
		FileChooser fileChooser = new FileChooser();
		fileChooser.setTitle("Save the project");
		// Set extension filter to only see .vvox project files
		FileChooser.ExtensionFilter extFilter = new FileChooser.ExtensionFilter("Project file", "*.vvox");
		fileChooser.getExtensionFilters().add(extFilter);
		File file = fileChooser.showSaveDialog(_primaryStage);
		if (file == null) {
			return;
		}
		file = ModelHelper.enforceFileExtension(file, ".vvox");
		try {
			Project.getProject().toFile(file);
		} catch (VidevoxException e) {
			VidevoxApplication.showExceptionDialog(e);
		} catch (IOException e) {
			logger.error("saveAs(): failed to write project file", e);
		}
	}

	public Stage getStage() {
		return _primaryStage;
	}

	@Override
	public void start(Stage primaryStage) {
		// Set the primaryStage as the window for the application
		this._primaryStage = primaryStage;
		// Set a title to appear on the window
		this._primaryStage.setTitle("VIDEVOX - video editor");
		// Initiate the root layout of the application
		initRootLayout();
		// Set/reset the views
		reset();
	}

	public static void main(String[] args) {
		launch(args);
	}

	/**
	 * Resets the entire GUI from the root layout down, mostly for after a new
	 * GUI is loaded
	 */
	public void reset() {
		// Decide which view to show
		switch (_viewOnShow) {
		case PREVIEW:
			showPlayerView();
			break;
		default:
			showPlayerView();
			break;
		}
	}

	/**
	 * Opens the text-to-speech dialog as a modal-like window. While it is
	 * open, events aimed at the primary stage are redirected to refocus the
	 * TTS window; the original dispatcher is restored afterwards.
	 */
	public void showTTS() {
		try {
			logger.trace("entered showTTS");
			FXMLLoader loader = new FXMLLoader();
			loader.setLocation(this.getClass().getClassLoader().getResource("nz/ac/auckland/view/TTSView.fxml"));
			logger.debug(
					"location: " + this.getClass().getClassLoader().getResource("nz/ac/auckland/view/TTSView.fxml"));
			VBox ttsView = (VBox) loader.load();
			logger.trace("Loaded ttsView from fxml");
			Stage stage = new Stage();
			stage.setTitle("VIDEVOX Text-to-Speech");
			stage.setScene(new Scene(ttsView));
			// Keep a pointer to the Primary Stage's Event Dispatcher for later
			EventDispatcher ev = _primaryStage.getEventDispatcher();
			// Put in a new Event Dispatcher while the TTS view is open
			_primaryStage.setEventDispatcher(new EventDispatcher() {
				@Override
				public Event dispatchEvent(Event event, EventDispatchChain tail) {
					// Swallow the event and push focus back to the TTS window
					stage.requestFocus();
					return null;
				}
			});
			logger.trace("Showing ttsView");
			TTSViewController controller = loader.getController();
			controller.setMainApp(this);
			stage.showAndWait();
			// Put the Event Dispatcher back and reset the app in case TTS was
			// added
			_primaryStage.setEventDispatcher(ev);
			reset();
		} catch (IOException e) {
			logger.debug("error: " + e.getMessage());
			logger.error("showTTS()", e);
			VidevoxApplication.showExceptionDialog(new VidevoxException(e.getMessage()));
		}
	}

	/**
	 * Prompts for an mp3 file and adds it to the current player, then resets
	 * the view. Does nothing if the user cancels the chooser.
	 */
	public void addAudio() {
		FileChooser fileChooser = new FileChooser();
		fileChooser.setTitle("Select an Audio file to use");
		// Set visible extensions
		FileChooser.ExtensionFilter extFilter = new FileChooser.ExtensionFilter("Audio Files", "*.mp3");
		fileChooser.getExtensionFilters().add(extFilter);
		File file = fileChooser.showOpenDialog(_primaryStage);
		if (file != null) {
			VidevoxPlayer.getPlayer().addAudio(file);
			reset();
		}
	}

	/**
	 * Prompts for a destination and compiles the project to an mp4 on a
	 * background daemon thread. Does nothing if the user cancels the chooser.
	 */
	public void export() {
		FileChooser fileChooser = new FileChooser();
		fileChooser.setTitle("Export the project to an mp4");
		// Set extension filter to only see .mp4 files
		FileChooser.ExtensionFilter extFilter = new FileChooser.ExtensionFilter("MPEG-4", "*.mp4");
		fileChooser.getExtensionFilters().add(extFilter);
		File file = fileChooser.showSaveDialog(_primaryStage);
		if (file == null) {
			return;
		}
		file = ModelHelper.enforceFileExtension(file, ".mp4");
		Thread th = new Thread(new VideoCompiler(file));
		th.setDaemon(true);
		th.start();
	}
}
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package org.apache.isis.core.metamodel.specloader.specimpl;

import java.util.List;
import java.util.Objects;

import org.apache.isis.applib.Identifier;
import org.apache.isis.applib.annotation.When;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.applib.filter.Filter;
import org.apache.isis.applib.services.bookmark.Bookmark;
import org.apache.isis.applib.services.command.Command;
import org.apache.isis.applib.services.command.CommandContext;
import org.apache.isis.core.commons.lang.StringExtensions;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.core.metamodel.consent.Consent;
import org.apache.isis.core.metamodel.consent.InteractionInitiatedBy;
import org.apache.isis.core.metamodel.consent.InteractionResult;
import org.apache.isis.core.metamodel.facetapi.Facet;
import org.apache.isis.core.metamodel.facetapi.FacetHolder;
import org.apache.isis.core.metamodel.facetapi.FeatureType;
import org.apache.isis.core.metamodel.facetapi.MultiTypedFacet;
import org.apache.isis.core.metamodel.facets.FacetedMethod;
import org.apache.isis.core.metamodel.facets.actions.action.invocation.CommandUtil;
import org.apache.isis.core.metamodel.facets.actions.command.CommandFacet;
import org.apache.isis.core.metamodel.facets.all.describedas.DescribedAsFacet;
import org.apache.isis.core.metamodel.facets.all.help.HelpFacet;
import org.apache.isis.core.metamodel.facets.all.hide.HiddenFacet;
import org.apache.isis.core.metamodel.facets.all.named.NamedFacet;
import org.apache.isis.core.metamodel.facets.object.mixin.MixinFacet;
import org.apache.isis.core.metamodel.interactions.AccessContext;
import org.apache.isis.core.metamodel.interactions.DisablingInteractionAdvisor;
import org.apache.isis.core.metamodel.interactions.HidingInteractionAdvisor;
import org.apache.isis.core.metamodel.interactions.InteractionContext;
import org.apache.isis.core.metamodel.interactions.InteractionUtils;
import org.apache.isis.core.metamodel.interactions.UsabilityContext;
import org.apache.isis.core.metamodel.interactions.VisibilityContext;
import org.apache.isis.core.metamodel.services.ServicesInjector;
import org.apache.isis.core.metamodel.services.command.CommandDtoServiceInternal;
import org.apache.isis.core.metamodel.services.persistsession.PersistenceSessionServiceInternal;
import org.apache.isis.core.metamodel.spec.ObjectSpecification;
import org.apache.isis.core.metamodel.spec.feature.ObjectAction;
import org.apache.isis.core.metamodel.spec.feature.ObjectMember;
import org.apache.isis.core.metamodel.specloader.SpecificationLoader;
import org.apache.isis.schema.cmd.v1.CommandDto;
import org.apache.isis.schema.utils.CommandDtoUtils;

/**
 * Base implementation for all object members (properties, collections and actions).
 *
 * <p>
 * Wraps a {@link FacetedMethod} and delegates all facet lookups to it; also provides the
 * shared machinery for hidden/disabled checks (via the abstract interaction-context
 * factories), for mixin naming, and for populating the current {@link Command} when a
 * member is invoked by the user.
 */
public abstract class ObjectMemberAbstract implements ObjectMember {

    /**
     * Null-tolerant convenience: loads the {@link ObjectSpecification} for {@code type},
     * returning {@code null} if {@code type} itself is {@code null}.
     */
    public static ObjectSpecification getSpecification(final SpecificationLoader specificationLookup, final Class<?> type) {
        return type == null ? null : specificationLookup.loadSpecification(type);
    }

    //region > fields

    // derived from the faceted method's identifier (member name); never null
    private final String id;
    private final FacetedMethod facetedMethod;
    private final FeatureType featureType;
    // the following three are all obtained from the injected ServicesInjector in the constructor
    private final SpecificationLoader specificationLoader;
    private final ServicesInjector servicesInjector;
    private final PersistenceSessionServiceInternal persistenceSessionServiceInternal;

    //endregion

    /**
     * @param facetedMethod  holds the facets and the {@link Identifier} from which the member's
     *                       id is derived; the member name must be non-null
     * @param featureType    whether this member is a property, collection or action
     * @param servicesInjector  source of the {@link SpecificationLoader} and
     *                       {@link PersistenceSessionServiceInternal} dependencies
     * @throws IllegalArgumentException if the identifier's member name is {@code null}
     */
    protected ObjectMemberAbstract(
            final FacetedMethod facetedMethod,
            final FeatureType featureType,
            final ServicesInjector servicesInjector) {
        final String id = facetedMethod.getIdentifier().getMemberName();
        if (id == null) {
            throw new IllegalArgumentException("Id must always be set");
        }

        this.facetedMethod = facetedMethod;
        this.featureType = featureType;
        this.id = id;

        this.servicesInjector = servicesInjector;
        this.specificationLoader = servicesInjector.getSpecificationLoader();
        this.persistenceSessionServiceInternal = servicesInjector.getPersistenceSessionServiceInternal();
    }

    //region > Identifiers

    @Override
    public String getId() {
        return id;
    }

    @Override
    public Identifier getIdentifier() {
        return getFacetedMethod().getIdentifier();
    }

    @Override
    public FeatureType getFeatureType() {
        return featureType;
    }

    //endregion

    //region > Facets

    /**
     * The underlying {@link FacetedMethod}, which acts as this member's {@link FacetHolder};
     * all of the facet accessor/mutator methods below simply delegate to it.
     */
    public FacetedMethod getFacetedMethod() {
        return facetedMethod;
    }

    protected FacetHolder getFacetHolder() {
        return getFacetedMethod();
    }

    @Override
    public boolean containsFacet(final Class<? extends Facet> facetType) {
        return getFacetHolder().containsFacet(facetType);
    }

    @Override
    public boolean containsDoOpFacet(final Class<? extends Facet> facetType) {
        return getFacetHolder().containsDoOpFacet(facetType);
    }

    @Override
    public <T extends Facet> T getFacet(final Class<T> cls) {
        return getFacetHolder().getFacet(cls);
    }

    @Override
    public Class<? extends Facet>[] getFacetTypes() {
        return getFacetHolder().getFacetTypes();
    }

    @Override
    public List<Facet> getFacets(final Filter<Facet> filter) {
        return getFacetHolder().getFacets(filter);
    }

    @Override
    public void addFacet(final Facet facet) {
        getFacetHolder().addFacet(facet);
    }

    @Override
    public void addFacet(final MultiTypedFacet facet) {
        getFacetHolder().addFacet(facet);
    }

    @Override
    public void removeFacet(final Facet facet) {
        getFacetHolder().removeFacet(facet);
    }

    @Override
    public void removeFacet(final Class<? extends Facet> facetType) {
        getFacetHolder().removeFacet(facetType);
    }

    //endregion

    //region > Name, Description, Help (convenience for facets)

    /**
     * Return the default label for this member. This is based on the name of
     * this member.
     *
     * <p>
     * Taken from the {@link NamedFacet} if it supplies a value; otherwise falls back to
     * deriving a natural-language name from {@link #getId()}.
     *
     * @see #getId()
     */
    @Override
    public String getName() {
        final NamedFacet facet = getFacet(NamedFacet.class);
        final String name = facet.value();
        if (name != null) {
            return name;
        }
        else {
            // this should now be redundant, see NamedFacetDefault
            // (kept as a safety net in case no NamedFacet value was installed)
            return StringExtensions.asNaturalName2(getId());
        }
    }

    /** The value of the {@link DescribedAsFacet}, if any. */
    @Override
    public String getDescription() {
        final DescribedAsFacet facet = getFacet(DescribedAsFacet.class);
        return facet.value();
    }

    /** The value of the {@link HelpFacet}, if any. */
    @Override
    public String getHelp() {
        final HelpFacet facet = getFacet(HelpFacet.class);
        return facet.value();
    }

    //endregion

    //region > Hidden (or visible)

    /**
     * Create an {@link InteractionContext} to represent an attempt to view this
     * member (that is, to check if it is visible or not).
     *
     * <p>
     * Typically it is easier to just call
     * {@link ObjectMember#isVisible(ObjectAdapter, InteractionInitiatedBy, Where)}; this is
     * provided as API for symmetry with interactions (such as
     * {@link AccessContext} accesses) that have no corresponding vetoing methods.
     */
    protected abstract VisibilityContext<?> createVisibleInteractionContext(
            final ObjectAdapter targetObjectAdapter,
            final InteractionInitiatedBy interactionInitiatedBy,
            final Where where);

    /**
     * Whether a {@link HiddenFacet} marks this member as hidden unconditionally:
     * the facet must be present, not a no-op, apply {@link When#ALWAYS}, and cover
     * every location ({@link Where#EVERYWHERE} or {@link Where#ANYWHERE}).
     */
    @Override
    public boolean isAlwaysHidden() {
        final HiddenFacet facet = getFacet(HiddenFacet.class);
        return facet != null &&
                !facet.isNoop() &&
                facet.when() == When.ALWAYS &&
                (facet.where() == Where.EVERYWHERE || facet.where() == Where.ANYWHERE)
                ;
    }

    /**
     * Loops over all {@link HidingInteractionAdvisor} {@link Facet}s and
     * returns <tt>true</tt> only if none hide the member.
     */
    @Override
    public Consent isVisible(
            final ObjectAdapter target,
            final InteractionInitiatedBy interactionInitiatedBy,
            final Where where) {
        return isVisibleResult(target, interactionInitiatedBy, where).createConsent();
    }

    // builds the visibility context (subclass-specific) and evaluates it
    private InteractionResult isVisibleResult(
            final ObjectAdapter target,
            final InteractionInitiatedBy interactionInitiatedBy,
            final Where where) {
        final VisibilityContext<?> ic = createVisibleInteractionContext(target, interactionInitiatedBy, where);
        return InteractionUtils.isVisibleResult(this, ic);
    }

    //endregion

    //region > Disabled (or enabled)

    /**
     * Create an {@link InteractionContext} to represent an attempt to
     * use this member (that is, to check if it is usable or not).
     *
     * <p>
     * Typically it is easier to just call
     * {@link ObjectMember#isUsable(ObjectAdapter, InteractionInitiatedBy, Where)}; this is
     * provided as API for symmetry with interactions (such as
     * {@link AccessContext} accesses) that have no corresponding vetoing methods.
     */
    protected abstract UsabilityContext<?> createUsableInteractionContext(
            final ObjectAdapter target,
            final InteractionInitiatedBy interactionInitiatedBy,
            final Where where);

    /**
     * Loops over all {@link DisablingInteractionAdvisor} {@link Facet}s and
     * returns <tt>true</tt> only if none disables the member.
     */
    @Override
    public Consent isUsable(
            final ObjectAdapter target,
            final InteractionInitiatedBy interactionInitiatedBy,
            final Where where) {
        return isUsableResult(target, interactionInitiatedBy, where).createConsent();
    }

    // builds the usability context (subclass-specific) and evaluates it
    private InteractionResult isUsableResult(
            final ObjectAdapter target,
            final InteractionInitiatedBy interactionInitiatedBy,
            final Where where) {
        final UsabilityContext<?> ic = createUsableInteractionContext(target, interactionInitiatedBy, where);
        return InteractionUtils.isUsableResult(this, ic);
    }

    //endregion

    //region > isAssociation, isAction

    // these four simply interrogate the FeatureType supplied at construction

    @Override
    public boolean isAction() {
        return featureType.isAction();
    }

    @Override
    public boolean isPropertyOrCollection() {
        return featureType.isPropertyOrCollection();
    }

    @Override
    public boolean isOneToManyAssociation() {
        return featureType.isCollection();
    }

    @Override
    public boolean isOneToOneAssociation() {
        return featureType.isProperty();
    }

    //endregion

    //region > mixinAdapterFor

    /**
     * For mixins.
     *
     * <p>
     * Instantiates the mixin type around the mixed-in domain object (via its
     * {@link MixinFacet}) and returns an adapter for the resulting mixin pojo.
     */
    protected ObjectAdapter mixinAdapterFor(
            final Class<?> mixinType,
            final ObjectAdapter mixedInAdapter) {
        final ObjectSpecification objectSpecification = getSpecificationLoader().loadSpecification(mixinType);
        final MixinFacet mixinFacet = objectSpecification.getFacet(MixinFacet.class);
        final Object mixinPojo = mixinFacet.instantiate(mixedInAdapter.getObject());
        return getPersistenceSessionService().adapterFor(mixinPojo);
    }

    /**
     * Derives the friendly (capitalized) name of a mixed-in member from the
     * mixin type's singular name suffix.
     */
    public static String determineNameFrom(final ObjectAction mixinAction) {
        return StringExtensions.asCapitalizedName(suffix(mixinAction));
    }

    /**
     * Derives the member id of a mixed-in action: the suffix with spaces removed,
     * camel-cased with a lower-case first letter.
     */
    static String determineIdFrom(final ObjectActionDefault mixinAction) {
        final String id = StringExtensions.asCamelLowerFirst(compress(suffix(mixinAction)));
        return id;
    }

    // removes all spaces, eg "Collect Amounts" -> "CollectAmounts"
    private static String compress(final String suffix) {
        return suffix.replaceAll(" ","");
    }

    static String suffix(final ObjectAction mixinAction) {
        return suffix(mixinAction.getOnType().getSingularName());
    }

    /**
     * Strips any prefix up to (and including) the last "_" or "$" in the mixin
     * type's singular name; returns the name unchanged if neither separator occurs
     * (other than in final position).
     */
    static String suffix(final String singularName) {
        final String deriveFromUnderscore = derive(singularName, "_");
        if(!Objects.equals(singularName, deriveFromUnderscore)) {
            return deriveFromUnderscore;
        }
        final String deriveFromDollar = derive(singularName, "$");
        if(!Objects.equals(singularName, deriveFromDollar)) {
            return deriveFromDollar;
        }
        return singularName;
    }

    // returns the text after the last occurrence of separator, unless the
    // separator is absent or is the final character (then the input is returned as-is)
    private static String derive(final String singularName, final String separator) {
        final int indexOfSeparator = singularName.lastIndexOf(separator);
        return occursNotAtEnd(singularName, indexOfSeparator)
                ? singularName.substring(indexOfSeparator + 1)
                : singularName;
    }

    private static boolean occursNotAtEnd(final String singularName, final int indexOfUnderscore) {
        return indexOfUnderscore != -1 && indexOfUnderscore != singularName.length() - 1;
    }

    //endregion

    //region > toString

    @Override
    public String toString() {
        return String.format("id=%s,name='%s'", getId(), getName());
    }

    //endregion

    //region > Dependencies

    public SpecificationLoader getSpecificationLoader() {
        return specificationLoader;
    }

    public ServicesInjector getServicesInjector() {
        return servicesInjector;
    }

    public PersistenceSessionServiceInternal getPersistenceSessionService() {
        return persistenceSessionServiceInternal;
    }

    protected <T> T lookupService(final Class<T> serviceClass) {
        return getServicesInjector().lookupService(serviceClass);
    }

    /**
     * The {@link CommandContext} service.
     *
     * @throws IllegalStateException if the service is not registered (it is mandatory
     *         for the command-setup methods below)
     */
    protected CommandContext getCommandContext() {
        CommandContext commandContext = lookupService(CommandContext.class);
        if (commandContext == null) {
            throw new IllegalStateException("The CommandContext service is not registered!");
        }
        return commandContext;
    }

    protected CommandDtoServiceInternal getCommandDtoService() {
        return lookupService(CommandDtoServiceInternal.class);
    }

    //endregion

    //region > command (setup)

    /**
     * Populates the current {@link Command}'s target (class, action, arguments and
     * bookmark) from the given adapter.
     *
     * <p>
     * Only applies to user-initiated commands; and the target, once set by the
     * outer-most action, is never overwritten by nested invocations.
     */
    protected void setupCommandTarget(final ObjectAdapter targetAdapter, final String arguments) {

        final CommandContext commandContext = getCommandContext();
        final Command command = commandContext.getCommand();

        if (command.getExecutor() != Command.Executor.USER) {
            // only the user's own invocation sets up the command
            return;
        }

        if(command.getTarget() != null) {
            // is set up by the outer-most action; inner actions (invoked via the WrapperFactory) must not overwrite
            return;
        }

        command.setTargetClass(CommandUtil.targetClassNameFor(targetAdapter));
        command.setTargetAction(CommandUtil.targetMemberNameFor(this));
        command.setArguments(arguments);

        final Bookmark targetBookmark = CommandUtil.bookmarkFor(targetAdapter);
        command.setTarget(targetBookmark);
    }

    /**
     * Records this member's identifier on the current {@link Command}, unless one
     * has already been recorded.
     */
    protected void setupCommandMemberIdentifier() {

        final CommandContext commandContext = getCommandContext();
        final Command command = commandContext.getCommand();

        if (command.getExecutor() != Command.Executor.USER) {
            // only the user's own invocation sets up the command
            return;
        }

        if (command.getMemberIdentifier() != null) {
            // any contributed/mixin actions will fire after the main action
            // the guard here prevents them from trashing the command's memberIdentifier
            return;
        }

        command.setMemberIdentifier(CommandUtil.memberIdentifierFor(this));
    }

    /**
     * Serializes the given {@link CommandDto} as the command's memento and, if a
     * {@link CommandFacet} is present (and not disabled), copies over its execution
     * context ({@code executeIn}/{@code persistence}); otherwise defaults to
     * foreground execution with if-hinted persistence.
     *
     * <p>
     * Guarded so that only the first (outer-most/main) invocation populates the memento.
     */
    protected void setupCommandDtoAndExecutionContext(final CommandDto dto) {

        final CommandContext commandContext = getCommandContext();
        final Command command = commandContext.getCommand();

        if (command.getExecutor() != Command.Executor.USER) {
            // only the user's own invocation sets up the command
            return;
        }

        if (command.getMemento() != null) {
            // guard here to prevent subsequent contributed/mixin actions from
            // trampling over the command's memento and execution context
            return;
        }

        // memento
        final String mementoXml = CommandDtoUtils.toXml(dto);
        command.setMemento(mementoXml);

        // copy over the command execution 'context' (if available)
        final CommandFacet commandFacet = getFacetHolder().getFacet(CommandFacet.class);
        if(commandFacet != null && !commandFacet.isDisabled()) {
            command.setExecuteIn(commandFacet.executeIn());
            command.setPersistence(commandFacet.persistence());
        } else {
            // if no facet, assume do want to execute right now, but only persist (eventually) if hinted.
            command.setExecuteIn(org.apache.isis.applib.annotation.Command.ExecuteIn.FOREGROUND);
            command.setPersistence(org.apache.isis.applib.annotation.Command.Persistence.IF_HINTED);
        }
    }

    //endregion

}