gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
 */
package org.jetbrains.plugins.groovy.runner;

import com.intellij.execution.CommonJavaRunConfigurationParameters;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.Executor;
import com.intellij.execution.ExternalizablePath;
import com.intellij.execution.configurations.*;
import com.intellij.execution.process.OSProcessHandler;
import com.intellij.execution.process.ProcessAdapter;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.util.JavaParametersUtil;
import com.intellij.execution.util.ScriptFileUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SimpleJavaSdkType;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.JDOMExternalizer;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.search.DelegatingGlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.refactoring.listeners.RefactoringElementAdapter;
import com.intellij.refactoring.listeners.RefactoringElementListener;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PathUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.hash.LinkedHashMap;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.GroovyBundle;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.util.GroovyRunnerPsiUtil;
import org.jetbrains.plugins.groovy.runner.util.CommonProgramRunConfigurationParametersDelegate;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;

import static com.intellij.execution.util.ProgramParametersUtil.configureConfiguration;

/**
 * Run configuration for executing a single Groovy script (or a runnable Groovy class)
 * through the IntelliJ run/debug infrastructure.
 *
 * <p>Holds the user-editable settings (script path, VM/program parameters, working
 * directory, environment, alternative JRE) and knows how to serialize them to/from the
 * workspace XML ({@link #readExternal}/{@link #writeExternal}), build the JVM command
 * line ({@link #getState}), and keep itself up to date when the script is renamed or
 * moved ({@link #getRefactoringElementListener}).</p>
 *
 * @author peter
 */
public class GroovyScriptRunConfiguration extends ModuleBasedConfiguration<RunConfigurationModule>
  implements CommonJavaRunConfigurationParameters, RefactoringListenerProvider {

  // VM options passed to the forked JVM.
  private String vmParams;
  // Working directory for the script process; defaults to the project base dir.
  private String workDir;
  // When true, the runner starts the script in debug mode.
  private boolean isDebugEnabled;
  // When true, the module classpath is handed to the script runner as well.
  private boolean isAddClasspathToTheRunner;
  // Parameters passed to the script itself (not to the JVM).
  @Nullable private String scriptParams;
  // Local path (or script-file URL form, see ScriptFileUtil) of the script to run.
  @Nullable private String scriptPath;
  // Extra environment variables; insertion order is preserved for the UI.
  private final Map<String, String> envs = new LinkedHashMap<>();
  public boolean passParentEnv = true;

  // Alternative JRE selection, mirroring the common Java run configuration options.
  private boolean myAlternativeJrePathEnabled;
  private @Nullable String myAlternativeJrePath;

  public GroovyScriptRunConfiguration(final String name, final Project project, final ConfigurationFactory factory) {
    super(name, new RunConfigurationModule(project), factory);
    workDir = PathUtil.getLocalPath(project.getBaseDir());
  }

  /**
   * Returns the module selected in the configuration, falling back to the first
   * module considered valid for the current script type when none is selected.
   */
  @Nullable
  public Module getModule() {
    return ObjectUtils.chooseNotNull(getConfigurationModule().getModule(), ContainerUtil.getFirstItem(getValidModules()));
  }

  /**
   * All modules the script could run in. If the script type cannot be determined
   * (no runner), every project module is offered; otherwise the runner filters them.
   */
  @Override
  public Collection<Module> getValidModules() {
    Module[] modules = ModuleManager.getInstance(getProject()).getModules();
    final GroovyScriptRunner scriptRunner = getScriptRunner();
    if (scriptRunner == null) {
      return Arrays.asList(modules);
    }

    ArrayList<Module> res = new ArrayList<>();
    for (Module module : modules) {
      if (scriptRunner.isValidModule(module)) {
        res.add(module);
      }
    }
    return res;
  }

  /**
   * Resolves the runner strategy for the configured script path, or {@code null}
   * when the file cannot be found or is not a Groovy file. Plain scripts get the
   * runner of their detected script type; non-script Groovy files (classes with a
   * main method etc.) get the default runner.
   */
  @Nullable
  private GroovyScriptRunner getScriptRunner() {
    final VirtualFile scriptFile = ScriptFileUtil.findScriptFileByPath(getScriptPath());
    if (scriptFile == null) return null;

    final PsiFile psiFile = PsiManager.getInstance(getProject()).findFile(scriptFile);
    if (!(psiFile instanceof GroovyFile)) return null;

    final GroovyFile groovyFile = (GroovyFile)psiFile;
    if (groovyFile.isScript()) {
      return GroovyScriptUtil.getScriptType(groovyFile).getRunner();
    }
    else {
      return new DefaultGroovyScriptRunner();
    }
  }

  /** Restores the settings from workspace XML. Key names must stay in sync with {@link #writeExternal}. */
  @Override
  public void readExternal(@NotNull Element element) {
    super.readExternal(element);
    scriptPath = ExternalizablePath.localPathValue(JDOMExternalizer.readString(element, "path"));
    vmParams = JDOMExternalizer.readString(element, "vmparams");
    scriptParams = JDOMExternalizer.readString(element, "params");
    final String wrk = JDOMExternalizer.readString(element, "workDir");
    // "." is the legacy placeholder for "project directory"; keep the constructor default in that case.
    if (!".".equals(wrk)) {
      workDir = ExternalizablePath.localPathValue(wrk);
    }
    isDebugEnabled = Boolean.parseBoolean(JDOMExternalizer.readString(element, "debug"));
    isAddClasspathToTheRunner = Boolean.parseBoolean(JDOMExternalizer.readString(element, "addClasspath"));
    envs.clear();
    JDOMExternalizer.readMap(element, envs, null, "env");
    myAlternativeJrePathEnabled = JDOMExternalizer.readBoolean(element, "alternativeJrePathEnabled");
    myAlternativeJrePath = JDOMExternalizer.readString(element, "alternativeJrePath");
  }

  /** Persists the settings to workspace XML; optional flags are written only when set to keep the XML small. */
  @Override
  public void writeExternal(@NotNull Element element) throws WriteExternalException {
    super.writeExternal(element);
    JDOMExternalizer.write(element, "path", ExternalizablePath.urlValue(scriptPath));
    JDOMExternalizer.write(element, "vmparams", vmParams);
    JDOMExternalizer.write(element, "params", scriptParams);
    JDOMExternalizer.write(element, "workDir", ExternalizablePath.urlValue(workDir));
    JDOMExternalizer.write(element, "debug", isDebugEnabled);
    if (isAddClasspathToTheRunner) JDOMExternalizer.write(element, "addClasspath", true);
    JDOMExternalizer.writeMap(element, envs, null, "env");
    if (myAlternativeJrePathEnabled) {
      JDOMExternalizer.write(element, "alternativeJrePathEnabled", true);
      if (StringUtil.isNotEmpty(myAlternativeJrePath)) JDOMExternalizer.write(element, "alternativeJrePath", myAlternativeJrePath);
    }
  }

  /**
   * Builds the run state that assembles the JVM command line and launches the process.
   * Returns {@code null} when the script file or its runner cannot be resolved, which
   * makes the platform report the configuration as broken.
   */
  @Override
  public RunProfileState getState(@NotNull Executor executor, @NotNull ExecutionEnvironment environment) {
    final VirtualFile scriptFile = ScriptFileUtil.findScriptFileByPath(getScriptPath());
    if (scriptFile == null) return null;

    final GroovyScriptRunner scriptRunner = getScriptRunner();
    if (scriptRunner == null) return null;

    return new JavaCommandLineState(environment) {
      @NotNull
      @Override
      protected OSProcessHandler startProcess() throws ExecutionException {
        final OSProcessHandler handler = super.startProcess();
        // Kill child processes too when the run is stopped.
        handler.setShouldDestroyProcessRecursively(true);
        if (scriptRunner.shouldRefreshAfterFinish()) {
          handler.addProcessListener(new ProcessAdapter() {
            @Override
            public void processTerminated(@NotNull ProcessEvent event) {
              // The script may have produced files on disk; refresh the VFS so the IDE sees them.
              if (!ApplicationManager.getApplication().isDisposed()) {
                VirtualFileManager.getInstance().asyncRefresh(null);
              }
            }
          });
        }
        return handler;
      }

      @Override
      protected JavaParameters createJavaParameters() throws ExecutionException {
        final Module module = getModule();
        final boolean tests = ProjectRootManager.getInstance(getProject()).getFileIndex().isInTestSourceContent(scriptFile);
        String jrePath = isAlternativeJrePathEnabled() ? getAlternativeJrePath() : null;
        JavaParameters params = new JavaParameters();
        params.setUseClasspathJar(true);
        params.setDefaultCharset(getProject());
        params.setJdk(
          module == null ? JavaParametersUtil.createProjectJdk(getProject(), jrePath)
                         : JavaParametersUtil.createModuleJdk(module, !tests, jrePath)
        );
        // Apply the common program parameters (working dir, env, ...) but suppress the
        // program-parameter string here: the script runner appends it itself below.
        configureConfiguration(params, new CommonProgramRunConfigurationParametersDelegate(GroovyScriptRunConfiguration.this) {
          @Nullable
          @Override
          public String getProgramParameters() {
            return null;
          }
        });
        scriptRunner.configureCommandLine(params, module, tests, scriptFile, GroovyScriptRunConfiguration.this);

        return params;
      }
    };
  }

  /**
   * Keeps this configuration in sync with rename/move refactorings: updates the stored
   * script path when the script file moves, and the configuration name when the class
   * that is run is renamed. Returns {@code null} when {@code element} is unrelated.
   */
  @Override
  public RefactoringElementListener getRefactoringElementListener(PsiElement element) {
    if (scriptPath == null || !scriptPath.equals(getPathByElement(element))) {
      return null;
    }

    final PsiClass classToRun = GroovyRunnerPsiUtil.getRunningClass(element);

    if (element instanceof GroovyFile) {
      return new RefactoringElementAdapter() {
        @Override
        protected void elementRenamedOrMoved(@NotNull PsiElement newElement) {
          if (newElement instanceof GroovyFile) {
            GroovyFile file = (GroovyFile)newElement;
            setScriptPath(ScriptFileUtil.getScriptFilePath(file.getVirtualFile()));
          }
        }

        @Override
        public void undoElementMovedOrRenamed(@NotNull PsiElement newElement, @NotNull String oldQualifiedName) {
          elementRenamedOrMoved(newElement);
        }
      };
    }
    else if (element instanceof PsiClass && element.getManager().areElementsEquivalent(element, classToRun)) {
      return new RefactoringElementAdapter() {
        @Override
        protected void elementRenamedOrMoved(@NotNull PsiElement newElement) {
          setName(((PsiClass)newElement).getName());
        }

        @Override
        public void undoElementMovedOrRenamed(@NotNull PsiElement newElement, @NotNull String oldQualifiedName) {
          elementRenamedOrMoved(newElement);
        }
      };
    }
    return null;
  }

  /** Path of the file containing {@code element}, or {@code null} if it has no backing virtual file. */
  @SuppressWarnings("Duplicates")
  @Nullable
  private static String getPathByElement(@NotNull PsiElement element) {
    PsiFile file = element.getContainingFile();
    if (file == null) return null;
    VirtualFile vfile = file.getVirtualFile();
    if (vfile == null) return null;
    return vfile.getPath();
  }

  /**
   * Creates JavaParameters preconfigured with the module SDK when it is a Java SDK,
   * falling back to a throwaway SDK pointing at the current JVM's java home.
   */
  public static JavaParameters createJavaParametersWithSdk(@Nullable Module module) {
    JavaParameters params = new JavaParameters();
    params.setCharset(null);

    if (module != null) {
      final Sdk sdk = ModuleRootManager.getInstance(module).getSdk();
      if (sdk != null && sdk.getSdkType() instanceof JavaSdkType) {
        params.setJdk(sdk);
      }
    }
    if (params.getJdk() == null) {
      params.setJdk(new SimpleJavaSdkType().createJdk("tmp", SystemProperties.getJavaHome()));
    }
    return params;
  }

  @Override
  @NotNull
  public SettingsEditor<? extends RunConfiguration> getConfigurationEditor() {
    return new GroovyRunConfigurationEditor(getProject());
  }

  /**
   * Validates the configuration before a run: the script file must exist, have a known
   * script type, pass the runner's own checks, and resolve to a class Groovy can execute.
   */
  @Override
  public void checkConfiguration() throws RuntimeConfigurationException {
    super.checkConfiguration();

    final String scriptPath = getScriptPath();

    final VirtualFile script = ScriptFileUtil.findScriptFileByPath(scriptPath);
    if (script == null) throw new RuntimeConfigurationException("Cannot find script " + scriptPath);

    final GroovyScriptRunner scriptRunner = getScriptRunner();
    if (scriptRunner == null) throw new RuntimeConfigurationException("Unknown script type " + scriptPath);

    scriptRunner.ensureRunnerConfigured(this);

    final PsiFile file = PsiManager.getInstance(getProject()).findFile(script);
    final PsiClass toRun = GroovyRunnerPsiUtil.getRunningClass(file);
    if (toRun == null) {
      throw new RuntimeConfigurationWarning(GroovyBundle.message("class.does.not.exist"));
    }
    if (toRun instanceof GrTypeDefinition) {
      if (!GroovyRunnerPsiUtil.canBeRunByGroovy(toRun)) {
        throw new RuntimeConfigurationWarning(GroovyBundle.message("class.cannot.be.executed"));
      }
    }
    else {
      throw new RuntimeConfigurationWarning(GroovyBundle.message("script.file.is.not.groovy.file"));
    }

    JavaParametersUtil.checkAlternativeJRE(this);
  }

  @Override
  public void setVMParameters(@Nullable String value) {
    vmParams = value;
  }

  @Override
  public String getVMParameters() {
    return vmParams;
  }

  @Override
  public boolean isAlternativeJrePathEnabled() {
    return myAlternativeJrePathEnabled;
  }

  @Override
  public void setAlternativeJrePathEnabled(boolean alternativeJrePathEnabled) {
    myAlternativeJrePathEnabled = alternativeJrePathEnabled;
  }

  @Nullable
  @Override
  public String getAlternativeJrePath() {
    return myAlternativeJrePath;
  }

  @Override
  public void setAlternativeJrePath(@Nullable String alternativeJrePath) {
    myAlternativeJrePath = alternativeJrePath;
  }

  // Not applicable for script runs: there is no fixed main class or package.
  @Override
  public String getRunClass() {
    return null;
  }

  @Override
  public String getPackage() {
    return null;
  }

  @Override
  public void setProgramParameters(@Nullable String value) {
    scriptParams = value;
  }

  @Override
  public String getProgramParameters() {
    return scriptParams;
  }

  @Override
  public void setWorkingDirectory(@Nullable String value) {
    workDir = value;
  }

  @Override
  public String getWorkingDirectory() {
    return workDir;
  }

  @Override
  public void setEnvs(@NotNull Map<String, String> envs) {
    this.envs.clear();
    this.envs.putAll(envs);
  }

  @NotNull
  @Override
  public Map<String, String> getEnvs() {
    return envs;
  }

  @Override
  public void setPassParentEnvs(boolean passParentEnvs) {
    this.passParentEnv = passParentEnvs;
  }

  @Override
  public boolean isPassParentEnvs() {
    return passParentEnv;
  }

  public boolean isDebugEnabled() {
    return isDebugEnabled;
  }

  public void setDebugEnabled(boolean debugEnabled) {
    isDebugEnabled = debugEnabled;
  }

  public boolean isAddClasspathToTheRunner() {
    return isAddClasspathToTheRunner;
  }

  public void setAddClasspathToTheRunner(boolean addClasspathToTheRunner) {
    isAddClasspathToTheRunner = addClasspathToTheRunner;
  }

  @Nullable
  public String getScriptPath() {
    return scriptPath;
  }

  public void setScriptPath(@Nullable String scriptPath) {
    this.scriptPath = scriptPath;
  }

  /**
   * Search scope for navigation/debugging: the superclass scope widened with the script
   * file itself, ranking the script file first so it wins over same-named candidates.
   */
  @Override
  public GlobalSearchScope getSearchScope() {
    GlobalSearchScope superScope = super.getSearchScope();
    String path = getScriptPath();
    if (path == null) return superScope;

    VirtualFile scriptFile = LocalFileSystem.getInstance().findFileByPath(path);
    if (scriptFile == null) return superScope;

    GlobalSearchScope fileScope = GlobalSearchScope.fileScope(getProject(), scriptFile);
    if (superScope == null) return fileScope;

    return new DelegatingGlobalSearchScope(fileScope.union(superScope)) {
      @Override
      public int compare(@NotNull VirtualFile file1, @NotNull VirtualFile file2) {
        // The configured script file always ranks highest.
        if (file1.equals(scriptFile)) return 1;
        if (file2.equals(scriptFile)) return -1;
        return super.compare(file1, file2);
      }
    };
  }
}
package org.soitoolkit.commons.module.logger.impl;

import static org.mule.api.config.MuleProperties.MULE_ENDPOINT_PROPERTY;
import static org.mule.transport.http.HttpConnector.HTTP_METHOD_PROPERTY;
import static org.mule.transport.http.HttpConnector.HTTP_REQUEST_PROPERTY;
import static org.soitoolkit.commons.mule.core.PropertyNames.SOITOOLKIT_BUSINESS_CONTEXT_ID;
import static org.soitoolkit.commons.mule.core.PropertyNames.SOITOOLKIT_CONTRACT_ID;
import static org.soitoolkit.commons.mule.core.PropertyNames.SOITOOLKIT_CORRELATION_ID;
import static org.soitoolkit.commons.mule.core.PropertyNames.SOITOOLKIT_INTEGRATION_SCENARIO;

import java.io.IOException;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.inject.Inject;
import javax.inject.Named;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;

import org.mule.RequestContext;
import org.mule.api.MuleContext;
import org.mule.api.MuleEvent;
import org.mule.api.MuleEventContext;
import org.mule.api.MuleMessage;
import org.mule.api.config.MuleConfiguration;
import org.mule.api.transport.PropertyScope;
import org.mule.config.DefaultMuleConfiguration;
import org.mule.config.ExceptionHelper;
import org.mule.module.xml.stax.ReversibleXMLStreamReader;
import org.mule.transport.jms.JmsMessageUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.soitoolkit.commons.logentry.schema.v1.LogEntryType;
import org.soitoolkit.commons.logentry.schema.v1.LogEntryType.ExtraInfo;
import org.soitoolkit.commons.logentry.schema.v1.LogEvent;
import org.soitoolkit.commons.logentry.schema.v1.LogLevelType;
import org.soitoolkit.commons.logentry.schema.v1.LogMessageExceptionType;
import org.soitoolkit.commons.logentry.schema.v1.LogMessageType;
import org.soitoolkit.commons.logentry.schema.v1.LogMetadataInfoType;
import org.soitoolkit.commons.logentry.schema.v1.LogRuntimeInfoType;
import org.soitoolkit.commons.logentry.schema.v1.LogRuntimeInfoType.BusinessContextId;
import org.soitoolkit.commons.module.logger.api.LogEventCreator;
import org.soitoolkit.commons.mule.util.MuleUtil;
import org.soitoolkit.commons.mule.util.XmlUtil;
import org.springframework.context.annotation.Primary;

/**
 * Default {@link LogEventCreator} implementation: turns a Mule event plus log
 * metadata (level, message, correlation/contract/scenario ids, extra info, an
 * optional exception and payload) into a soitoolkit {@code LogEvent} object.
 *
 * <p>Host name, host IP and process id are captured once at class load; the Mule
 * server id is resolved lazily per instance from the Mule configuration.</p>
 */
@Named
@Primary
public class DefaultLogEventCreator implements LogEventCreator {

	private static final Logger log = LoggerFactory.getLogger(DefaultLogEventCreator.class);

	// Header line prefixed to the cause-exception part of a combined stack trace.
	private static final String CAUSE_EXCEPTION_HEADER = "CauseException";

	// Captured once in the static initializer; remain "UNKNOWN" if lookup fails.
	private static InetAddress HOST = null;
	private static String HOST_NAME = "UNKNOWN";
	private static String HOST_IP = "UNKNOWN";
	private static String PROCESS_ID = "UNKNOWN";

	private String serverId = null; // Can't read this one at class initialization because it is not set at that time. Can also be different for different loggers in the same JVM (e.g. multiple wars in one servlet container with shared classes?))

	static {
		try {
			// Let's give it a try, fail silently...
			// (intentional best-effort: logging must never fail because host lookup does)
			HOST = InetAddress.getLocalHost();
			HOST_NAME = HOST.getHostName();
			HOST_IP = HOST.getHostAddress();
			PROCESS_ID = ManagementFactory.getRuntimeMXBean().getName();
		} catch (Throwable ex) {
		}
	}

	private JAXBContext jaxbContext = null;

	/**
	 * Setter for the jaxbContext property; used to marshal JAXB payloads to XML text.
	 * If never injected, JAXB payloads are rendered as an error string instead.
	 *
	 * @param jaxbContext
	 */
	@Inject
	public void setJaxbContext(JAXBContext jaxbContext) {
		this.jaxbContext = jaxbContext;
	}

	/**
	 * Builds a complete LogEvent for the given Mule event.
	 *
	 * <p>Contract id and integration scenario are read from inbound message
	 * properties but overridden by the explicitly supplied arguments when those
	 * are non-empty. The business correlation id falls back from the supplied
	 * {@code correlationId} to the session property when a message is present.</p>
	 */
	@Override
	public LogEvent createLogEvent(
		MuleEvent muleEvent,
		LogLevelType logLevel,
		String logMessage,
		String loggerName,
		String argIntegrationScenario, // TODO integrationScenario,
		String argContractId, // TODO contractId,
		String correlationId,
		Map<String, String> extraInfo,
		Throwable exception,
		Object payload) {

		// --------------------------
		//
		// 1. Process input variables
		//
		// --------------------------

		MuleMessage message = muleEvent.getMessage();

		// TODO: Will event-context always be null when an error is reported?
		// If so then its probably better to move this code to the info-logger method.
		String serviceImplementation = "";
		MuleEventContext event = RequestContext.getEventContext();
		if (event != null) {
			serviceImplementation = MuleUtil.getServiceName(event);
		}

		String endpoint = getEndpoint(message, event);

		String messageId = "";
		String integrationScenarioId = "";
		String contractId = "";
		String businessCorrelationId = correlationId;
		String propertyBusinessContextId = null;

		if (message != null) {

			if (log.isDebugEnabled()) {
				// Dump all inbound properties to ease troubleshooting.
				@SuppressWarnings("rawtypes")
				Set names = message.getPropertyNames(PropertyScope.INBOUND);
				for (Object object : names) {
					Object value = message.getInboundProperty(object.toString());
					log.debug(object + " = " + value + " (" + object.getClass().getName() + ")");
				}
			}

			messageId = message.getUniqueId();
			contractId = message.getInboundProperty(SOITOOLKIT_CONTRACT_ID, "");
			businessCorrelationId = message.getSessionProperty(SOITOOLKIT_CORRELATION_ID, "");
			integrationScenarioId = message.getInboundProperty(SOITOOLKIT_INTEGRATION_SCENARIO, "");
			propertyBusinessContextId = message.getInboundProperty(SOITOOLKIT_BUSINESS_CONTEXT_ID, null);

			// Override contract id from the message properties with the supplied one from the log-call, if any
			if (argContractId != null && argContractId.length() > 0) {
				contractId = argContractId;
			}

			// Override contract id from the message properties with the supplied one from the log-call, if any
			if (argIntegrationScenario != null && argIntegrationScenario.length() > 0) {
				integrationScenarioId = argIntegrationScenario;
			}
		}

		String componentId = getServerId(muleEvent);
		String payloadASstring = getPayloadAsString(payload);

		// -------------------------
		//
		// 2. Create LogEvent object
		//
		// -------------------------

		// Setup basic runtime information for the log entry
		LogRuntimeInfoType lri = new LogRuntimeInfoType();
		lri.setTimestamp(XmlUtil.convertDateToXmlDate(null));
		lri.setHostName(HOST_NAME);
		lri.setHostIp(HOST_IP);
		lri.setProcessId(PROCESS_ID);
		lri.setThreadId(Thread.currentThread().getName());
		lri.setComponentId(componentId);
		lri.setMessageId(messageId);
		lri.setBusinessCorrelationId(businessCorrelationId);

		// // Add any business contexts
		// if (businessContextId != null) {
		// Set<Entry<String, String>> entries = businessContextId.entrySet();
		// for (Entry<String, String> entry : entries) {
		// BusinessContextId bxid = new BusinessContextId();
		// bxid.setName(entry.getKey());
		// bxid.setValue(entry.getValue());
		// lri.getBusinessContextId().add(bxid);
		// }
		// }

		// Also add any business contexts from message properties
		// Property format: comma-separated "name=value" pairs; a missing "=value" yields an empty value.
		if (propertyBusinessContextId != null) {
			String[] propertyArr = propertyBusinessContextId.split(",");
			for (String property : propertyArr) {
				String[] nameValueArr = property.split("=");
				String name = nameValueArr[0];
				String value = (nameValueArr.length > 1) ? nameValueArr[1] : "";
				BusinessContextId bxid = new BusinessContextId();
				bxid.setName(name);
				bxid.setValue(value);
				lri.getBusinessContextId().add(bxid);
			}
		}

		// Setup basic metadata information for the log entry
		LogMetadataInfoType lmi = new LogMetadataInfoType();
		lmi.setLoggerName(loggerName);
		lmi.setIntegrationScenarioId(integrationScenarioId);
		lmi.setContractId(contractId);
		lmi.setServiceImplementation(serviceImplementation);
		lmi.setEndpoint(endpoint);

		// Setup basic information of the log message for the log entry
		LogMessageType lm = new LogMessageType();
		lm.setLevel(logLevel);
		lm.setMessage(logMessage);

		// Setup exception information if present
		if (exception != null) {
			// Unless verbose exceptions are enabled, summarise to at most 5 frames.
			exception = (DefaultMuleConfiguration.verboseExceptions) ? exception : ExceptionHelper.summarise(exception, 5);

			LogMessageExceptionType lme = new LogMessageExceptionType();

			lme.setExceptionClass(exception.getClass().getName());
			lme.setExceptionMessage(exception.getMessage());
			StackTraceElement[] stArr = exception.getStackTrace();
			List<String> stList = new ArrayList<String>();
			for (int i = 0; i < stArr.length; i++) {
				stList.add(stArr[i].toString());
			}

			// Append the direct cause (one level only), also summarised unless verbose.
			if (exception.getCause() != null) {
				Throwable ce = exception.getCause();
				ce = (DefaultMuleConfiguration.verboseExceptions) ? ce : ExceptionHelper.summarise(ce, 5);
				stList.add(CAUSE_EXCEPTION_HEADER + ": " + ce.getMessage());
				StackTraceElement[] ceStArr = ce.getStackTrace();
				for (int i = 0; i < ceStArr.length; i++) {
					stList.add(ceStArr[i].toString());
				}
			}
			if (!DefaultMuleConfiguration.verboseExceptions) {
				stList.add("*** set debug level logging or '-Dmule.verbose.exceptions=true' for full stacktrace ***");
			}
			lme.getStackTrace().addAll(stList);

			// if (exception instanceof MuleException) {
			// MuleException de = (MuleException)exception;
			// System.err.println("Cause: " + de.getCause());
			// StackTraceElement[] st = de.getCause().getStackTrace();
			// for (int i = 0; i < st.length; i++) {
			//// stList.add(st[i].toString());
			// System.err.println(st[i].toString());
			// }
			//// System.err.println("Detailed: " + de.getDetailedMessage());
			//// System.err.println("Summary: " + de.getSummaryMessage());
			//// System.err.println("Verbose: " + de.getVerboseMessage());
			// }

			lm.setException(lme);
		}

		// Create the log entry object
		LogEntryType logEntry = new LogEntryType();
		logEntry.setMetadataInfo(lmi);
		logEntry.setRuntimeInfo(lri);
		logEntry.setMessageInfo(lm);
		logEntry.setPayload(payloadASstring);

		// Add any extra info
		if (extraInfo != null) {
			Set<Entry<String, String>> entries = extraInfo.entrySet();
			for (Entry<String, String> entry : entries) {
				ExtraInfo ei = new ExtraInfo();
				ei.setName(entry.getKey());
				ei.setValue(entry.getValue());
				logEntry.getExtraInfo().add(ei);
			}
		}

		// Create the final log event object
		LogEvent logEvent = new LogEvent();
		logEvent.setLogEntry(logEntry);

		// We are actually done :-)
		return logEvent;
	}

	/**
	 * Pick up the most relevant endpoint information:
	 *
	 * 1. First from the outbound property MULE_ENDPOINT_PROPERTY if found
	 * 2. Secondly from the inbound property MULE_ENDPOINT_PROPERTY if found
	 * 3. Last try with the mule-event's endpoint-info
	 *
	 * Any failure (including NPEs) is swallowed and reported as an error string,
	 * so endpoint resolution can never break the logging itself.
	 *
	 * @param message
	 * @param event
	 * @return the endpoint description, "" when nothing could be determined
	 */
	protected String getEndpoint(MuleMessage message, MuleEventContext event) {

		try {
			if (message != null) {
				String outEp = message.getOutboundProperty(MULE_ENDPOINT_PROPERTY);
				if (outEp != null) {
					// If http endpoint then try to add the http-method
					if (outEp.startsWith("http")) {
						String httpMethod = message.getOutboundProperty(HTTP_METHOD_PROPERTY);
						if (httpMethod != null) {
							outEp += " (" + httpMethod + ")";
						}
					}
					return outEp;
				}

				String inEp = message.getInboundProperty(MULE_ENDPOINT_PROPERTY);
				if (inEp != null) {
					// If http endpoint then try to add the http-method
					if (inEp.startsWith("http")) {
						String httpMethod = message.getInboundProperty(HTTP_METHOD_PROPERTY);
						if (httpMethod != null) {
							inEp += " (" + httpMethod + ")";
						}
					}
					return inEp;
				}
			}

			if (event != null) {
				URI endpointURI = event.getEndpointURI();
				String ep = (endpointURI == null)? "" : endpointURI.toString();
				// NOTE(review): this branch reads properties from `message` without a null
				// check; when message is null the NPE is caught by the catch below — confirm
				// whether that is intended.
				if (ep.startsWith("http")) {
					String httpMethod = message.getInboundProperty(HTTP_METHOD_PROPERTY);
					String httpRequest = message.getInboundProperty(HTTP_REQUEST_PROPERTY);
					if (httpMethod != null) {
						ep += " (" + httpMethod + " on " + httpRequest + ")";
					}
				}
				return ep;
			}

			// No luck at all this time :-(
			return "";
		} catch (Throwable ex) {
			// Really bad...
			return "GET-ENDPOINT ERROR: " + ex.getMessage();
		}
	}

	/**
	 * Lazily resolves (and caches) the Mule server id from the Mule configuration.
	 * Returns a descriptive "UNKNOWN.*" marker for each null encountered on the way.
	 */
	private String getServerId(MuleEvent muleEvent) {

		// Return serverId if already set
		if (serverId != null) return serverId;

		// Try to get the serverId;
		if (muleEvent == null) return "UNKNOWN.NULL_MULE_EVENT";

		MuleContext muleContext = muleEvent.getMuleContext();
		if (muleContext == null) return "UNKNOWN.NULL_MULE_CONTEXT";

		MuleConfiguration mConf = muleContext.getConfiguration();
		if (mConf == null) return "UNKNOWN.NULL_MULE_CONFIGURATION";

		// Ok, we got! Save and return it.
		return serverId = mConf.getId();
	}

	/**
	 * Renders the payload as text. An Object[] payload is rendered element by element,
	 * one "[index]: content" line per element; anything else goes straight to
	 * {@link #getContentAsString(Object)}.
	 */
	private String getPayloadAsString(Object payload) {
		String content = null;
		if (payload instanceof Object[]) {
			Object[] arr = (Object[]) payload;
			int i = 0;
			for (Object object : arr) {
				String arrContent = "[" + i++ + "]: " + getContentAsString(object);
				if (i == 1) {
					content = arrContent;
				} else {
					content += "\n" + arrContent;
				}
			}
		} else {
			content = getContentAsString(payload);
		}
		return content;
	}

	/**
	 * Converts one payload object to a string by type: byte[] → char-per-byte string,
	 * ReversibleXMLStreamReader → XML text, JMS Message → its object form, JAXB-annotated
	 * object → marshalled XML; everything else falls back to {@code toString()}.
	 * Returns null for a null payload.
	 */
	private String getContentAsString(Object payload) {
		String content = null;

		if (payload == null) {
			return null;
		} else if (payload instanceof byte[]) {
			content = getByteArrayContentAsString(payload);
		} else if (payload instanceof ReversibleXMLStreamReader) {
			content = XmlUtil.convertReversibleXMLStreamReaderToString(
					(ReversibleXMLStreamReader) payload, "UTF-8");
		} else if (payload instanceof Message) {
			content = convertJmsMessageToString(payload, "UTF-8");
		} else if (isJabxObject(payload)) {
			content = getJaxbContentAsString(payload, "UTF-8");
			// } else if (payload instanceof ChunkedInputStream) {
			// contents = message.getPayloadAsString();
			// message.setPayload(contents);
		} else {
			// Using message.getPayloadAsString() consumes InputStreams causing
			// exceptions after the logging...
			// contents = message.getPayloadAsString();
			content = payload.toString();
		}

		return content;
	}

	/** Converts a JMS message to its object form and stringifies it; wraps checked exceptions as RuntimeException. */
	private String convertJmsMessageToString(Object payload, String outputEncoding) {
		try {
			return JmsMessageUtils.toObject((Message) payload, null, outputEncoding).toString();
		} catch (JMSException e) {
			throw new RuntimeException(e);
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Renders a byte[] by casting each byte to a char (effectively Latin-1-like;
	 * multi-byte encodings will not round-trip — presumably acceptable for log output).
	 */
	private String getByteArrayContentAsString(Object payload) {
		String content;
		StringBuffer byteArray = new StringBuffer();
		byte[] bytes = (byte[]) payload;
		for (int i = 0; i < bytes.length; i++) {
			byteArray.append((char) bytes[i]);
		}
		content = byteArray.toString();
		return content;
	}

	/** True when the payload's class carries the JAXB @XmlType annotation. (Name keeps the original's "Jabx" spelling.) */
	private boolean isJabxObject(Object payload) {
		return payload.getClass().isAnnotationPresent(XmlType.class);
	}

	/**
	 * Marshals a JAXB object to XML text. Objects without @XmlRootElement are wrapped
	 * in a synthetic JAXBElement (namespace "class:&lt;fqcn&gt;") so they can still be
	 * marshalled. Failures are reported inline in the returned string, never thrown.
	 */
	@SuppressWarnings({ "unchecked", "rawtypes" })
	private String getJaxbContentAsString(Object jaxbObject, String outputEncoding) {
		String content;
		if (jaxbContext == null) {
			content = "Missing jaxbContext injection, can't marshal JAXB object of type: "
					+ jaxbObject.getClass().getName();
		} else {
			if (!jaxbObject.getClass()
					.isAnnotationPresent(XmlRootElement.class)) {
				// We are missing element end namespace info, let's create a
				// wrapper xml-root-element
				QName wrapperQName = new QName("class:"
						+ jaxbObject.getClass().getName(),
						getJaxbWrapperElementName(jaxbObject));
				jaxbObject = new JAXBElement(wrapperQName, jaxbObject
						.getClass(), null, jaxbObject);
			}

			try {
				content = marshalJaxbObject(jaxbObject);
			} catch (Throwable e) {
				e.printStackTrace();
				content = "JAXB object marshalling failed: " + e.getMessage();
			}
		}
		return content;
	}

	/** Performs the actual JAXB marshalling to a string; wraps JAXBException as RuntimeException. */
	private String marshalJaxbObject(Object jaxbObject) {
		try {
			StringWriter writer = new StringWriter();
			Marshaller marshaller = jaxbContext.createMarshaller();
			marshaller.marshal(jaxbObject, writer);
			return writer.toString();
		} catch (JAXBException e) {
			throw new RuntimeException(e);
		}
	}

	/** Element name for the synthetic wrapper: the simple class name with a lower-cased first letter. */
	private String getJaxbWrapperElementName(Object payload) {
		String name = payload.getClass().getSimpleName();
		String elementName = name.substring(0, 1).toLowerCase() + name.substring(1);
		return elementName;
	}
}
package Utilities;

import Application.Node;
import DHash.BigInt;
import DHash.IdKey;
import edu.uci.ics.jung.algorithms.layout.CircleLayout;
import edu.uci.ics.jung.graph.DirectedSparseGraph;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.util.EdgeType;
import edu.uci.ics.jung.visualization.VisualizationViewer;
import edu.uci.ics.jung.visualization.control.ModalGraphMouse;
import edu.uci.ics.jung.visualization.decorators.ToStringLabeller;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.net.MalformedURLException;
import java.rmi.Naming;
import java.rmi.NotBoundException;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.imageio.ImageIO;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import org.apache.commons.collections15.Transformer;
import org.apache.commons.collections15.TransformerUtils;
import org.apache.commons.collections15.functors.MapTransformer;
import org.apache.commons.collections15.map.LazyMap;

/**
 * Implementation of ChordViewer - Only for educational purposes.
 * <p>
 * Swing window that walks a Chord ring over RMI (starting from a local node)
 * and renders it as a circular JUNG graph, with a File menu that can save the
 * picture as a JPEG.
 *
 * @author ChordFPG team
 * @version 1.0
 */
public class ChordViewer extends JFrame {

    // Entry point into the ring; NOTE(review): create() reassigns this field
    // while walking the ring, so after create() it no longer refers to the
    // node passed to the constructor — confirm this is intended.
    private Node node;
    // JUNG component that draws the ring; initialised in create().
    private VisualizationViewer<String, String> visual;
    // Keys of discovered ring nodes, indexed by vertex number (see getNodeKey).
    // NOTE(review): static and never cleared — a second viewer (or a second
    // call to create()) appends duplicates; verify whether this should be
    // cleared at the start of create().
    private static List<IdKey> nodesKey = new ArrayList<IdKey>();

    /**
     * Builds and shows the viewer window, centred on the screen, with a File
     * menu offering "Save As..." (JPEG export) and "Close".
     *
     * @param node the local Chord node whose ring should be displayed
     * @throws RemoteException if the node's identifier cannot be read over RMI
     */
    public ChordViewer(Node node) throws RemoteException {
        super("Chord Ring Viewer 1.0 - " + node.getLocalID().toString());
        setIconImage(Toolkit.getDefaultToolkit().getImage(this.getClass().getResource("/Utilities/resources/logo2.gif")));
        setSize(new Dimension(600, 600));
        setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        setResizable(false);
        JMenuBar menuBar = new JMenuBar();
        setJMenuBar(menuBar);
        JMenu fileMenu = new JMenu("File");
        JMenuItem saveAsItem = new JMenuItem("Save As...");
        saveAsItem.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                JFileChooser fileChooser = new JFileChooser();
                fileChooser.setAcceptAllFileFilterUsed(false);
                int option = fileChooser.showSaveDialog(ChordViewer.this.rootPane);
                if (option == JFileChooser.APPROVE_OPTION) {
                    File file = fileChooser.getSelectedFile();
                    // ".jpg" is always appended to whatever name the user chose.
                    ChordViewer.this.writeImage(new File(file.getAbsolutePath() + ".jpg"));
                }
            }
        });
        fileMenu.add(saveAsItem);
        fileMenu.addSeparator();
        JMenuItem closeItem = new JMenuItem("Close");
        closeItem.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                ChordViewer.this.dispose();
            }
        });
        fileMenu.add(closeItem);
        menuBar.add(fileMenu);
        this.node = node;
        // Centre the window on the screen.
        Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();
        int x = (screen.width / 2) - (this.getWidth() / 2);
        int y = (screen.height / 2) - (this.getHeight() / 2);
        this.setLocation(x, y);
        this.setVisible(true);
    }

    /**
     * Writes graph into an image file by painting the current visualization
     * into an off-screen buffer and encoding it as JPEG.
     *
     * @param file - File to be written
     */
    private void writeImage(File file) {
        int width = visual.getWidth();
        int height = visual.getHeight();
        BufferedImage bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        Graphics2D graphics2D = bufferedImage.createGraphics();
        visual.paint(graphics2D);
        graphics2D.dispose();
        // NOTE(review): 's' is never used — candidate for removal.
        String s = file.getName();
        try {
            ImageIO.write(bufferedImage, "jpeg", file);
        } catch (Exception e) {
            // NOTE(review): save failures are silently swallowed — consider
            // surfacing an error dialog to the user.
        }
    }

    /**
     * Creates the circular graph using JUNG2 API.
     * <p>
     * Walks the ring over RMI: resolves the successor of 2^0 relative to the
     * local node, then repeatedly follows getImmediateSuccessor() until the
     * walk returns to the starting key, adding one vertex per node and one
     * directed "Successor[j]" edge between consecutive vertices.
     */
    public void create() {
        Graph<String, String> graph = new DirectedSparseGraph<String, String>();
        List<String> list;
        // Maps a node's hex hash key to its vertex index (TreeMap => sorted by key).
        Map<String, Integer> map = new TreeMap<String, Integer>();
        IdKey key;
        Node nd;
        nd = node;
        // NOTE(review): raw CircleLayout — should be CircleLayout<String, String>.
        CircleLayout layout;
        Transformer<String, String> tran;
        int i = 0;
        try {
            BigInt bg = nd.getLocalID().getHashKey().powerOfTwo(0);
            IdKey id = new IdKey(bg, 0, "low");
            id = nd.find_successor_ID(id);
            try {
                // RMI registry URL: "/<ip>:1099/<pid>".
                node = (Node) Naming.lookup("/" + id.getIP() + ":1099/" + String.valueOf(id.getPID()));
            } catch (MalformedURLException ex) {
                // NOTE(review): execution continues after dispose() with a
                // stale 'node' reference — confirm whether it should return here.
                JOptionPane.showMessageDialog(this, "A problem occurred while creating the graph. Please try again later.", "ChordFPG said:", JOptionPane.ERROR_MESSAGE);
                this.dispose();
            }
            key = id;
            /* finds all nodes of the Chord p2p system , starting from the first node*/
            /* add them as vertices to the circular graph */
            while (true) {
                nodesKey.add(key);
                map.put(key.hashKeytoHexString(), i);
                graph.addVertex(Integer.toString(i));
                i++;
                key = node.getImmediateSuccessor();
                try {
                    node = (Node) Naming.lookup("/" + key.getIP() + ":1099/" + String.valueOf(key.getPID()));
                } catch (MalformedURLException ex) {
                    JOptionPane.showMessageDialog(this, "A problem occurred while creating the graph. Please try again later.", "ChordFPG said:", JOptionPane.ERROR_MESSAGE);
                    this.dispose();
                }
                // Stop once the walk wraps around to the starting node.
                if (key.equals(id)) {
                    break;
                }
            }
        } catch (RemoteException re) {
            JOptionPane.showMessageDialog(this, "A problem occurred while creating the graph. Please try again later.", "ChordFPG said:", JOptionPane.ERROR_MESSAGE);
            this.dispose();
        } catch (NotBoundException nb) {
            JOptionPane.showMessageDialog(this, "A problem occurred while creating the graph. Please try again later.", "ChordFPG said:", JOptionPane.ERROR_MESSAGE);
            this.dispose();
        }
        // Vertex names are simply "0".."i-1", in ring-walk order.
        list = new ArrayList<String>();
        for (int j = 0; j < i; j++) {
            list.add(Integer.toString(j));
        }
        /*add edge between two successive vertices*/
        for (int j = 0; j < list.size() - 1; j++) {
            graph.addEdge("Successor[" + j + "]", Integer.toString(j), Integer.toString(j + 1), EdgeType.DIRECTED);
        }
        // Close the ring: last vertex points back at vertex 0.
        graph.addEdge("Successor[" + (list.size() - 1) + "]", Integer.toString(list.size() - 1), Integer.toString(0), EdgeType.DIRECTED);
        /*creates the layout and the visualization viewer*/
        layout = new CircleLayout(graph);
        layout.setVertexOrder(list);
        layout.setSize(new Dimension(550, 550));
        // Tooltip map: vertex index -> hex hash key. NOTE(review): relies on the
        // TreeMap's sorted key order matching the insertion (ring-walk) order —
        // verify this holds for the hash-key encoding in use.
        Map<String, String> nMap = new HashMap<String, String>();
        Set<String> set = map.keySet();
        Object[] array = set.toArray();
        for (i = 0; i < map.size(); i++) {
            nMap.put(Integer.toString(i), (String) array[i]);
        }
        tran = TransformerUtils.mapTransformer(nMap);
        visual = new VisualizationViewer<String, String>(layout);
        visual.setBackground(Color.white);
        layout.setVertexOrder(list);
        visual.setSize(new Dimension(400, 400));
        visual.getRenderContext().setVertexLabelTransformer(MapTransformer.<String, String>getInstance(
                LazyMap.<String, String>decorate(new HashMap<String, String>(), new ToStringLabeller<String>())));
        visual.getRenderContext().setEdgeLabelTransformer(MapTransformer.<String, String>getInstance(
                LazyMap.<String, String>decorate(new HashMap<String, String>(), new ToStringLabeller<String>())));
        visual.setVertexToolTipTransformer(tran);
        /*creates our custom mouse plugin*/
        GraphMouse<String, String> gm = new GraphMouse<String, String>();
        gm.setMode(ModalGraphMouse.Mode.PICKING);
        visual.setGraphMouse(gm);
        this.getContentPane().add(visual);
        visual.repaint();
    }

    /**
     * Returns the IdKey recorded for the vertex with the given index during the
     * last ring walk (see the NOTE on nodesKey about accumulation across calls).
     */
    public static IdKey getNodeKey(int index) {
        return nodesKey.get(index);
    }
}
/*
 * (c) Copyright 2022 Micro Focus
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License v2.0 which accompany this distribution.
 *
 * The Apache License is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.cloudslang.content.httpclient.build.auth;

import io.cloudslang.content.httpclient.build.Utils;
import io.cloudslang.content.httpclient.entities.HttpClientInputs;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.Header;
import org.apache.http.auth.AuthScheme;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.config.Lookup;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.impl.auth.*;
import org.apache.http.message.BasicHeader;
import org.apache.http.protocol.HttpContext;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import static org.apache.commons.lang3.StringUtils.isEmpty;

/**
 * Fluent builder that assembles the Apache HttpClient {@link AuthSchemeProvider}
 * registry for the authentication types requested by the user (NTLM, BASIC,
 * DIGEST, KERBEROS/SPNEGO, ANONYMOUS).
 * <p>
 * Side effects: for BASIC, pre-emptive Authorization / Proxy-Authorization
 * headers are appended to the supplied header list; for KERBEROS, several JVM
 * system properties (krb5 config, JAAS login config, credentials) are set.
 */
public class AuthSchemeProviderLookupBuilder {
    private AuthTypes authTypes;
    // KDC database lookups skip the port unless overridden; "true" is the default.
    private String skipPortAtKerberosDatabaseLookup = "true";
    private String kerberosConfigFile;
    private String kerberosLoginConfigFile;
    private String host;
    private String username;
    private String password;
    private String proxyUsername;
    private String proxyPassword;
    // Mutable request-header list shared with the caller; BASIC auth appends to it.
    private List<Header> headers;

    public AuthSchemeProviderLookupBuilder setAuthTypes(AuthTypes authTypes) {
        this.authTypes = authTypes;
        return this;
    }

    public AuthSchemeProviderLookupBuilder setSkipPortAtKerberosDatabaseLookup(String skipPortAtKerberosDatabaseLookup) {
        // Blank input keeps the "true" default.
        if (!StringUtils.isEmpty(skipPortAtKerberosDatabaseLookup)) {
            this.skipPortAtKerberosDatabaseLookup = skipPortAtKerberosDatabaseLookup;
        }
        return this;
    }

    public AuthSchemeProviderLookupBuilder setKerberosConfigFile(String kerberosConfigFile) {
        this.kerberosConfigFile = kerberosConfigFile;
        return this;
    }

    public AuthSchemeProviderLookupBuilder setHost(String host) {
        this.host = host;
        return this;
    }

    public AuthSchemeProviderLookupBuilder setKerberosLoginConfigFile(String kerberosLoginConfigFile) {
        this.kerberosLoginConfigFile = kerberosLoginConfigFile;
        return this;
    }

    public AuthSchemeProviderLookupBuilder setUsername(String username) {
        this.username = username;
        return this;
    }

    public AuthSchemeProviderLookupBuilder setPassword(String password) {
        this.password = password;
        return this;
    }

    public AuthSchemeProviderLookupBuilder setProxyUsername(String proxyUsername) {
        this.proxyUsername = proxyUsername;
        return this;
    }

    public AuthSchemeProviderLookupBuilder setProxyPassword(String proxyPassword) {
        this.proxyPassword = proxyPassword;
        return this;
    }

    public AuthSchemeProviderLookupBuilder setHeaders(List<Header> headers) {
        this.headers = headers;
        return this;
    }

    /**
     * Builds the scheme-provider lookup for every configured auth type.
     *
     * @return the registry of AuthSchemeProviders keyed by scheme name
     * @throws IllegalStateException for an unsupported auth type
     * @throws RuntimeException      if a Kerberos config file cannot be created
     */
    public Lookup<AuthSchemeProvider> buildAuthSchemeProviderLookup() {
        RegistryBuilder<AuthSchemeProvider> registryBuilder = RegistryBuilder.create();
        for (String type : authTypes) {
            switch (type.trim()) {
                case "NTLM":
                    registryBuilder.register(AuthSchemes.NTLM, new AuthSchemeProvider() {
                        @Override
                        public AuthScheme create(HttpContext httpContext) {
                            return new NTLMScheme(new JCIFSEngine());
                        }
                    });
                    break;
                case "BASIC":
                    registryBuilder.register(AuthSchemes.BASIC,
                            new BasicSchemeFactory(Charset.forName(Utils.DEFAULT_CHARACTER_SET)));
                    // Pre-emptively attach credentials so the very first request
                    // already carries them (no challenge round-trip).
                    if (!isEmpty(proxyUsername) && !isEmpty(proxyPassword)) {
                        headers.add(basicAuthHeader("Proxy-Authorization", proxyUsername, proxyPassword));
                    }
                    if (!isEmpty(username) && !isEmpty(password)) {
                        headers.add(basicAuthHeader("Authorization", username, password));
                    }
                    break;
                case "DIGEST":
                    registryBuilder.register(AuthSchemes.DIGEST, new DigestSchemeFactory());
                    break;
                case "KERBEROS":
                    configureKerberosEnvironment();
                    boolean skipPort = Boolean.parseBoolean(skipPortAtKerberosDatabaseLookup);
                    registryBuilder.register(AuthSchemes.KERBEROS, new KerberosSchemeFactory(skipPort));
                    registryBuilder.register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(skipPort));
                    break;
                case AuthTypes.ANONYMOUS:
                    break;
                default:
                    throw new IllegalStateException("Unsupported '" + HttpClientInputs.AUTH_TYPE +
                            "'authentication scheme: " + type);
            }
        }
        return registryBuilder.build();
    }

    /** Builds a pre-emptive Basic auth header: "Basic " + base64(user:pass). */
    private static Header basicAuthHeader(String headerName, String user, String pass) {
        byte[] encoded = Base64.encodeBase64((user + ":" + pass).getBytes(StandardCharsets.UTF_8));
        // Base64 output is pure ASCII, so decoding with UTF-8 is charset-safe.
        return new BasicHeader(headerName, "Basic " + new String(encoded, StandardCharsets.UTF_8));
    }

    /**
     * Sets the JVM-wide system properties the Kerberos/SPNEGO schemes rely on:
     * krb5 config (explicit file or a generated one derived from the host's
     * domain), JAAS login config, and the credentials read by KrbHttpLoginModule.
     */
    private void configureKerberosEnvironment() {
        if (kerberosConfigFile != null) {
            System.setProperty("java.security.krb5.conf", kerberosConfigFile);
        } else {
            File krb5Config;
            // Strip leading sub-domains: "a.b.example.com" -> "example.com".
            String domain = host.replaceAll(".*\\.(?=.*\\.)", "");
            try {
                krb5Config = createKrb5Configuration(domain);
            } catch (IOException e) {
                throw new RuntimeException("could not create the krb5 config file" + e.getMessage(), e);
            }
            System.setProperty("java.security.krb5.conf", krb5Config.toURI().toString());
        }
        if (kerberosLoginConfigFile != null) {
            System.setProperty("java.security.auth.login.config", kerberosLoginConfigFile);
        } else {
            File loginConfig;
            try {
                loginConfig = createLoginConfig();
            } catch (IOException e) {
                throw new RuntimeException("could not create the kerberos login config file" + e.getMessage(), e);
            }
            System.setProperty("java.security.auth.login.config", loginConfig.toURI().toString());
        }
        if (password != null) {
            System.setProperty(KrbHttpLoginModule.PAS, password);
        }
        if (username != null) {
            System.setProperty(KrbHttpLoginModule.USR, username);
        }
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
    }

    /**
     * Writes a minimal krb5.conf for the given domain into a self-deleting temp
     * file (realm name is the upper-cased domain; the domain itself serves as
     * KDC and admin server).
     *
     * @throws IOException if the temp file cannot be created or written
     */
    private static File createKrb5Configuration(String domain) throws IOException {
        File tempFile = File.createTempFile("krb", "kdc");
        tempFile.deleteOnExit();
        ArrayList<String> lines = new ArrayList<>();
        lines.add("[libdefaults]");
        lines.add("\tdefault_realm = " + domain.toUpperCase());
        lines.add("[realms]");
        lines.add("\t" + domain.toUpperCase() + " = {");
        lines.add("\t\tkdc = " + domain);
        lines.add("\t\tadmin_server = " + domain);
        lines.add("\t}");
        // try-with-resources closes the writer on every path; close() failures
        // now surface as IOException instead of being silently printed.
        try (FileWriter writer = new FileWriter(tempFile)) {
            IOUtils.writeLines(lines, System.lineSeparator(), writer);
        }
        return tempFile;
    }

    /**
     * Writes a JAAS login configuration that wires KrbHttpLoginModule into the
     * JGSS initiate context, into a self-deleting temp file.
     *
     * @throws IOException if the temp file cannot be created or written
     */
    private static File createLoginConfig() throws IOException {
        File tempFile = File.createTempFile("krb", "loginConf");
        tempFile.deleteOnExit();
        ArrayList<String> lines = new ArrayList<>();
        lines.add("com.sun.security.jgss.initiate {\n" +
                " " + KrbHttpLoginModule.class.getCanonicalName() + " required\n" +
                " doNotPrompt=true\n" +
                " useFirstPass=true\n" +
                " debug=true ;\n" +
                "};");
        try (FileWriter writer = new FileWriter(tempFile)) {
            IOUtils.writeLines(lines, System.lineSeparator(), writer);
        }
        return tempFile;
    }

    /**
     * Closes the writer, logging (not propagating) any failure. Retained for
     * backward compatibility with external callers; the private factory methods
     * above now use try-with-resources instead.
     */
    public static void safeClose(FileWriter fis) {
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException e) {
                System.out.println(e);
            }
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.run.v1alpha1.model; /** * k8s.io.apimachinery.pkg.apis.meta.v1.ObjectMeta is metadata that all persisted resources must * have, which includes all objects users must create. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Cloud Run Admin API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class ObjectMeta extends com.google.api.client.json.GenericJson { /** * (Optional) Annotations is an unstructured key value map stored with a resource that may be set * by external tools to store and retrieve arbitrary metadata. They are not queryable and should * be preserved when modifying objects. More info: https://kubernetes.io/docs/user- * guide/annotations * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> annotations; /** * (Optional) Not supported by Cloud Run The name of the cluster which the object belongs to. 
This * is used to distinguish resources with same name and namespace in different clusters. This field * is not set anywhere right now and apiserver is going to ignore it if set in create or update * request. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String clusterName; /** * (Optional) CreationTimestamp is a timestamp representing the server time when this object was * created. It is not guaranteed to be set in happens-before order across separate operations. * Clients may not set this value. It is represented in RFC3339 form and is in UTC. Populated by * the system. Read-only. Null for lists. More info: * https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata * The value may be {@code null}. */ @com.google.api.client.util.Key private String creationTimestamp; /** * (Optional) Not supported by Cloud Run Number of seconds allowed for this object to gracefully * terminate before it will be removed from the system. Only set when deletionTimestamp is also * set. May only be shortened. Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer deletionGracePeriodSeconds; /** * (Optional) Not supported by Cloud Run DeletionTimestamp is RFC 3339 date and time at which this * resource will be deleted. This field is set by the server when a graceful deletion is requested * by the user, and is not directly settable by a client. The resource is expected to be deleted * (no longer visible from resource lists, and not reachable by name) after the time in this * field, once the finalizers list is empty. As long as the finalizers list contains items, * deletion is blocked. Once the deletionTimestamp is set, this value may not be unset or be set * further into the future, although it may be shortened or the resource may be deleted prior to * this time. For example, a user may request that a pod is deleted in 30 seconds. 
The Kubelet * will react by sending a graceful termination signal to the containers in the pod. After that 30 * seconds, the Kubelet will send a hard termination signal (SIGKILL) to the container and after * cleanup, remove the pod from the API. In the presence of network partitions, this object may * still exist after this timestamp, until an administrator or automated process can determine the * resource is fully terminated. If not set, graceful deletion of the object has not been * requested. Populated by the system when a graceful deletion is requested. Read-only. More info: * https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata * The value may be {@code null}. */ @com.google.api.client.util.Key private String deletionTimestamp; /** * (Optional) Not supported by Cloud Run Must be empty before the object is deleted from the * registry. Each entry is an identifier for the responsible component that will remove the entry * from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only * be removed. +patchStrategy=merge * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> finalizers; /** * (Optional) Not supported by Cloud Run GenerateName is an optional prefix, used by the server, * to generate a unique name ONLY IF the Name field has not been provided. If this field is used, * the name returned to the client will be different than the name passed. This value will also be * combined with a unique suffix. The provided value has the same validation rules as the Name * field, and may be truncated by the length of the suffix required to make the value unique on * the server. 
If this field is specified and the generated name exists, the server will NOT * return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout * indicating a unique name could not be found in the time allotted, and the client should retry * (optionally after the time indicated in the Retry-After header). Applied only if Name is not * specified. More info: https://git.k8s.io/community/contributors/devel/api- * conventions.md#idempotency string generateName = 2; * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String generateName; /** * (Optional) A sequence number representing a specific generation of the desired state. Populated * by the system. Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer generation; /** * (Optional) Map of string keys and values that can be used to organize and categorize (scope and * select) objects. May match selectors of replication controllers and routes. More info: * https://kubernetes.io/docs/user-guide/labels * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> labels; /** * Name must be unique within a namespace, within a Cloud Run region. Is required when creating * resources, although some resources may allow a client to request the generation of an * appropriate name automatically. Name is primarily intended for creation idempotence and * configuration definition. Cannot be updated. More info: https://kubernetes.io/docs/user- * guide/identifiers#names +optional * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * Namespace defines the space within each name must be unique, within a Cloud Run region. In * Cloud Run the namespace must be equal to either the project ID or project number. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String namespace; /** * (Optional) Not supported by Cloud Run List of objects that own this object. If ALL objects in * the list have been deleted, this object will be garbage collected. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<OwnerReference> ownerReferences; /** * Optional. An opaque value that represents the internal version of this object that can be used * by clients to determine when objects have changed. May be used for optimistic concurrency, * change detection, and the watch operation on a resource or set of resources. Clients must treat * these values as opaque and passed unmodified back to the server or omit the value to disable * conflict-detection. They may only be valid for a particular resource or set of resources. * Populated by the system. Read-only. Value must be treated as opaque by clients or omitted. More * info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md * #concurrency-control-and-consistency * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String resourceVersion; /** * (Optional) SelfLink is a URL representing this object. Populated by the system. Read-only. * string selfLink = 4; * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String selfLink; /** * (Optional) UID is the unique in time and space value for this object. It is typically generated * by the server on successful creation of a resource and is not allowed to change on PUT * operations. Populated by the system. Read-only. More info: https://kubernetes.io/docs/user- * guide/identifiers#uids * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String uid; /** * (Optional) Annotations is an unstructured key value map stored with a resource that may be set * by external tools to store and retrieve arbitrary metadata. 
They are not queryable and should * be preserved when modifying objects. More info: https://kubernetes.io/docs/user- * guide/annotations * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getAnnotations() { return annotations; } /** * (Optional) Annotations is an unstructured key value map stored with a resource that may be set * by external tools to store and retrieve arbitrary metadata. They are not queryable and should * be preserved when modifying objects. More info: https://kubernetes.io/docs/user- * guide/annotations * @param annotations annotations or {@code null} for none */ public ObjectMeta setAnnotations(java.util.Map<String, java.lang.String> annotations) { this.annotations = annotations; return this; } /** * (Optional) Not supported by Cloud Run The name of the cluster which the object belongs to. This * is used to distinguish resources with same name and namespace in different clusters. This field * is not set anywhere right now and apiserver is going to ignore it if set in create or update * request. * @return value or {@code null} for none */ public java.lang.String getClusterName() { return clusterName; } /** * (Optional) Not supported by Cloud Run The name of the cluster which the object belongs to. This * is used to distinguish resources with same name and namespace in different clusters. This field * is not set anywhere right now and apiserver is going to ignore it if set in create or update * request. * @param clusterName clusterName or {@code null} for none */ public ObjectMeta setClusterName(java.lang.String clusterName) { this.clusterName = clusterName; return this; } /** * (Optional) CreationTimestamp is a timestamp representing the server time when this object was * created. It is not guaranteed to be set in happens-before order across separate operations. * Clients may not set this value. It is represented in RFC3339 form and is in UTC. Populated by * the system. Read-only. Null for lists. 
More info: * https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata * @return value or {@code null} for none */ public String getCreationTimestamp() { return creationTimestamp; } /** * (Optional) CreationTimestamp is a timestamp representing the server time when this object was * created. It is not guaranteed to be set in happens-before order across separate operations. * Clients may not set this value. It is represented in RFC3339 form and is in UTC. Populated by * the system. Read-only. Null for lists. More info: * https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata * @param creationTimestamp creationTimestamp or {@code null} for none */ public ObjectMeta setCreationTimestamp(String creationTimestamp) { this.creationTimestamp = creationTimestamp; return this; } /** * (Optional) Not supported by Cloud Run Number of seconds allowed for this object to gracefully * terminate before it will be removed from the system. Only set when deletionTimestamp is also * set. May only be shortened. Read-only. * @return value or {@code null} for none */ public java.lang.Integer getDeletionGracePeriodSeconds() { return deletionGracePeriodSeconds; } /** * (Optional) Not supported by Cloud Run Number of seconds allowed for this object to gracefully * terminate before it will be removed from the system. Only set when deletionTimestamp is also * set. May only be shortened. Read-only. * @param deletionGracePeriodSeconds deletionGracePeriodSeconds or {@code null} for none */ public ObjectMeta setDeletionGracePeriodSeconds(java.lang.Integer deletionGracePeriodSeconds) { this.deletionGracePeriodSeconds = deletionGracePeriodSeconds; return this; } /** * (Optional) Not supported by Cloud Run DeletionTimestamp is RFC 3339 date and time at which this * resource will be deleted. This field is set by the server when a graceful deletion is requested * by the user, and is not directly settable by a client. 
The resource is expected to be deleted * (no longer visible from resource lists, and not reachable by name) after the time in this * field, once the finalizers list is empty. As long as the finalizers list contains items, * deletion is blocked. Once the deletionTimestamp is set, this value may not be unset or be set * further into the future, although it may be shortened or the resource may be deleted prior to * this time. For example, a user may request that a pod is deleted in 30 seconds. The Kubelet * will react by sending a graceful termination signal to the containers in the pod. After that 30 * seconds, the Kubelet will send a hard termination signal (SIGKILL) to the container and after * cleanup, remove the pod from the API. In the presence of network partitions, this object may * still exist after this timestamp, until an administrator or automated process can determine the * resource is fully terminated. If not set, graceful deletion of the object has not been * requested. Populated by the system when a graceful deletion is requested. Read-only. More info: * https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata * @return value or {@code null} for none */ public String getDeletionTimestamp() { return deletionTimestamp; } /** * (Optional) Not supported by Cloud Run DeletionTimestamp is RFC 3339 date and time at which this * resource will be deleted. This field is set by the server when a graceful deletion is requested * by the user, and is not directly settable by a client. The resource is expected to be deleted * (no longer visible from resource lists, and not reachable by name) after the time in this * field, once the finalizers list is empty. As long as the finalizers list contains items, * deletion is blocked. Once the deletionTimestamp is set, this value may not be unset or be set * further into the future, although it may be shortened or the resource may be deleted prior to * this time. 
For example, a user may request that a pod is deleted in 30 seconds. The Kubelet * will react by sending a graceful termination signal to the containers in the pod. After that 30 * seconds, the Kubelet will send a hard termination signal (SIGKILL) to the container and after * cleanup, remove the pod from the API. In the presence of network partitions, this object may * still exist after this timestamp, until an administrator or automated process can determine the * resource is fully terminated. If not set, graceful deletion of the object has not been * requested. Populated by the system when a graceful deletion is requested. Read-only. More info: * https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata * @param deletionTimestamp deletionTimestamp or {@code null} for none */ public ObjectMeta setDeletionTimestamp(String deletionTimestamp) { this.deletionTimestamp = deletionTimestamp; return this; } /** * (Optional) Not supported by Cloud Run Must be empty before the object is deleted from the * registry. Each entry is an identifier for the responsible component that will remove the entry * from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only * be removed. +patchStrategy=merge * @return value or {@code null} for none */ public java.util.List<java.lang.String> getFinalizers() { return finalizers; } /** * (Optional) Not supported by Cloud Run Must be empty before the object is deleted from the * registry. Each entry is an identifier for the responsible component that will remove the entry * from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only * be removed. 
+patchStrategy=merge * @param finalizers finalizers or {@code null} for none */ public ObjectMeta setFinalizers(java.util.List<java.lang.String> finalizers) { this.finalizers = finalizers; return this; } /** * (Optional) Not supported by Cloud Run GenerateName is an optional prefix, used by the server, * to generate a unique name ONLY IF the Name field has not been provided. If this field is used, * the name returned to the client will be different than the name passed. This value will also be * combined with a unique suffix. The provided value has the same validation rules as the Name * field, and may be truncated by the length of the suffix required to make the value unique on * the server. If this field is specified and the generated name exists, the server will NOT * return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout * indicating a unique name could not be found in the time allotted, and the client should retry * (optionally after the time indicated in the Retry-After header). Applied only if Name is not * specified. More info: https://git.k8s.io/community/contributors/devel/api- * conventions.md#idempotency string generateName = 2; * @return value or {@code null} for none */ public java.lang.String getGenerateName() { return generateName; } /** * (Optional) Not supported by Cloud Run GenerateName is an optional prefix, used by the server, * to generate a unique name ONLY IF the Name field has not been provided. If this field is used, * the name returned to the client will be different than the name passed. This value will also be * combined with a unique suffix. The provided value has the same validation rules as the Name * field, and may be truncated by the length of the suffix required to make the value unique on * the server. 
If this field is specified and the generated name exists, the server will NOT * return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout * indicating a unique name could not be found in the time allotted, and the client should retry * (optionally after the time indicated in the Retry-After header). Applied only if Name is not * specified. More info: https://git.k8s.io/community/contributors/devel/api- * conventions.md#idempotency string generateName = 2; * @param generateName generateName or {@code null} for none */ public ObjectMeta setGenerateName(java.lang.String generateName) { this.generateName = generateName; return this; } /** * (Optional) A sequence number representing a specific generation of the desired state. Populated * by the system. Read-only. * @return value or {@code null} for none */ public java.lang.Integer getGeneration() { return generation; } /** * (Optional) A sequence number representing a specific generation of the desired state. Populated * by the system. Read-only. * @param generation generation or {@code null} for none */ public ObjectMeta setGeneration(java.lang.Integer generation) { this.generation = generation; return this; } /** * (Optional) Map of string keys and values that can be used to organize and categorize (scope and * select) objects. May match selectors of replication controllers and routes. More info: * https://kubernetes.io/docs/user-guide/labels * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getLabels() { return labels; } /** * (Optional) Map of string keys and values that can be used to organize and categorize (scope and * select) objects. May match selectors of replication controllers and routes. 
More info: * https://kubernetes.io/docs/user-guide/labels * @param labels labels or {@code null} for none */ public ObjectMeta setLabels(java.util.Map<String, java.lang.String> labels) { this.labels = labels; return this; } /** * Name must be unique within a namespace, within a Cloud Run region. Is required when creating * resources, although some resources may allow a client to request the generation of an * appropriate name automatically. Name is primarily intended for creation idempotence and * configuration definition. Cannot be updated. More info: https://kubernetes.io/docs/user- * guide/identifiers#names +optional * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * Name must be unique within a namespace, within a Cloud Run region. Is required when creating * resources, although some resources may allow a client to request the generation of an * appropriate name automatically. Name is primarily intended for creation idempotence and * configuration definition. Cannot be updated. More info: https://kubernetes.io/docs/user- * guide/identifiers#names +optional * @param name name or {@code null} for none */ public ObjectMeta setName(java.lang.String name) { this.name = name; return this; } /** * Namespace defines the space within each name must be unique, within a Cloud Run region. In * Cloud Run the namespace must be equal to either the project ID or project number. * @return value or {@code null} for none */ public java.lang.String getNamespace() { return namespace; } /** * Namespace defines the space within each name must be unique, within a Cloud Run region. In * Cloud Run the namespace must be equal to either the project ID or project number. * @param namespace namespace or {@code null} for none */ public ObjectMeta setNamespace(java.lang.String namespace) { this.namespace = namespace; return this; } /** * (Optional) Not supported by Cloud Run List of objects that own this object. 
If ALL objects in * the list have been deleted, this object will be garbage collected. * @return value or {@code null} for none */ public java.util.List<OwnerReference> getOwnerReferences() { return ownerReferences; } /** * (Optional) Not supported by Cloud Run List of objects that own this object. If ALL objects in * the list have been deleted, this object will be garbage collected. * @param ownerReferences ownerReferences or {@code null} for none */ public ObjectMeta setOwnerReferences(java.util.List<OwnerReference> ownerReferences) { this.ownerReferences = ownerReferences; return this; } /** * Optional. An opaque value that represents the internal version of this object that can be used * by clients to determine when objects have changed. May be used for optimistic concurrency, * change detection, and the watch operation on a resource or set of resources. Clients must treat * these values as opaque and passed unmodified back to the server or omit the value to disable * conflict-detection. They may only be valid for a particular resource or set of resources. * Populated by the system. Read-only. Value must be treated as opaque by clients or omitted. More * info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md * #concurrency-control-and-consistency * @return value or {@code null} for none */ public java.lang.String getResourceVersion() { return resourceVersion; } /** * Optional. An opaque value that represents the internal version of this object that can be used * by clients to determine when objects have changed. May be used for optimistic concurrency, * change detection, and the watch operation on a resource or set of resources. Clients must treat * these values as opaque and passed unmodified back to the server or omit the value to disable * conflict-detection. They may only be valid for a particular resource or set of resources. * Populated by the system. Read-only. Value must be treated as opaque by clients or omitted. 
More * info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md * #concurrency-control-and-consistency * @param resourceVersion resourceVersion or {@code null} for none */ public ObjectMeta setResourceVersion(java.lang.String resourceVersion) { this.resourceVersion = resourceVersion; return this; } /** * (Optional) SelfLink is a URL representing this object. Populated by the system. Read-only. * string selfLink = 4; * @return value or {@code null} for none */ public java.lang.String getSelfLink() { return selfLink; } /** * (Optional) SelfLink is a URL representing this object. Populated by the system. Read-only. * string selfLink = 4; * @param selfLink selfLink or {@code null} for none */ public ObjectMeta setSelfLink(java.lang.String selfLink) { this.selfLink = selfLink; return this; } /** * (Optional) UID is the unique in time and space value for this object. It is typically generated * by the server on successful creation of a resource and is not allowed to change on PUT * operations. Populated by the system. Read-only. More info: https://kubernetes.io/docs/user- * guide/identifiers#uids * @return value or {@code null} for none */ public java.lang.String getUid() { return uid; } /** * (Optional) UID is the unique in time and space value for this object. It is typically generated * by the server on successful creation of a resource and is not allowed to change on PUT * operations. Populated by the system. Read-only. More info: https://kubernetes.io/docs/user- * guide/identifiers#uids * @param uid uid or {@code null} for none */ public ObjectMeta setUid(java.lang.String uid) { this.uid = uid; return this; } @Override public ObjectMeta set(String fieldName, Object value) { return (ObjectMeta) super.set(fieldName, value); } @Override public ObjectMeta clone() { return (ObjectMeta) super.clone(); } }
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.test.json;

import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.lang.reflect.Field;

import org.assertj.core.api.Assertions;

import org.springframework.beans.factory.ObjectFactory;
import org.springframework.core.ResolvableType;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.util.Assert;
import org.springframework.util.ReflectionUtils;

/**
 * Base class for AssertJ based JSON marshal testers. Exposes specific Asserts following a
 * {@code read}, {@code write} or {@code parse} of JSON content. Typically used in
 * combination with an AssertJ {@link Assertions#assertThat(Object) assertThat} call. For
 * example: <pre class="code">
 * public class ExampleObjectJsonTests {
 *
 *     private AbstractJsonTester&lt;ExampleObject&gt; json = //...
 *
 *     &#064;Test
 *     public void testWriteJson() {
 *         ExampleObject object = //...
 *         assertThat(json.write(object)).isEqualToJson("expected.json");
 *         assertThat(json.read("expected.json")).isEqualTo(object);
 *     }
 *
 * }
 * </pre> For a complete list of supported assertions see {@link JsonContentAssert} and
 * {@link ObjectContentAssert}.
 * <p>
 * To use this library JSONAssert must be on the test classpath.
 *
 * @param <T> the type under test
 * @author Phillip Webb
 * @since 1.4.0
 * @see JsonContentAssert
 * @see ObjectContentAssert
 */
public abstract class AbstractJsonMarshalTester<T> {

	private Class<?> resourceLoadClass;

	private ResolvableType type;

	/**
	 * Create a new uninitialized {@link AbstractJsonMarshalTester} instance.
	 */
	protected AbstractJsonMarshalTester() {
	}

	/**
	 * Create a new {@link AbstractJsonMarshalTester} instance.
	 * @param resourceLoadClass the source class used when loading relative classpath
	 * resources
	 * @param type the type under test
	 */
	public AbstractJsonMarshalTester(Class<?> resourceLoadClass, ResolvableType type) {
		Assert.notNull(resourceLoadClass, "ResourceLoadClass must not be null");
		Assert.notNull(type, "Type must not be null");
		initialize(resourceLoadClass, type);
	}

	/**
	 * Initialize the marshal tester for use. Subsequent calls are no-ops once both
	 * values have been set, so a field-injected tester cannot be re-targeted.
	 * @param resourceLoadClass the source class used when loading relative classpath
	 * resources
	 * @param type the type under test
	 */
	protected final void initialize(Class<?> resourceLoadClass, ResolvableType type) {
		if (this.resourceLoadClass == null && this.type == null) {
			this.resourceLoadClass = resourceLoadClass;
			this.type = type;
		}
	}

	/**
	 * Return the type under test.
	 * @return the type under test
	 */
	protected final ResolvableType getType() {
		return this.type;
	}

	/**
	 * Return class used to load relative resources.
	 * @return the resource load class
	 */
	protected final Class<?> getResourceLoadClass() {
		return this.resourceLoadClass;
	}

	/**
	 * Return {@link JsonContent} from writing the specific value.
	 * @param value the value to write
	 * @return the {@link JsonContent}
	 * @throws IOException on write error
	 */
	public JsonContent<T> write(T value) throws IOException {
		verify();
		Assert.notNull(value, "Value must not be null");
		String json = writeObject(value, this.type);
		return new JsonContent<>(this.resourceLoadClass, this.type, json);
	}

	/**
	 * Return the object created from parsing the specific JSON bytes.
	 * @param jsonBytes the source JSON bytes
	 * @return the resulting object
	 * @throws IOException on parse error
	 */
	public T parseObject(byte[] jsonBytes) throws IOException {
		verify();
		return parse(jsonBytes).getObject();
	}

	/**
	 * Return {@link ObjectContent} from parsing the specific JSON bytes.
	 * @param jsonBytes the source JSON bytes
	 * @return the {@link ObjectContent}
	 * @throws IOException on parse error
	 */
	public ObjectContent<T> parse(byte[] jsonBytes) throws IOException {
		verify();
		Assert.notNull(jsonBytes, "JsonBytes must not be null");
		return read(new ByteArrayResource(jsonBytes));
	}

	/**
	 * Return the object created from parsing the specific JSON String.
	 * @param jsonString the source JSON string
	 * @return the resulting object
	 * @throws IOException on parse error
	 */
	public T parseObject(String jsonString) throws IOException {
		verify();
		return parse(jsonString).getObject();
	}

	/**
	 * Return {@link ObjectContent} from parsing the specific JSON String.
	 * @param jsonString the source JSON string
	 * @return the {@link ObjectContent}
	 * @throws IOException on parse error
	 */
	public ObjectContent<T> parse(String jsonString) throws IOException {
		verify();
		Assert.notNull(jsonString, "JsonString must not be null");
		return read(new StringReader(jsonString));
	}

	/**
	 * Return the object created from reading from the specified classpath resource.
	 * @param resourcePath the source resource path. May be a full path or a path relative
	 * to the {@code resourceLoadClass} passed to the constructor
	 * @return the resulting object
	 * @throws IOException on read error
	 */
	public T readObject(String resourcePath) throws IOException {
		verify();
		return read(resourcePath).getObject();
	}

	/**
	 * Return {@link ObjectContent} from reading from the specified classpath resource.
	 * @param resourcePath the source resource path. May be a full path or a path relative
	 * to the {@code resourceLoadClass} passed to the constructor
	 * @return the {@link ObjectContent}
	 * @throws IOException on read error
	 */
	public ObjectContent<T> read(String resourcePath) throws IOException {
		verify();
		Assert.notNull(resourcePath, "ResourcePath must not be null");
		return read(new ClassPathResource(resourcePath, this.resourceLoadClass));
	}

	/**
	 * Return the object created from reading from the specified file.
	 * @param file the source file
	 * @return the resulting object
	 * @throws IOException on read error
	 */
	public T readObject(File file) throws IOException {
		verify();
		return read(file).getObject();
	}

	/**
	 * Return {@link ObjectContent} from reading from the specified file.
	 * @param file the source file
	 * @return the {@link ObjectContent}
	 * @throws IOException on read error
	 */
	public ObjectContent<T> read(File file) throws IOException {
		verify();
		Assert.notNull(file, "File must not be null");
		return read(new FileSystemResource(file));
	}

	/**
	 * Return the object created from reading from the specified input stream.
	 * @param inputStream the source input stream
	 * @return the resulting object
	 * @throws IOException on read error
	 */
	public T readObject(InputStream inputStream) throws IOException {
		verify();
		return read(inputStream).getObject();
	}

	/**
	 * Return {@link ObjectContent} from reading from the specified input stream.
	 * @param inputStream the source input stream
	 * @return the {@link ObjectContent}
	 * @throws IOException on read error
	 */
	public ObjectContent<T> read(InputStream inputStream) throws IOException {
		verify();
		Assert.notNull(inputStream, "InputStream must not be null");
		return read(new InputStreamResource(inputStream));
	}

	/**
	 * Return the object created from reading from the specified resource.
	 * @param resource the source resource
	 * @return the resulting object
	 * @throws IOException on read error
	 */
	public T readObject(Resource resource) throws IOException {
		verify();
		return read(resource).getObject();
	}

	/**
	 * Return {@link ObjectContent} from reading from the specified resource.
	 * @param resource the source resource
	 * @return the {@link ObjectContent}
	 * @throws IOException on read error
	 */
	public ObjectContent<T> read(Resource resource) throws IOException {
		verify();
		Assert.notNull(resource, "Resource must not be null");
		InputStream inputStream = resource.getInputStream();
		// try/finally so the stream is also released when readObject throws
		// (previously it leaked on the error path)
		try {
			T object = readObject(inputStream, this.type);
			return new ObjectContent<>(this.type, object);
		}
		finally {
			closeQuietly(inputStream);
		}
	}

	/**
	 * Return the object created from reading from the specified reader.
	 * @param reader the source reader
	 * @return the resulting object
	 * @throws IOException on read error
	 */
	public T readObject(Reader reader) throws IOException {
		verify();
		return read(reader).getObject();
	}

	/**
	 * Return {@link ObjectContent} from reading from the specified reader.
	 * @param reader the source reader
	 * @return the {@link ObjectContent}
	 * @throws IOException on read error
	 */
	public ObjectContent<T> read(Reader reader) throws IOException {
		verify();
		Assert.notNull(reader, "Reader must not be null");
		// try/finally so the reader is also released when readObject throws
		// (previously it leaked on the error path)
		try {
			T object = readObject(reader, this.type);
			return new ObjectContent<>(this.type, object);
		}
		finally {
			closeQuietly(reader);
		}
	}

	private void closeQuietly(Closeable closeable) {
		try {
			closeable.close();
		}
		catch (IOException ex) {
			// Deliberately ignored: callers only care about read/parse failures,
			// not failures while releasing the underlying resource.
		}
	}

	private void verify() {
		Assert.state(this.resourceLoadClass != null,
				"Uninitialized JsonMarshalTester (ResourceLoadClass is null)");
		Assert.state(this.type != null, "Uninitialized JsonMarshalTester (Type is null)");
	}

	/**
	 * Write the specified object to a JSON string.
	 * @param value the source value (never {@code null})
	 * @param type the resulting type (never {@code null})
	 * @return the JSON string
	 * @throws IOException on write error
	 */
	protected abstract String writeObject(T value, ResolvableType type) throws IOException;

	/**
	 * Read from the specified input stream to create an object of the specified type. The
	 * default implementation delegates to {@link #readObject(Reader, ResolvableType)}.
	 * @param inputStream the source input stream (never {@code null})
	 * @param type the resulting type (never {@code null})
	 * @return the resulting object
	 * @throws IOException on read error
	 */
	protected T readObject(InputStream inputStream, ResolvableType type)
			throws IOException {
		// NOTE(review): InputStreamReader uses the platform default charset here;
		// presumably JSON fixtures are UTF-8 — confirm before changing behavior.
		BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
		return readObject(reader, type);
	}

	/**
	 * Read from the specified reader to create an object of the specified type.
	 * @param reader the source reader (never {@code null})
	 * @param type the resulting type (never {@code null})
	 * @return the resulting object
	 * @throws IOException on read error
	 */
	protected abstract T readObject(Reader reader, ResolvableType type)
			throws IOException;

	/**
	 * Utility class used to support field initialization. Used by subclasses to support
	 * {@code initFields}.
	 *
	 * @param <M> The marshaller type
	 */
	protected static abstract class FieldInitializer<M> {

		private final Class<?> testerClass;

		@SuppressWarnings("rawtypes")
		protected FieldInitializer(
				Class<? extends AbstractJsonMarshalTester> testerClass) {
			Assert.notNull(testerClass, "TesterClass must not be null");
			this.testerClass = testerClass;
		}

		public void initFields(final Object testInstance, final M marshaller) {
			Assert.notNull(testInstance, "TestInstance must not be null");
			Assert.notNull(marshaller, "Marshaller must not be null");
			initFields(testInstance, () -> marshaller);
		}

		public void initFields(final Object testInstance,
				final ObjectFactory<M> marshaller) {
			Assert.notNull(testInstance, "TestInstance must not be null");
			Assert.notNull(marshaller, "Marshaller must not be null");
			ReflectionUtils.doWithFields(testInstance.getClass(),
					(field) -> doWithField(field, testInstance, marshaller));
		}

		protected void doWithField(Field field, Object test,
				ObjectFactory<M> marshaller) {
			// Only initialize matching tester fields that the test has not set itself
			if (this.testerClass.isAssignableFrom(field.getType())) {
				ReflectionUtils.makeAccessible(field);
				Object existingValue = ReflectionUtils.getField(field, test);
				if (existingValue == null) {
					setupField(field, test, marshaller);
				}
			}
		}

		private void setupField(Field field, Object test, ObjectFactory<M> marshaller) {
			// The tester's generic parameter (e.g. JacksonTester<Foo> -> Foo) becomes
			// the type under test
			ResolvableType type = ResolvableType.forField(field).getGeneric();
			ReflectionUtils.setField(field, test,
					createTester(test.getClass(), type, marshaller.getObject()));
		}

		protected abstract AbstractJsonMarshalTester<Object> createTester(
				Class<?> resourceLoadClass, ResolvableType type, M marshaller);

	}

}
/* * Copyright (c) 2022, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.demonstrations.calibration; import boofcv.abst.geo.calibration.CalibrateMonoPlanar; import boofcv.abst.geo.calibration.DetectSingleFiducialCalibration; import boofcv.abst.geo.calibration.ImageResults; import boofcv.alg.distort.LensDistortionWideFOV; import boofcv.alg.fiducial.calib.ConfigCalibrationTarget; import boofcv.alg.geo.calibration.CalibrationObservation; import boofcv.factory.distort.LensDistortionFactory; import boofcv.gui.BoofSwingUtil; import boofcv.gui.StandardAlgConfigPanel; import boofcv.gui.calibration.DisplayCalibrationPanel; import boofcv.gui.calibration.DisplayFisheyeCalibrationPanel; import boofcv.gui.calibration.DisplayPinholeCalibrationPanel; import boofcv.gui.calibration.UtilCalibrationGui; import boofcv.gui.controls.CalibrationModelPanel; import boofcv.gui.controls.CalibrationTargetPanel; import boofcv.gui.controls.JCheckBoxValue; import boofcv.gui.controls.JSpinnerNumber; import boofcv.gui.image.ImagePanel; import boofcv.gui.image.ScaleOptions; import boofcv.gui.image.ShowImages; import boofcv.gui.settings.GlobalSettingsControls; import boofcv.io.PathLabel; import boofcv.io.UtilIO; import boofcv.io.calibration.CalibrationIO; import boofcv.io.image.ConvertBufferedImage; import boofcv.io.image.UtilImageIO; import boofcv.misc.BoofMiscOps; import boofcv.misc.VariableLockSet; import 
boofcv.struct.calib.CameraModel; import boofcv.struct.calib.CameraModelType; import boofcv.struct.calib.CameraPinholeBrown; import boofcv.struct.calib.StereoParameters; import boofcv.struct.image.GrayF32; import lombok.Getter; import org.apache.commons.io.FilenameUtils; import org.ddogleg.struct.DogArray; import org.ddogleg.struct.DogArray_I32; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.filechooser.FileNameExtensionFilter; import java.awt.*; import java.awt.event.KeyEvent; import java.awt.image.BufferedImage; import java.io.File; import java.util.List; import java.util.*; import static boofcv.gui.BoofSwingUtil.MAX_ZOOM; import static boofcv.gui.BoofSwingUtil.MIN_ZOOM; /** * Application for calibrating single cameras from planar targets. User can change the camera model and * target type from the GUI. * * @author Peter Abeles */ @SuppressWarnings({"NullAway.Init"}) public class CalibrateMonocularPlanarApp extends JPanel { public static final String CALIBRATION_TARGET = "calibration_target.yaml"; public static final String INTRINSICS = "intrinsics.yaml"; public JMenuBar menuBar; protected JMenu menuRecent; // Window the application is shown in public JFrame window; boolean calibratorChanged = true; boolean targetChanged = true; // if true the landmarks have been modified and it should not display results boolean resultsInvalid; //----------------------- GUI owned objects protected @Getter ConfigureInfoPanel configurePanel = new ConfigureInfoPanel(); protected CalibrationListPanel imageListPanel = createImageListPanel(); // protected ImageCalibrationPanel imagePanel = new ImageCalibrationPanel(); protected DisplayFisheyeCalibrationPanel fisheyePanel = new DisplayFisheyeCalibrationPanel(); protected DisplayPinholeCalibrationPanel pinholePanel = new DisplayPinholeCalibrationPanel(); protected boolean cameraIsPinhole = true; //-------------------------------------------------------------------- // Directory where images were 
loaded from File imageDirectory = new File("."); // True if a thread is running for calibration protected boolean runningCalibration = false; protected DetectorLocked detectorSet = new DetectorLocked(); protected ResultsLocked results = new ResultsLocked(); { BoofSwingUtil.initializeSwing(); } public CalibrateMonocularPlanarApp() { setLayout(new BorderLayout()); imageListPanel.setPreferredSize(new Dimension(200, 200)); // When these images change value pass on the scale fisheyePanel.setScale = ( scale ) -> configurePanel.setZoom(scale); pinholePanel.setScale = ( scale ) -> configurePanel.setZoom(scale); updateVisualizationSettings(); getCalibrationPanel().setPreferredSize(new Dimension(600, 600)); createMenuBar(); add(configurePanel, BorderLayout.WEST); add(imageListPanel, BorderLayout.EAST); add(getCalibrationPanel(), BorderLayout.CENTER); createAlgorithms(); } protected void createMenuBar() { menuBar = new JMenuBar(); JMenu menuFile = new JMenu("File"); menuFile.setMnemonic(KeyEvent.VK_F); menuBar.add(menuFile); var menuItemFile = new JMenuItem("Open Images"); BoofSwingUtil.setMenuItemKeys(menuItemFile, KeyEvent.VK_O, KeyEvent.VK_O); menuItemFile.addActionListener(( e ) -> openImages()); menuFile.add(menuItemFile); menuRecent = new JMenu("Open Recent"); menuFile.add(menuRecent); updateRecentItems(); var menuItemSaveCalibration = new JMenuItem("Save Intrinsics"); BoofSwingUtil.setMenuItemKeys(menuItemSaveCalibration, KeyEvent.VK_S, KeyEvent.VK_S); menuItemSaveCalibration.addActionListener(( e ) -> saveIntrinsics(this, imageDirectory, detectorSet.calibrator.getIntrinsic())); menuFile.add(menuItemSaveCalibration); var menuItemSaveLandmarks = new JMenuItem("Save Landmarks"); menuItemSaveLandmarks.addActionListener(( e ) -> saveLandmarks()); menuFile.add(menuItemSaveLandmarks); var menuItemSaveTarget = new JMenuItem("Save Target"); menuItemSaveTarget.addActionListener(( e ) -> saveCalibrationTarget(this, imageDirectory, 
configurePanel.targetPanel.createConfigCalibrationTarget())); menuFile.add(menuItemSaveTarget); JMenuItem menuSettings = new JMenuItem("Settings"); menuSettings.addActionListener(e -> new GlobalSettingsControls().showDialog(window, this)); var menuItemQuit = new JMenuItem("Quit", KeyEvent.VK_Q); menuItemQuit.addActionListener(e -> System.exit(0)); BoofSwingUtil.setMenuItemKeys(menuItemQuit, KeyEvent.VK_Q, KeyEvent.VK_Q); menuFile.addSeparator(); menuFile.add(menuSettings); menuFile.add(menuItemQuit); } /** * Adds a new menu for examples */ public void addExamples( List<PathLabel> examples ) { JMenu menuExamples = new JMenu("Examples"); for (PathLabel p : examples) { var menuItem = new JMenuItem(p.label); menuItem.addActionListener(( e ) -> processDirectory(new File(p.path[0]))); menuExamples.add(menuItem); } menuBar.add(menuExamples); } /** * Let the user select a directory to save detected landmarks */ protected void saveLandmarks() { // Open a dialog which will save using the default name in the place images were recently loaded from var chooser = new JFileChooser(); chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); chooser.setSelectedFile(imageDirectory); int returnVal = chooser.showSaveDialog(this); if (returnVal != JFileChooser.APPROVE_OPTION) { return; } // Make sure the directory exists File destination = chooser.getSelectedFile(); if (!destination.exists()) BoofMiscOps.checkTrue(destination.mkdirs()); BoofMiscOps.checkTrue(!destination.isFile(), "Can't select a file as output"); try { results.safe(() -> { String detectorName = detectorSet.select(() -> detectorSet.detector.getClass().getSimpleName()); for (String imageName : results.imagePaths) { String outputName = FilenameUtils.getBaseName(imageName) + ".csv"; CalibrationIO.saveLandmarksCsv(imageName, detectorName, results.getObservation(imageName), new File(destination, outputName)); } }); } catch (RuntimeException e) { e.printStackTrace(); BoofSwingUtil.warningDialog(this, e); } } /** * Saves 
found intrinsic parameters */ protected static void saveIntrinsics( JComponent owner, File directory, Object calibration ) { boolean mono = calibration instanceof CameraModel; var chooser = new JFileChooser(); chooser.addChoosableFileFilter(new FileNameExtensionFilter("yaml", "yaml", "yml")); chooser.setFileSelectionMode(JFileChooser.FILES_ONLY); chooser.setSelectedFile(new File(directory, mono ? INTRINSICS : "stereo.yaml")); int returnVal = chooser.showSaveDialog(owner); if (returnVal != JFileChooser.APPROVE_OPTION) { return; } try { if (mono) { CalibrationIO.save((CameraModel)calibration, chooser.getSelectedFile()); } else { CalibrationIO.save((StereoParameters)calibration, chooser.getSelectedFile()); } } catch (RuntimeException e) { e.printStackTrace(); BoofSwingUtil.warningDialog(owner, e); } } /** * Saves a calibration target description to disk so that it can be loaded again later on. */ protected static void saveCalibrationTarget( Component owner, File imageDirectory, ConfigCalibrationTarget target ) { // Open a dialog which will save using the default name in the place images were recently loaded from var chooser = new JFileChooser(); chooser.addChoosableFileFilter(new FileNameExtensionFilter("yaml", "yaml", "yml")); chooser.setFileSelectionMode(JFileChooser.FILES_ONLY); chooser.setCurrentDirectory(imageDirectory); chooser.setSelectedFile(new File(imageDirectory, CALIBRATION_TARGET)); int returnVal = chooser.showSaveDialog(owner); if (returnVal != JFileChooser.APPROVE_OPTION) { return; } try { File file = chooser.getSelectedFile(); UtilIO.saveConfig(target, new ConfigCalibrationTarget(), file); } catch (RuntimeException e) { e.printStackTrace(); BoofSwingUtil.warningDialog(owner, e); } } public void setMenuBarEnabled( boolean enabled ) { menuBar.setEnabled(enabled); } protected void openImages() { File selected = BoofSwingUtil.openFileChooser(this, BoofSwingUtil.FileTypes.DIRECTORIES); if (selected == null) return; processDirectory(selected); } /** * Change 
camera model */
protected void createAlgorithms() {
	// Recreate the detector only if the target definition changed; always rebuild the calibrator
	detectorSet.safe(() -> {
		if (targetChanged)
			detectorSet.detector = configurePanel.targetPanel.createSingleTargetDetector();
		detectorSet.calibrator = new CalibrateMonoPlanar(detectorSet.detector.getLayout());
		configurePanel.modelPanel.configureCalibrator(detectorSet.calibrator);
	});

	// See which type of camera model is active
	boolean isPinhole = configurePanel.modelPanel.selected == CameraModelType.BROWN;

	// Switch image visualization if it's a pinhole or fisheye model
	SwingUtilities.invokeLater(() -> {
		if (isPinhole != cameraIsPinhole) {
			// Pass in the image to the now active panel
			BufferedImage image = getCalibrationPanel().getImage();
			remove(getCalibrationPanel());
			cameraIsPinhole = isPinhole;
			add(getCalibrationPanel(), BorderLayout.CENTER);
			// Synchronize the image panel's state with the latest settings
			getCalibrationPanel().setBufferedImageNoChange(image);
			getCalibrationPanel().setScale(configurePanel.zoom.vdouble());
			updateVisualizationSettings();
			validate();
		}
	});

	targetChanged = false;
	calibratorChanged = false;
	resultsInvalid = true;
}

/**
 * Detects image features from the set
 */
public void processDirectory( File directory ) {
	List<String> selectedImages = UtilIO.listImages(directory.getPath(), true);
	if (selectedImages.isEmpty())
		return;

	BoofSwingUtil.invokeNowOrLater(() -> {
		// Disable the menu bar so the user can't try to open more images
		setMenuBarEnabled(false);
		// Add to list of recently opened directories
		BoofSwingUtil.addToRecentFiles(this, directory.getName(), BoofMiscOps.asList(directory.getPath()));
		updateRecentItems();
	});

	processImages(directory, selectedImages);
}

/**
 * Detects image features from the set
 */
public void processImages( File directory, List<String> selectedImages ) {
	loadDefaultTarget(directory, configurePanel.targetPanel);
	imageDirectory = directory;
	targetChanged = true;
	// We need to launch the processing thread from the UI thread since it might have loaded the calibration
	// target and that won't take effect until the UI thread runs
	SwingUtilities.invokeLater(() ->
			// Process the images in a non-gui thread
			new Thread(() -> handleProcessCalled(selectedImages), "OpenImages()").start());
}

/**
 * Attempts to load a calibration target configuration file from the directory and, if found,
 * applies it to the target panel on the UI thread.
 *
 * @return true if a configuration was loaded, false otherwise
 */
protected static boolean loadDefaultTarget( File directory, CalibrationTargetPanel panel ) {
	// If the calibration target type is specified load that
	var fileTarget = new File(directory, CALIBRATION_TARGET);
	try {
		// Just try to load it. Checking to see if it exists will fail inside a jar where it will always return
		// false.
		ConfigCalibrationTarget config = UtilIO.loadConfig(fileTarget);
		BoofSwingUtil.invokeNowOrLater(() -> panel.setConfigurationTo(config));
		return true;
	} catch (RuntimeException ignore) {}
	return false;
}

/** Returns the visualization panel that matches the currently active camera model type */
protected DisplayCalibrationPanel getCalibrationPanel() {
	return cameraIsPinhole ? pinholePanel : fisheyePanel;
}

/**
 * Updates the list in recent menu
 */
protected void updateRecentItems() {
	BoofSwingUtil.updateRecentItems(this, menuRecent, ( info ) -> processDirectory(new File(info.files.get(0))));
}

/**
 * Change which image is being displayed. Request from GUI
 */
private void changeSelectedGUI( int index ) {
	if (index < 0 || index >= results.imagePaths.size())
		return;
	BoofSwingUtil.checkGuiThread();

	// Change the item selected in the list
	imageListPanel.setSelected(index);

	// Image is re-loaded from disk rather than cached in memory
	String path = results.select(() -> results.imagePaths.get(index));
	BufferedImage image = UtilImageIO.loadImage(path);
	if (image == null) {
		System.err.println("Could not load image: " + path);
		return;
	}
	configurePanel.setImageSize(image.getWidth(), image.getHeight());
	getCalibrationPanel().setBufferedImageNoChange(image);

	CalibrationObservation imageObservations = getObservationsForSelected();
	ImageResults imageResults = getResultsForSelected();
	if (imageObservations == null || imageResults == null)
		return;
	getCalibrationPanel().setResults(imageObservations, imageResults, results.allUsedObservations);
	getCalibrationPanel().repaint();
}

/**
 * Handle the user clicking on the process button. This will either detect landmarks AND calibrate or just
 * calibrate using existing features.
 *
 * @param imagePaths List of images or null if it should use existing landmarks.
 */
protected void handleProcessCalled( @Nullable List<String> imagePaths ) {
	BoofSwingUtil.checkNotGuiThread();
	// Snapshot before createAlgorithms() clears the flag
	boolean detectTargets = targetChanged;
	createAlgorithms();

	if (detectTargets) {
		// If null then it must want to reprocess the current set of images
		if (imagePaths == null) {
			List<String> l = new ArrayList<>();
			results.safe(() -> l.addAll(results.imagePaths));
			imagePaths = l;
		}

		// Disable the menu bar so the user can't try to open more images
		SwingUtilities.invokeLater(() -> setMenuBarEnabled(false));
		detectLandmarksInImages(imagePaths);
	}

	calibrateFromCorners();
	SwingUtilities.invokeLater(() -> getCalibrationPanel().repaint());
}

/**
 * Detects image features from the set
 */
protected void detectLandmarksInImages( List<String> foundImages ) {
	// reset all data structures
	results.reset();
	SwingUtilities.invokeLater(() -> {
		imageListPanel.clearImages();
		getCalibrationPanel().clearCalibration();
		getCalibrationPanel().clearResults();
	});

	// Let the user configure verbose output to stdout
	detectorSet.safe(() -> BoofSwingUtil.setVerboseWithDemoSettings(detectorSet.calibrator));

	// Load and detect calibration targets
	GrayF32 gray = new GrayF32(1, 1);
	for (String path : foundImages) {
		BufferedImage buffered = UtilImageIO.loadImage(path);
		if (buffered == null) {
			System.err.println("Failed to load image: " + path);
			continue;
		}

		// Convert to gray and detect the marker inside it
		ConvertBufferedImage.convertFrom(buffered, gray);
		detectorSet.lock();
		boolean detected;
		CalibrationObservation observation;
		try {
			detected = detectorSet.detector.process(gray);
			observation = detectorSet.detector.getDetectedPoints();
		} catch (RuntimeException e) {
			// A failed detection on one image shouldn't abort the whole batch
			e.printStackTrace(System.err);
			continue;
		} finally {
			detectorSet.unlock();
		}

		// Order matters for visualization later on
		Collections.sort(observation.points, Comparator.comparingInt(a -> a.index));

		// Record that it could process this image and display it in the GUI
		results.safe(() -> {
			results.imagePaths.add(path);
			results.imageObservations.put(path, observation);
			// only images with 4 points can be used in calibration
			if (observation.points.size() >= 4) {
				results.usedImages.add(results.imagePaths.size() - 1);
				results.allUsedObservations.add(observation);
			}
			// need to create a copy since the copy being passed in to the other structures might be modified
			// later on
			results.originalObservations.grow().setTo(observation);
		});
		SwingUtilities.invokeLater(() -> {
			imageListPanel.addImage(new File(path).getName(), detected);
			// This will show the viewed image, but it won't be "selected". Selecting it will cause the image to
			// be loaded again
			getCalibrationPanel().setBufferedImageNoChange(buffered);
			getCalibrationPanel().repaint();
		});
	}

	// Officially change the selected image
	SwingUtilities.invokeLater(() -> imageListPanel.setSelected(imageListPanel.imageNames.size() - 1));
}

/**
 * Runs calibration using the current set of observations. Must NOT be called from the UI thread.
 * On success the results are displayed; on failure a warning dialog is shown.
 */
protected void calibrateFromCorners() {
	BoofSwingUtil.checkNotGuiThread();
	if (runningCalibration)
		return;
	SwingUtilities.invokeLater(() -> getCalibrationPanel().clearCalibration());
	runningCalibration = true;
	detectorSet.lock();
	// by default assume the calibration will be unsuccessful
	detectorSet.calibrationSuccess = false;
	try {
		detectorSet.calibrator.reset();
		results.safe(() -> {
			for (int usedIdx = 0; usedIdx < results.usedImages.size(); usedIdx++) {
				String image = results.imagePaths.get(results.usedImages.get(usedIdx));
				detectorSet.calibrator.addImage(results.getObservation(image));
			}
		});
		// Calibrate
		detectorSet.calibrator.process();
		detectorSet.calibrationSuccess = true;
		resultsInvalid = false;

		// Save results for visualization
		results.safe(() -> {
			results.imageResults.clear();
			List<ImageResults> listResults = detectorSet.calibrator.getErrors();
			for (int i = 0; i < listResults.size(); i++) {
				String image = results.imagePaths.get(results.usedImages.get(i));
				results.imageResults.put(image, listResults.get(i));
			}
		});

		detectorSet.calibrator.printStatistics(System.out);
	} catch (RuntimeException e) {
		e.printStackTrace();
		SwingUtilities.invokeLater(() -> BoofSwingUtil.warningDialog(this, e));
		return;
	} finally {
		runningCalibration = false;
		detectorSet.unlock();
	}

	displayCalibrationResults();
	showStatsToUser();
}

/** Updates the GUI with the newly computed intrinsic parameters */
private void displayCalibrationResults() {
	SwingUtilities.invokeLater(() -> detectorSet.safe(() -> {
		// pass in the new calibrated camera
		CameraModel foundIntrinsics = detectorSet.calibrator.getIntrinsic();
		if (cameraIsPinhole) {
			pinholePanel.setCalibration((CameraPinholeBrown)foundIntrinsics);
		} else {
			LensDistortionWideFOV model = LensDistortionFactory.wide(foundIntrinsics);
			fisheyePanel.setCalibration(model, foundIntrinsics.width, foundIntrinsics.height);
		}
		configurePanel.bCompute.setEnabled(false);

		// Force it to redraw with new image features
		int selected = imageListPanel.imageList.getSelectedIndex();
		imageListPanel.imageList.clearSelection();
		imageListPanel.setSelected(selected);

		// Show the user the found calibration parameters. Format a bit to make it look nicer
		String text = foundIntrinsics.toString().replace(',', '\n').replace("{", "\n ");
		text = text.replace('}', '\n');
		configurePanel.textAreaCalib.setText(text);
	}));
}

/** Format statistics on results and add to a text panel */
private void showStatsToUser() {
	results.safe(() -> {
		double averageError = 0.0;
		double maxError = 0.0;
		for (int i = 0; i < results.usedImages.size(); i++) {
			String image = results.imagePaths.get(results.usedImages.get(i));
			ImageResults r = results.getResults(image);
			averageError += r.meanError;
			maxError = Math.max(maxError, r.maxError);
		}
		averageError /= results.usedImages.size();
		String text = String.format("Reprojection Errors (px):\n\nmean=%.3f max=%.3f\n\n", averageError, maxError);
		text += String.format("%-10s | %8s\n", "image", "max (px)");
		for (int i = 0; i < results.usedImages.size(); i++) {
			String image = results.imagePaths.get(results.usedImages.get(i));
			ImageResults r = results.getResults(image);
			text += String.format("%-12s %8.3f\n", new File(image).getName(), r.maxError);
		}

		String _text = text;
		SwingUtilities.invokeLater(() -> {
			configurePanel.textAreaStats.setText(_text);
			configurePanel.textAreaStats.setCaretPosition(0); // show the top where summary stats are
		});
	});
}

/** Records that a setting changed and enables the compute button so the user can re-run */
protected void settingsChanged( boolean target, boolean calibrator ) {
	BoofSwingUtil.checkGuiThread();
	targetChanged |= target;
	calibratorChanged |= calibrator;
	SwingUtilities.invokeLater(() -> configurePanel.bCompute.setEnabled(true));
}

/** Removes the selected point or does nothing if nothing is selected */
protected void removePoint() {
	int whichPoint = getCalibrationPanel().getSelectedObservation();
	if (whichPoint < 0)
		return;

	CalibrationObservation observation = getCalibrationPanel().getObservation();
	if (observation == null)
		return;

	resultsInvalid = true;

	results.safe(() -> {
		if (whichPoint >= observation.points.size())
			return;
		observation.points.remove(whichPoint);
		// Too few points left for calibration, drop the entire image
		if (observation.points.size() < 4) {
			removeImage();
		}
	});

	SwingUtilities.invokeLater(() -> {
		// Remove the results since they are no longer valid
		getCalibrationPanel().results = null;
		getCalibrationPanel().deselectPoint();
		configurePanel.bCompute.setEnabled(true);
		getCalibrationPanel().repaint();
	});
}

/** Removes an image */
protected void removeImage() {
	BoofSwingUtil.invokeNowOrLater(() -> {
		int selected = imageListPanel.imageList.getSelectedIndex();
		if (selected < 0)
			return;

		// If the image isn't "used" don't remove it
		if (!imageListPanel.imageSuccess.get(selected))
			return;

		resultsInvalid = true;

		// Mark it as not used in the UI
		imageListPanel.imageSuccess.set(selected, false);

		results.safe(() -> {
			// Remove all points from this image, which will remove it from the active list
			String image = results.imagePaths.get(selected);
			results.getObservation(image).points.clear();

			// This image is no longer used for calibration
			int usedIdx = results.usedImages.indexOf(selected);
			if (usedIdx >= 0)
				results.usedImages.remove(usedIdx);
		});

		// Visually show the changes
		getCalibrationPanel().results = null;
		configurePanel.bCompute.setEnabled(true);
		getCalibrationPanel().repaint();
	});
}

/** Adds all images and points back in */
protected void undoAllRemove() {
	resultsInvalid = true;

	results.safe(() -> {
		// we will re-generate the used image list
		results.usedImages.reset();
		for (int i = 0; i < results.originalObservations.size; i++) {
			// Revert by mindlessly copying
			CalibrationObservation o = results.originalObservations.get(i);
			String image = results.imagePaths.get(i);
			results.getObservation(image).setTo(o);
			// If the image has enough points, use it
			if (o.size() >= 4)
				results.usedImages.add(i);
		}
	});

	// Update the list of which images can be used in the UI
	BoofSwingUtil.invokeNowOrLater(() -> {
		results.safe(() -> {
			for (int i = 0; i < results.usedImages.size; i++) {
				int which = results.usedImages.get(i);
				imageListPanel.imageSuccess.set(which, true);
			}
		});

		// Visually show the changes
		configurePanel.bCompute.setEnabled(true);
		getCalibrationPanel().repaint();
		imageListPanel.repaint();
	});
}

/** If an image is selected, it returns the observed calibration landmarks */
protected @Nullable CalibrationObservation getObservationsForSelected() {
	BoofSwingUtil.checkGuiThread();
	int selected = imageListPanel.selectedImage;
	return results.selectNull(() -> {
		if (selected < 0 || selected >= results.imagePaths.size())
			return null;
		return results.imageObservations.get(results.imagePaths.get(selected));
	});
}

/** Returns reprojection errors for the selected image, or null if results are stale or nothing is selected */
protected @Nullable ImageResults getResultsForSelected() {
	BoofSwingUtil.checkGuiThread();
	if (resultsInvalid)
		return null;
	int selected = imageListPanel.selectedImage;
	return results.selectNull(() -> {
		if (selected < 0 || selected >= results.imagePaths.size())
			return null;
		return results.getResults(results.imagePaths.get(selected));
	});
}

/** Pushes the current visualization checkbox/spinner state into the active calibration panel */
protected void updateVisualizationSettings() {
	DisplayCalibrationPanel panel = getCalibrationPanel();
	panel.setDisplay(configurePanel.checkPoints.value, configurePanel.checkErrors.value,
			configurePanel.checkUndistorted.value, configurePanel.checkAll.value,
			configurePanel.checkNumbers.value, configurePanel.checkOrder.value,
			configurePanel.selectErrorScale.vdouble());
	panel.showResiduals = configurePanel.checkResidual.value;
	panel.repaint();
}

/**
 * Creates and configures a panel for displaying images names and control buttons for removing points/images
 */
protected CalibrationListPanel createImageListPanel() {
	var panel = new CalibrationListPanel();
	panel.bRemovePoint.addActionListener(( e ) -> removePoint());
	panel.bRemoveImage.addActionListener(( e ) -> removeImage());
	panel.bReset.addActionListener(( e ) -> undoAllRemove());
	panel.selectionChanged = this::changeSelectedGUI;
	return panel;
}

/**
 * Provides controls to configure detection and calibration while also listing all the files
 */
public class ConfigureInfoPanel extends StandardAlgConfigPanel {
	protected JSpinnerNumber zoom = spinnerWrap(1.0, MIN_ZOOM, MAX_ZOOM, 1.0);
	protected JLabel imageSizeLabel = new JLabel();
	JButton bCompute = button("Compute", false);
	JCheckBoxValue checkPoints = checkboxWrap("Points", true).tt("Show calibration landmarks");
	JCheckBoxValue checkResidual = checkboxWrap("Residual", false).tt("Line showing residual exactly");
	JCheckBoxValue checkErrors = checkboxWrap("Errors", true).tt("Exaggerated residual errors");
	JCheckBoxValue checkUndistorted = checkboxWrap("Undistort", false).tt("Visualize undistorted image");
	JCheckBoxValue checkAll = checkboxWrap("All", false).tt("Show location of all landmarks in all images");
	JCheckBoxValue checkNumbers = checkboxWrap("Numbers", false).tt("Draw feature numbers");
	JCheckBoxValue checkOrder = checkboxWrap("Order", true).tt("Visualize landmark order");
	JSpinnerNumber selectErrorScale = spinnerWrap(10.0, 0.1, 1000.0, 2.0);
	@Getter CalibrationModelPanel modelPanel = new CalibrationModelPanel();
	@Getter CalibrationTargetPanel targetPanel = new CalibrationTargetPanel(( a, b ) -> handleUpdatedTarget(), true);

	// Displays a preview of the calibration target
	ImagePanel targetPreviewPanel = new ImagePanel();
	// Displays calibration information
	JTextArea textAreaCalib = new JTextArea();
	JTextArea textAreaStats = new JTextArea();

	public ConfigureInfoPanel() {
		configureTextArea(textAreaCalib);
		configureTextArea(textAreaStats);

		modelPanel.listener = () -> settingsChanged(false, true);

		targetPreviewPanel.setScaling(ScaleOptions.DOWN);
		targetPreviewPanel.setCentering(true);
		targetPreviewPanel.setPreferredSize(new Dimension(200, 300));
		var targetVerticalPanel = new JPanel(new BorderLayout());
		targetVerticalPanel.add(targetPanel, BorderLayout.NORTH);
		targetVerticalPanel.add(targetPreviewPanel, BorderLayout.CENTER);
		handleUpdatedTarget();

		JTabbedPane tabbedPane = new JTabbedPane();
		tabbedPane.addTab("Model", modelPanel);
		tabbedPane.addTab("Target", targetVerticalPanel);
		tabbedPane.addTab("Calib", new JScrollPane(textAreaCalib));
		tabbedPane.addTab("Stats", new JScrollPane(textAreaStats));

		addLabeled(imageSizeLabel, "Image Size", "Size of image being viewed");
		addLabeled(zoom.spinner, "Zoom", "Zoom of image being viewed");
		addAlignCenter(bCompute, "Press to compute calibration with current settings.");
		add(createVisualFlagPanel());
		addLabeled(selectErrorScale.spinner, "Error Scale", "Increases the error visualization");
		add(tabbedPane);
	}

	// Shared setup for the read-only monospaced text areas
	private void configureTextArea( JTextArea textAreaCalib ) {
		textAreaCalib.setEditable(false);
		textAreaCalib.setWrapStyleWord(true);
		textAreaCalib.setLineWrap(true);
		textAreaCalib.setFont(new Font("monospaced", Font.PLAIN, 12));
	}

	// Re-renders the target preview and flags the target configuration as changed
	private void handleUpdatedTarget() {
		BufferedImage preview = UtilCalibrationGui.renderTargetBuffered(
				targetPanel.selected, targetPanel.getActiveConfig(), 40);
		targetPreviewPanel.setImageUI(preview);
		settingsChanged(true, false);
	}

	// Builds the "Visual Flags" checkbox grid
	private JPanel createVisualFlagPanel() {
		var panel = new JPanel(new GridLayout(0, 3));
		panel.setBorder(BorderFactory.createTitledBorder("Visual Flags"));

		panel.add(checkPoints.check);
		panel.add(checkErrors.check);
		panel.add(checkUndistorted.check);
		panel.add(checkResidual.check);
		panel.add(checkAll.check);
		panel.add(checkNumbers.check);
		panel.add(checkOrder.check);

		panel.setMaximumSize(panel.getPreferredSize());
		return panel;
	}

	/** Sets the zoom level, clamped to [MIN_ZOOM, MAX_ZOOM]; no-op if unchanged */
	public void setZoom( double _zoom ) {
		_zoom = Math.max(MIN_ZOOM, _zoom);
		_zoom = Math.min(MAX_ZOOM, _zoom);
		if (_zoom == zoom.value.doubleValue())
			return;
		zoom.value = _zoom;

		BoofSwingUtil.invokeNowOrLater(() -> zoom.spinner.setValue(zoom.value));
	}

	/** Updates the image-size label on the UI thread */
	public void setImageSize( final int width, final int height ) {
		BoofSwingUtil.invokeNowOrLater(() -> imageSizeLabel.setText(width + " x " + height));
	}

	@Override public void controlChanged( final Object source ) {
		if (source == bCompute) {
			if (!runningCalibration) {
				new Thread(() -> handleProcessCalled(null), "bCompute").start();
			}
		} else if (source == zoom.spinner) {
			zoom.updateValue();
			getCalibrationPanel().setScale(zoom.vdouble());
		} else {
			updateVisualizationSettings();
		}
	}
}

/** Detector and calibrator plus the success flag, all guarded by the inherited lock */
private static class DetectorLocked extends VariableLockSet {
	protected DetectSingleFiducialCalibration detector;
	protected CalibrateMonoPlanar calibrator;
	protected boolean calibrationSuccess;
}

/** All observation/result data structures, guarded by the inherited lock */
private static class ResultsLocked extends VariableLockSet {
	// Path to all input images
	protected final List<String> imagePaths = new ArrayList<>();
	// List of found observations and results
	protected final Map<String, CalibrationObservation> imageObservations = new HashMap<>();
	protected final Map<String, ImageResults> imageResults = new HashMap<>();
	// All observations with at least 4 points
	protected final List<CalibrationObservation> allUsedObservations = new ArrayList<>();
	// Index of images used when calibrating
	protected final DogArray_I32 usedImages = new DogArray_I32();
	// Copy of original observation before any edits
	protected final DogArray<CalibrationObservation> originalObservations = new DogArray<>(CalibrationObservation::new);

	// Throws if no observation is stored for the key
	public CalibrationObservation getObservation( String key ) {
		return Objects.requireNonNull(imageObservations.get(key));
	}

	// Throws if no results are stored for the key
	public ImageResults getResults( String key ) {
		return Objects.requireNonNull(imageResults.get(key));
	}

	// Clears all stored data while holding the lock
	public void reset() {
		safe(() -> {
			imagePaths.clear();
			imageObservations.clear();
			imageResults.clear();
			usedImages.reset();
			allUsedObservations.clear();
			originalObservations.reset();
		});
	}
}

/** Application entry point: opens the calibration window on the UI thread */
public static void main( @Nullable String[] args ) {
	SwingUtilities.invokeLater(() -> {
		var app = new CalibrateMonocularPlanarApp();
		app.window = ShowImages.showWindow(app, "Monocular Planar Calibration", true);
		app.window.setJMenuBar(app.menuBar);
	});
}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.routing;

import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.test.ESAllocationTestCase;

import java.util.Collections;

import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Tests for {@link UnassignedInfo}: reason assignment during routing-table changes,
 * serialization, state transitions, and delayed-allocation calculations.
 */
public class UnassignedInfoTests extends ESAllocationTestCase {

    /** The reason ordinals are part of the wire format, so their order must not change */
    public void testReasonOrdinalOrder() {
        UnassignedInfo.Reason[] order = new UnassignedInfo.Reason[]{
                UnassignedInfo.Reason.INDEX_CREATED,
                UnassignedInfo.Reason.CLUSTER_RECOVERED,
                UnassignedInfo.Reason.INDEX_REOPENED,
                UnassignedInfo.Reason.DANGLING_INDEX_IMPORTED,
                UnassignedInfo.Reason.NEW_INDEX_RESTORED,
                UnassignedInfo.Reason.EXISTING_INDEX_RESTORED,
                UnassignedInfo.Reason.REPLICA_ADDED,
                UnassignedInfo.Reason.ALLOCATION_FAILED,
                UnassignedInfo.Reason.NODE_LEFT,
                UnassignedInfo.Reason.REROUTE_CANCELLED,
                UnassignedInfo.Reason.REINITIALIZED,
                UnassignedInfo.Reason.REALLOCATED_REPLICA,
                UnassignedInfo.Reason.PRIMARY_FAILED};
        for (int i = 0; i < order.length; i++) {
            assertThat(order[i].ordinal(), equalTo(i));
        }
        // no reason may be missing from the expected list above
        assertThat(UnassignedInfo.Reason.values().length, equalTo(order.length));
    }

    /** Round-trips an UnassignedInfo through the stream protocol and verifies all fields survive */
    public void testSerialization() throws Exception {
        UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values());
        // ALLOCATION_FAILED uses the constructor that also carries the failure count
        UnassignedInfo meta = reason == UnassignedInfo.Reason.ALLOCATION_FAILED ?
                new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null, null,
                        randomIntBetween(1, 100), System.nanoTime(), System.currentTimeMillis(), false) :
                new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null);
        BytesStreamOutput out = new BytesStreamOutput();
        meta.writeTo(out);
        out.close();

        UnassignedInfo read = new UnassignedInfo(out.bytes().streamInput());
        assertThat(read.getReason(), equalTo(meta.getReason()));
        assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis()));
        assertThat(read.getMessage(), equalTo(meta.getMessage()));
        assertThat(read.getDetails(), equalTo(meta.getDetails()));
        assertThat(read.getNumFailedAllocations(), equalTo(meta.getNumFailedAllocations()));
    }

    /** Shards of a freshly created index are unassigned with reason INDEX_CREATED */
    public void testIndexCreated() {
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(randomIntBetween(1, 3)).numberOfReplicas(randomIntBetween(0, 3)))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNew(metaData.index("test")).build()).build();
        for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) {
            assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.INDEX_CREATED));
        }
    }

    /** Shards restored from full cluster recovery are unassigned with reason CLUSTER_RECOVERED */
    public void testClusterRecovered() {
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(randomIntBetween(1, 3)).numberOfReplicas(randomIntBetween(0, 3)))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsRecovery(metaData.index("test")).build()).build();
        for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) {
            assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.CLUSTER_RECOVERED));
        }
    }

    /** Shards of a closed index that is re-opened are unassigned with reason INDEX_REOPENED */
    public void testIndexReopened() {
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(randomIntBetween(1, 3)).numberOfReplicas(randomIntBetween(0, 3)))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsFromCloseToOpen(metaData.index("test")).build()).build();
        for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) {
            assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.INDEX_REOPENED));
        }
    }

    /** Shards of an index restored as a new index from a snapshot get reason NEW_INDEX_RESTORED */
    public void testNewIndexRestored() {
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(randomIntBetween(1, 3)).numberOfReplicas(randomIntBetween(0, 3)))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNewRestore(metaData.index("test"),
                        new RestoreSource(new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())),
                                Version.CURRENT, "test"), new IntHashSet()).build()).build();
        for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) {
            assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NEW_INDEX_RESTORED));
        }
    }

    /** Shards of an existing index restored from a snapshot get reason EXISTING_INDEX_RESTORED */
    public void testExistingIndexRestored() {
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(randomIntBetween(1, 3)).numberOfReplicas(randomIntBetween(0, 3)))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsRestore(metaData.index("test"),
                        new RestoreSource(new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())),
                                Version.CURRENT, "test")).build()).build();
        for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) {
            assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.EXISTING_INDEX_RESTORED));
        }
    }

    /** Shards of a dangling index that gets imported get reason DANGLING_INDEX_IMPORTED */
    public void testDanglingIndexImported() {
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(randomIntBetween(1, 3)).numberOfReplicas(randomIntBetween(0, 3)))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsFromDangling(metaData.index("test")).build()).build();
        for (ShardRouting shard : clusterState.getRoutingNodes().shardsWithState(UNASSIGNED)) {
            assertThat(shard.unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.DANGLING_INDEX_IMPORTED));
        }
    }

    /** A replica added to an already-started index is unassigned with reason REPLICA_ADDED */
    public void testReplicaAdded() {
        AllocationService allocation = createAllocationService();
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0))
                .build();
        final Index index = metaData.index("test").getIndex();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNew(metaData.index(index)).build()).build();
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        // starting primaries
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        // bump the replica count after the primary has started
        IndexRoutingTable.Builder builder = IndexRoutingTable.builder(index);
        for (IndexShardRoutingTable indexShardRoutingTable : clusterState.routingTable().index(index)) {
            builder.addIndexShard(indexShardRoutingTable);
        }
        builder.addReplica();
        clusterState = ClusterState.builder(clusterState).routingTable(RoutingTable.builder(clusterState.routingTable()).add(builder).build()).build();
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo(), notNullValue());
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.REPLICA_ADDED));
    }

    /**
     * The unassigned meta is kept when a shard goes to INITIALIZING, but cleared when it moves to STARTED.
     */
    public void testStateTransitionMetaHandling() {
        ShardRouting shard = TestShardRouting.newShardRouting("test", 1, null, null, null, true,
                ShardRoutingState.UNASSIGNED, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
        assertThat(shard.unassignedInfo(), notNullValue());
        shard = shard.initialize("test_node", null, -1);
        assertThat(shard.state(), equalTo(ShardRoutingState.INITIALIZING));
        assertThat(shard.unassignedInfo(), notNullValue());
        shard = shard.moveToStarted();
        assertThat(shard.state(), equalTo(ShardRoutingState.STARTED));
        assertThat(shard.unassignedInfo(), nullValue());
    }

    /**
     * Tests that during reroute when a node is detected as leaving the cluster, the right unassigned meta is set
     */
    public void testNodeLeave() {
        AllocationService allocation = createAllocationService();
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNew(metaData.index("test")).build()).build();
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        // starting primaries
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        // starting replicas
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(false));
        // remove node2 and reroute
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        // verify that NODE_LEAVE is the reason for meta
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(true));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo(), notNullValue());
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), greaterThan(0L));
    }

    /**
     * Verifies that when a shard fails, reason is properly set and details are preserved.
     */
    public void testFailedShard() {
        AllocationService allocation = createAllocationService();
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNew(metaData.index("test")).build()).build();
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        // starting primaries
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        // starting replicas
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(false));
        // fail shard
        ShardRouting shardToFail = clusterState.getRoutingNodes().shardsWithState(STARTED).get(0);
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyFailedShards(clusterState, Collections.singletonList(new FailedRerouteAllocation.FailedShard(shardToFail, "test fail", null)))).build();
        // verify the reason and details
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(true));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo(), notNullValue());
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getMessage(), equalTo("test fail"));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getDetails(), equalTo("test fail"));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), greaterThan(0L));
    }

    /**
     * Verifies that delayed allocation calculation are correct.
     */
    public void testRemainingDelayCalculation() throws Exception {
        final long baseTime = System.nanoTime();
        UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, 0, baseTime, System.currentTimeMillis(), randomBoolean());
        final long totalDelayNanos = TimeValue.timeValueMillis(10).nanos();
        final Settings indexSettings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueNanos(totalDelayNanos)).build();
        // at baseTime the full delay remains
        long delay = unassignedInfo.getRemainingDelay(baseTime, indexSettings);
        assertThat(delay, equalTo(totalDelayNanos));
        // after some elapsed time the remaining delay shrinks accordingly
        long delta1 = randomIntBetween(1, (int) (totalDelayNanos - 1));
        delay = unassignedInfo.getRemainingDelay(baseTime + delta1, indexSettings);
        assertThat(delay, equalTo(totalDelayNanos - delta1));
        // at or past the deadline the remaining delay is zero, never negative
        delay = unassignedInfo.getRemainingDelay(baseTime + totalDelayNanos, indexSettings);
        assertThat(delay, equalTo(0L));
        delay = unassignedInfo.getRemainingDelay(baseTime + totalDelayNanos + randomIntBetween(1, 20), indexSettings);
        assertThat(delay, equalTo(0L));
    }

    public void testNumberOfDelayedUnassigned() throws Exception {
        MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
                .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNew(metaData.index("test1")).addAsNew(metaData.index("test2")).build()).build();
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), equalTo(0));
        // starting primaries
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        // starting replicas
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(false));
        // remove node2 and reroute
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build();
        // make sure both replicas are marked as delayed (i.e.
not reallocated) clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build(); assertThat(clusterState.prettyPrint(), UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), equalTo(2)); } public void testFindNextDelayedAllocation() { MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); final TimeValue delayTest1 = TimeValue.timeValueMillis(randomIntBetween(1, 200)); final TimeValue delayTest2 = TimeValue.timeValueMillis(randomIntBetween(1, 200)); final long expectMinDelaySettingsNanos = Math.min(delayTest1.nanos(), delayTest2.nanos()); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayTest1)).numberOfShards(1).numberOfReplicas(1)) .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayTest2)).numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) .metaData(metaData) .routingTable(RoutingTable.builder().addAsNew(metaData.index("test1")).addAsNew(metaData.index("test2")).build()).build(); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build(); clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build(); assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), equalTo(0)); // starting primaries clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build(); // starting replicas clusterState = 
ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build(); assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(false)); // remove node2 and reroute final long baseTime = System.nanoTime(); allocation.setNanoTimeOverride(baseTime); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build(); clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build(); final long delta = randomBoolean() ? 0 : randomInt((int) expectMinDelaySettingsNanos - 1); if (delta > 0) { allocation.setNanoTimeOverride(baseTime + delta); clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "time moved")).build(); } assertThat(UnassignedInfo.findNextDelayedAllocation(baseTime + delta, clusterState), equalTo(expectMinDelaySettingsNanos - delta)); } }
/* JAI-Ext - OpenSource Java Advanced Image Extensions Library
 * http://www.geo-solutions.it/
 * Copyright 2014 GeoSolutions
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package it.geosolutions.jaiext.piecewise;

import it.geosolutions.jaiext.range.Range;

import java.util.AbstractList;
import java.util.Arrays;
import java.util.Locale;
import java.util.MissingResourceException;

/**
 * Convenience implementation of the {@link Domain1D} interface backed by a sorted array of
 * {@link DefaultDomainElement1D}s. The list view exposed by {@link AbstractList} is read-only
 * and ordered by each element's minimum input value.
 *
 * @author Simone Giannecchini
 *
 * @source $URL$
 */
public class DefaultDomain1D<E extends DefaultDomainElement1D> extends AbstractList<E>
        implements Domain1D<E> {

    /**
     * The list of elements. This list must be sorted in increasing order of left range element.
     */
    private E[] elements;

    /**
     * {@code true} if there are gaps between elements, or {@code false} otherwise. A gap is found
     * if for example the range of values is [-9999 .. -9999] for the first domain element and
     * [0 .. 1000] for the second one.
     */
    private boolean hasGaps;

    /**
     * The "main" domain element, or {@code null} if there is none. The main domain element is the
     * quantitative domain element with the widest range of sample values.
     */
    private E main;

    /**
     * List of minimum input values for each domain element in {@link #elements}. This array
     * <strong>must</strong> be in increasing order. Actually, it is the need to sort this array
     * that determines the element order in {@link #elements}.
     */
    private double[] minimums;

    /**
     * The name for this domain element list. Will be constructed only when first needed.
     *
     * @see #getName
     */
    private String name;

    /**
     * The range of values in this domain element list. This is the union of the range of values of
     * every element, excluding {@code NaN} values. This field will be computed only when first
     * requested.
     */
    private Range range;

    /**
     * Lazily initialized hash code for this instance; {@code -1} means "not yet computed".
     */
    private int hashCode = -1;

    /**
     * Constructor for {@link DefaultDomain1D}.
     *
     * @param inDomainElements {@link DomainElement1D} objects that make up this list.
     */
    public DefaultDomain1D(E[] inDomainElements) {
        init(inDomainElements);
    }

    /**
     * Validates, sorts and indexes the supplied domain elements, detecting overlaps and gaps and
     * electing the "main" (widest) element.
     *
     * @param inDomainElements the elements to manage; a defensive clone is taken.
     * @throws IllegalArgumentException if elements overlap or are otherwise invalid.
     * @throws MissingResourceException if a localized error message cannot be loaded.
     */
    @SuppressWarnings("unchecked")
    private void init(E[] inDomainElements) throws IllegalArgumentException,
            MissingResourceException {
        // Input checks. NOTE(review): ensureNonNull presumably throws on null, which would make
        // the null branch below unreachable; kept for safety since its contract is not visible here.
        PiecewiseUtilities.ensureNonNull("DomainElement1D[]", inDomainElements);
        if (inDomainElements == null)
            this.elements = (E[]) new DefaultDomainElement1D[] { new DefaultPassthroughPiecewiseTransform1DElement(
                    "p0") };
        else
            this.elements = inDomainElements.clone();

        // Sort the input elements by their minimum input value.
        Arrays.sort(this.elements);

        // Construct the array of minimum values. During the loop, we make sure there is no
        // overlapping in input and output, and detect gaps between consecutive elements.
        hasGaps = false;
        minimums = new double[elements.length];
        for (int i = 0; i < elements.length; i++) {
            final DefaultDomainElement1D c = elements[i];
            final double inMinimum = minimums[i] = c.getInputMinimum();
            if (i != 0) {
                assert !(inMinimum < minimums[i - 1]) : inMinimum; // Use '!' to accept NaN.
                final DefaultDomainElement1D previous = elements[i - 1];
                if (PiecewiseUtilities.compare(inMinimum, previous.getInputMaximum()) <= 0) {
                    PiecewiseUtilities.domainElementsOverlap(elements, i);
                }
                // Check if there is a gap between this domain element and the previous one.
                if (!Double.isNaN(inMinimum)// TODO FIXME ADD POSSIBILITY TO GET THE MAXIMUM NOT INCLUDED/INCLUDED
                        && inMinimum != ((Range) previous.getRange()).getMax(false).doubleValue()) {
                    hasGaps = true;
                }
            }
        }

        /*
         * Search for what seems to be the "main" domain element. This loop looks for the
         * quantitative domain element (if there is one) with the widest range of sample values.
         */
        double widestSpan = 0;
        E widest = null;
        for (int i = elements.length; --i >= 0;) {
            final E candidate = elements[i];
            // An element unbounded on both sides always wins.
            if (Double.isInfinite(candidate.getInputMinimum())
                    && Double.isInfinite(candidate.getInputMaximum())) {
                widestSpan = Double.POSITIVE_INFINITY;
                widest = candidate;
                continue;
            }
            final double candidateRange = candidate.getInputMaximum() - candidate.getInputMinimum();
            if (candidateRange >= widestSpan) {
                widestSpan = candidateRange;
                widest = candidate;
            }
        }
        this.main = widest;

        // postcondition
        assert PiecewiseUtilities.isSorted(elements);
    }

    /*
     * (non-Javadoc)
     *
     * @see Domain1D#getName()
     */
    public synchronized String getName() {
        if (name == null) {
            // Use the main element's name when available, otherwise a generic placeholder.
            name = (main != null) ? String.valueOf(main.getName()) : "(Untitled)";
        }
        return name;
    }

    /*
     * (non-Javadoc)
     *
     * @see Domain1D#getRange()
     */
    public Range getApproximateDomainRange() {
        synchronized (elements) {
            // @todo TODO should I include the NaN value?
            if (range == null) {
                Range union = null;
                for (E element : elements) {
                    final Range extent = element.getRange();
                    // Skip elements whose extremes are NaN; they carry no numeric extent.
                    if (!Double.isNaN(extent.getMin().doubleValue())
                            && !Double.isNaN(extent.getMax().doubleValue())) {
                        union = (union != null) ? union.union(extent) : extent;
                    }
                }
                this.range = union;
            }
            return range;
        }
    }

    /**
     * Returns the domain element of the specified sample value. If no domain element fits, then
     * this method returns {@code null}.
     *
     * @param value The value.
     * @return The domain element of the supplied value, or {@code null}.
     */
    public E findDomainElement(final double value) {
        int i = getDomainElementIndex(value);

        // Checks
        if (i < 0)
            return null;
        E domainElement1D;
        if (i > elements.length)
            return null;

        // First of all let's check if we spotted a break point in our domain elements. If so the
        // index we got is not an insertion point but it is an actual domain element index. This
        // happens when we catch precisely a minimum element for a domain.
        if (i < elements.length) {
            domainElement1D = elements[i];
            if (domainElement1D.contains(value))
                return domainElement1D;
            // if the index was 0, unless we caught the smallest minimum we have got something
            // smaller than the leftmost domain
            if (i == 0)
                return null;
        }

        // Ok, now we know that we did not precisely catch a minimum for a domain, we therefore got
        // an insertion point. This means that, unless we have fallen into a gap we need to
        // subtract 1 to check for inclusion in the right domain.
        domainElement1D = elements[i - 1];
        if (domainElement1D.contains(value))
            return domainElement1D;

        // Well, if we get here, we have definitely fallen into a gap or the value is beyond the
        // limits of the last domain, too bad....
        assert i >= elements.length || hasGaps : value;
        return null;
    }

    /**
     * Locates {@code sample} in {@link #minimums} via binary search.
     *
     * @param sample the value to locate.
     * @return the exact index of a matching minimum, or the insertion point (a non-negative index)
     *         when no minimum matches exactly.
     */
    private int getDomainElementIndex(final double sample) {
        int i = -1;
        // Special 'binarySearch' for NaN
        i = PiecewiseUtilities.binarySearch(minimums, sample);

        if (i >= 0) {
            // The value is exactly equal to one of the minimums, or is one of the NaN values.
            // There is nothing else to do.
            assert Double.doubleToRawLongBits(sample) == Double.doubleToRawLongBits(minimums[i]);
            return i;
        }
        assert i == Arrays.binarySearch(minimums, sample) : i;
        // 'binarySearch' found the index of the "insertion point" (-(insertion point) - 1). The
        // insertion point is defined as the point at which the key would be inserted into the
        // list: the index of the first element greater than the key, or list.size(), if all
        // elements in the list are less than the specified key. Note that this guarantees that
        // the return value will be >= 0 if and only if the key is found.
        i = -i - 1;
        return i;
    }

    // ////////////////////////////////////////////////////////////////////////////////////////
    // ////// ////////
    // ////// I M P L E M E N T A T I O N O F List I N T E R F A C E ////////
    // ////// ////////
    // ////////////////////////////////////////////////////////////////////////////////////////

    /**
     * Returns the number of elements in this list.
     */
    @Override
    public int size() {
        return elements.length;
    }

    /**
     * Returns the element at the specified position in this list.
     */
    @Override
    public E get(final int i) {
        return elements[i];
    }

    /**
     * Returns a copy of all elements in this list.
     */
    @Override
    public Object[] toArray() {
        return (DomainElement1D[]) elements.clone();
    }

    /**
     * Compares the specified object with this domain element list for equality. If the two objects
     * are instances of the {@link DefaultDomain1D} class, then the test checks for the equality of
     * the single elements.
     */
    @SuppressWarnings("unchecked")
    @Override
    public boolean equals(final Object object) {
        if (this == object)
            return true;
        if (!(object instanceof DefaultDomain1D))
            return false;
        final DefaultDomain1D that = (DefaultDomain1D) object;
        if (getEquivalenceClass() != that.getEquivalenceClass())
            return false;
        if (!this.getName().equals(that.getName()))
            return false;
        if (!this.getApproximateDomainRange().equals(that.getApproximateDomainRange()))
            return false;
        if (Arrays.equals(this.elements, that.elements)) {
            assert Arrays.equals(this.minimums, that.minimums);
            return true;
        }
        return false;
    }

    protected Class<?> getEquivalenceClass() {
        return DefaultDomain1D.class;
    }

    /*
     * (non-Javadoc)
     *
     * @see Domain1D#hasGaps()
     */
    public boolean hasGaps() {
        return hasGaps;
    }

    /**
     * Return what seems to be the main {@link DomainElement1D} for this list.
     *
     * @return what seems to be the main {@link DomainElement1D} for this list.
     */
    public E getMain() {
        return main;
    }

    /**
     * Returns a copy of the sorted array of minimum input values, one per domain element.
     *
     * @return a defensive copy of the minimums array.
     */
    public double[] getMinimums() {
        return (double[]) minimums.clone();
    }

    @Override
    public int hashCode() {
        if (hashCode < 0) {
            // FIX: the previous implementation discarded the hash of 'elements' (the second
            // deepHashCode call overwrote 'result' instead of combining with it) and folded the
            // final value into the uninitialized 'hashCode' sentinel (-1) instead of 'result'.
            int result = PiecewiseUtilities.deepHashCode(elements);
            result = 31 * result + PiecewiseUtilities.deepHashCode(minimums);
            result = PiecewiseUtilities.hash(getName(), result);
            result = PiecewiseUtilities.hash(getApproximateDomainRange(), result);
            // Keep the cached value non-negative so -1 remains a reliable "not computed" sentinel.
            hashCode = result & 0x7FFFFFFF;
        }
        return hashCode;
    }
}
/** * Copyright (C) 2013 by Raphael Michel under the MIT license: * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software * is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. */ package de.geeksfactory.opacclient.objects; import java.util.List; /** * Object representing a filter which can be applied to a list of search * results. This is currently NOT implemented or used in the open source version * and is more like a bridge between the "Community Edition" and the * "Plus Edition" of the App. 
* * @author Raphael Michel * @since 2.0.6 */ public class Filter { private String label; private String identifier; private List<Option> options; /** * @param label This fitler's label * @param options The Options for this filter */ public Filter(String label, List<Option> options) { super(); this.label = label; this.options = options; } /** */ public Filter() { super(); } /** * @return This fitler's label */ public String getLabel() { return label; } /** * @param label This fitler's label */ public void setLabel(String label) { this.label = label; } /** * @return The Options for this filter */ public List<Option> getOptions() { return options; } /** * @param options The Options for this filter */ public void setOptions(List<Option> options) { this.options = options; } /** * @return The internal identifier for the filter */ public String getIdentifier() { return identifier; } /** * @param identifier The internal identifier for the filter */ public void setIdentifier(String identifier) { this.identifier = identifier; } /* * (non-Javadoc) * * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((identifier == null) ? 0 : identifier.hashCode()); result = prime * result + ((label == null) ? 
0 : label.hashCode()); return result; } /* * (non-Javadoc) * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Filter other = (Filter) obj; if (identifier == null) { if (other.identifier != null) return false; } else if (!identifier.equals(other.identifier)) return false; if (label == null) { if (other.label != null) return false; } else if (!label.equals(other.label)) return false; return true; } @Override public String toString() { return "Filter{" + "label='" + label + '\'' + ", identifier='" + identifier + '\'' + ", options=" + options + '}'; } public class Option { /** * Object representing an option which can be applied * * @since 2.0.6 */ private String label; private String identifier; private int results_expected; private boolean is_applied; private boolean is_loadnext; /** * @param label The label of the option. * @param identifier The internal identifier for the option */ public Option(String label, String identifier) { super(); this.label = label; this.identifier = identifier; } public Option() { super(); } /** * @return The label of the option. */ public String getLabel() { return label; } /** * @param label The label of the option. 
*/ public void setLabel(String label) { this.label = label; } /** * @return The internal identifier for the option */ public String getIdentifier() { return identifier; } /** * @param identifier The internal identifier for the option */ public void setIdentifier(String identifier) { this.identifier = identifier; } /** * @return The results expected to be seen when this option is applied */ public int getResults_expected() { return results_expected; } /** * @param results_expected The results expected to be seen when this option is * applied */ public void setResults_expected(int results_expected) { this.results_expected = results_expected; } /** * @return Whether this filter is already applied with this option. */ public boolean isApplied() { return is_applied; } /** * @param is_applied Whether this filter is already applied with this option. */ public void setApplied(boolean is_applied) { this.is_applied = is_applied; } /** * @return the is_loadnext */ public boolean getLoadnext() { return is_loadnext; } /** * @param is_loadnext the is_loadnext to set */ public void setLoadnext(boolean is_loadnext) { this.is_loadnext = is_loadnext; } @Override public String toString() { return "Option{" + "label='" + label + '\'' + ", identifier='" + identifier + '\'' + ", results_expected=" + results_expected + ", is_applied=" + is_applied + ", is_loadnext=" + is_loadnext + '}'; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.ranger.tagsync.sink.tagadmin;

import java.io.IOException;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.Cookie;
import javax.ws.rs.core.NewCookie;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.ranger.admin.client.datatype.RESTResponse;
import org.apache.ranger.plugin.util.RangerRESTClient;
import org.apache.ranger.plugin.util.ServiceTags;
import org.apache.ranger.tagsync.model.TagSink;
import org.apache.ranger.tagsync.process.TagSyncConfig;

import com.sun.jersey.api.client.ClientResponse;

/**
 * TagSink implementation that pushes ServiceTags to the Ranger Admin REST API.
 * upload() enqueues work items; a background thread (run()) drains the queue and
 * retries failed uploads until they succeed or the thread is interrupted.
 * Supports basic-auth, Kerberos, and session-cookie based authentication.
 */
public class TagAdminRESTSink implements TagSink, Runnable {

	private static final Log LOG = LogFactory.getLog(TagAdminRESTSink.class);

	private static final String REST_PREFIX = "/service";
	private static final String MODULE_PREFIX = "/tags";

	// PUT endpoint used for every tag upload in this class.
	private static final String REST_URL_IMPORT_SERVICETAGS_RESOURCE = REST_PREFIX + MODULE_PREFIX + "/importservicetags/";

	// Also used as the sleep interval (millis) between upload retries in run().
	private long rangerAdminConnectionCheckInterval;

	// Cached Ranger Admin session cookie; null means "no valid session yet".
	private Cookie sessionId=null;

	private boolean isValidRangerCookie=false;

	List<NewCookie> cookieList=new ArrayList<>();

	private boolean isRangerCookieEnabled;

	private RangerRESTClient tagRESTClient = null;

	private boolean isKerberized;

	// Pending uploads, produced by upload() and consumed by run().
	private BlockingQueue<UploadWorkItem> uploadWorkItems;

	private Thread myThread = null;

	/**
	 * Reads connection settings from the given properties and builds the REST client.
	 * Returns false (and logs an error) when no REST URL is configured.
	 */
	@Override
	public boolean initialize(Properties properties) {
		if (LOG.isDebugEnabled()) {
			LOG.debug("==> TagAdminRESTSink.initialize()");
		}

		boolean ret = false;

		String restUrl = TagSyncConfig.getTagAdminRESTUrl(properties);
		String sslConfigFile = TagSyncConfig.getTagAdminRESTSslConfigFile(properties);
		String userName = TagSyncConfig.getTagAdminUserName(properties);
		String password = TagSyncConfig.getTagAdminPassword(properties);
		rangerAdminConnectionCheckInterval = TagSyncConfig.getTagAdminConnectionCheckInterval(properties);
		// Kerberos mode is inferred from the presence of a configured identity.
		isKerberized = TagSyncConfig.getTagsyncKerberosIdentity(properties) != null;
		isRangerCookieEnabled = TagSyncConfig.isTagSyncRangerCookieEnabled(properties);
		sessionId=null;

		if (LOG.isDebugEnabled()) {
			LOG.debug("restUrl=" + restUrl);
			LOG.debug("sslConfigFile=" + sslConfigFile);
			LOG.debug("userName=" + userName);
			LOG.debug("rangerAdminConnectionCheckInterval=" + rangerAdminConnectionCheckInterval);
			LOG.debug("isKerberized=" + isKerberized);
		}

		if (StringUtils.isNotBlank(restUrl)) {
			tagRESTClient = new RangerRESTClient(restUrl, sslConfigFile);
			// Kerberos auth does not use basic credentials.
			if (!isKerberized) {
				tagRESTClient.setBasicAuthInfo(userName, password);
			}
			// Build and cache REST client. This will catch any errors in building REST client up-front
			tagRESTClient.getClient();

			uploadWorkItems = new LinkedBlockingQueue<UploadWorkItem>();

			ret = true;
		} else {
			LOG.error("No value specified for property 'ranger.tagsync.tagadmin.rest.url'!");
		}

		if (LOG.isDebugEnabled()) {
			LOG.debug("<== TagAdminRESTSink.initialize(), result=" + ret);
		}

		return ret;
	}

	/**
	 * Enqueues the tags for upload and blocks until the background thread has
	 * delivered them (see UploadWorkItem.waitForUpload()).
	 */
	@Override
	public ServiceTags upload(ServiceTags toUpload) throws Exception {
		if(LOG.isDebugEnabled()) {
			LOG.debug("==> upload() ");
		}

		UploadWorkItem uploadWorkItem = new UploadWorkItem(toUpload);

		uploadWorkItems.put(uploadWorkItem);

		// Wait until message is successfully delivered
		ServiceTags ret = uploadWorkItem.waitForUpload();

		if(LOG.isDebugEnabled()) {
			LOG.debug("<== upload()");
		}

		return ret;
	}

	/**
	 * Uploads tags, wrapping the call in a Kerberos doAs() when Kerberized.
	 * Returns null on failure, which run() treats as "retry later".
	 */
	private ServiceTags doUpload(ServiceTags serviceTags) throws Exception {
		if(isKerberized) {
			try{
				UserGroupInformation userGroupInformation = UserGroupInformation.getLoginUser();
				if (userGroupInformation != null) {
					try {
						// Refresh the Kerberos ticket before attempting the upload.
						userGroupInformation.checkTGTAndReloginFromKeytab();
					} catch (IOException ioe) {
						LOG.error("Error renewing TGT and relogin", ioe);
						userGroupInformation = null;
					}
				}
				if (userGroupInformation != null) {
					if (LOG.isDebugEnabled()) {
						LOG.debug("Using Principal = " + userGroupInformation.getUserName());
					}
					final ServiceTags serviceTag = serviceTags;
					ServiceTags ret = userGroupInformation.doAs(new PrivilegedAction<ServiceTags>() {
						@Override
						public ServiceTags run() {
							try {
								return uploadServiceTags(serviceTag);
							} catch (Exception e) {
								LOG.error("Upload of service-tags failed with message ", e);
							}
							return null;
						}
					});
					return ret;
				} else {
					LOG.error("Failed to get UserGroupInformation.getLoginUser()");
					return null; // This will cause retries !!!
				}
			}catch(Exception e){
				LOG.error("Upload of service-tags failed with message ", e);
			}
			return null;
		}else{
			return uploadServiceTags(serviceTags);
		}
	}

	/**
	 * Performs the actual REST PUT (with or without cookie-based auth) and throws
	 * on non-recoverable failures so the caller can retry. A 400 (bad request) is
	 * treated as an application error and is NOT retried.
	 */
	private ServiceTags uploadServiceTags(ServiceTags serviceTags) throws Exception {
		if(LOG.isDebugEnabled()) {
			LOG.debug("==> doUpload()");
		}

		ClientResponse response = null;
		if (isRangerCookieEnabled) {
			response = uploadServiceTagsUsingCookie(serviceTags);
		} else {
			response = tagRESTClient.put(REST_URL_IMPORT_SERVICETAGS_RESOURCE, null, serviceTags);
		}

		// Success is 204 No Content; anything else is logged and possibly rethrown.
		if(response == null || response.getStatus() != HttpServletResponse.SC_NO_CONTENT) {

			RESTResponse resp = RESTResponse.fromClientResponse(response);

			LOG.error("Upload of service-tags failed with message " + resp.getMessage());

			if (response == null || resp.getHttpStatusCode() != HttpServletResponse.SC_BAD_REQUEST) {
				// NOT an application error
				throw new Exception("Upload of service-tags failed with response: " + response);
			}
		}

		if(LOG.isDebugEnabled()) {
			LOG.debug("<== doUpload()");
		}

		return serviceTags;
	}

	/**
	 * Routes the upload through a cached session cookie when one is valid,
	 * otherwise falls back to credential-based login.
	 */
	private ClientResponse uploadServiceTagsUsingCookie(ServiceTags serviceTags) {
		if (LOG.isDebugEnabled()) {
			LOG.debug("==> uploadServiceTagCache()");
		}
		ClientResponse clientResponse = null;
		if (sessionId != null && isValidRangerCookie) {
			clientResponse = tryWithCookie(serviceTags);
		} else {
			clientResponse = tryWithCred(serviceTags);
		}
		if (LOG.isDebugEnabled()) {
			LOG.debug("<== uploadServiceTagCache()");
		}
		return clientResponse;
	}

	/**
	 * Uploads using credentials; on an unexpected status the session state is
	 * reset and null is returned so the caller treats it as a failure.
	 */
	private ClientResponse tryWithCred(ServiceTags serviceTags) {
		if (LOG.isDebugEnabled()) {
			LOG.debug("==> tryWithCred");
		}
		ClientResponse clientResponsebyCred = uploadTagsWithCred(serviceTags);
		// Accept 204, 400 (application error) and 200; everything else invalidates the session.
		if (clientResponsebyCred != null && clientResponsebyCred.getStatus() != HttpServletResponse.SC_NO_CONTENT
				&& clientResponsebyCred.getStatus() != HttpServletResponse.SC_BAD_REQUEST
				&& clientResponsebyCred.getStatus() != HttpServletResponse.SC_OK) {
			sessionId = null;
			clientResponsebyCred = null;
		}
		if (LOG.isDebugEnabled()) {
			LOG.debug("<== tryWithCred");
		}
		return clientResponsebyCred;
	}

	/**
	 * Uploads using the cached session cookie; on an unexpected status the cookie
	 * is invalidated and null is returned so the caller treats it as a failure.
	 */
	private ClientResponse tryWithCookie(ServiceTags serviceTags) {
		ClientResponse clientResponsebySessionId = uploadTagsWithCookie(serviceTags);
		if (clientResponsebySessionId != null
				&& clientResponsebySessionId.getStatus() != HttpServletResponse.SC_NO_CONTENT
				&& clientResponsebySessionId.getStatus() != HttpServletResponse.SC_BAD_REQUEST
				&& clientResponsebySessionId.getStatus() != HttpServletResponse.SC_OK) {
			sessionId = null;
			isValidRangerCookie = false;
			clientResponsebySessionId = null;
		}
		return clientResponsebySessionId;
	}

	/**
	 * Performs a credential-authenticated PUT and, on success, captures the
	 * RANGERADMINSESSIONID cookie for subsequent cookie-based uploads.
	 * Synchronized so only one thread establishes the session at a time.
	 */
	private synchronized ClientResponse uploadTagsWithCred(ServiceTags serviceTags) {
		if (sessionId == null) {
			tagRESTClient.resetClient();
			ClientResponse response = null;
			try {
				response = tagRESTClient.put(REST_URL_IMPORT_SERVICETAGS_RESOURCE, null, serviceTags);
			} catch (Exception e) {
				LOG.error("Failed to get response, Error is : "+e.getMessage());
			}
			if (response != null) {
				// NOTE(review): response.toString() appears to embed the request URI; a missing
				// URI is treated as a redirect to a non-API page (e.g. login) — confirm.
				if (!(response.toString().contains(REST_URL_IMPORT_SERVICETAGS_RESOURCE))) {
					response.setStatus(HttpServletResponse.SC_NOT_FOUND);
				} else if (response.getStatus() == HttpServletResponse.SC_UNAUTHORIZED) {
					LOG.warn("Credentials response from ranger is 401.");
				} else if (response.getStatus() == HttpServletResponse.SC_OK
						|| response.getStatus() == HttpServletResponse.SC_NO_CONTENT) {
					cookieList = response.getCookies();
					// save cookie received from credentials session login
					for (NewCookie cookie : cookieList) {
						if (cookie.getName().equalsIgnoreCase("RANGERADMINSESSIONID")) {
							sessionId = cookie.toCookie();
							isValidRangerCookie = true;
							break;
						} else {
							isValidRangerCookie = false;
						}
					}
				}
			}
			return response;
		} else {
			// Another thread already established a session while we waited; use it.
			ClientResponse clientResponsebySessionId = uploadTagsWithCookie(serviceTags);
			if (!(clientResponsebySessionId.toString().contains(REST_URL_IMPORT_SERVICETAGS_RESOURCE))) {
				clientResponsebySessionId.setStatus(HttpServletResponse.SC_NOT_FOUND);
			}
			return clientResponsebySessionId;
		}
	}

	/**
	 * Performs the PUT authenticated by the cached session cookie and updates the
	 * cookie-validity flags from the response status.
	 */
	private ClientResponse uploadTagsWithCookie(ServiceTags serviceTags) {
		if (LOG.isDebugEnabled()) {
			LOG.debug("==> uploadTagsWithCookie");
		}
		ClientResponse response = null;
		try {
			response = tagRESTClient.put(REST_URL_IMPORT_SERVICETAGS_RESOURCE, serviceTags, sessionId);
		} catch (Exception e) {
			LOG.error("Failed to get response, Error is : "+e.getMessage());
		}
		if (response != null) {
			if (!(response.toString().contains(REST_URL_IMPORT_SERVICETAGS_RESOURCE))) {
				response.setStatus(HttpServletResponse.SC_NOT_FOUND);
				sessionId = null;
				isValidRangerCookie = false;
			} else if (response.getStatus() == HttpServletResponse.SC_UNAUTHORIZED) {
				// Session expired; force a credential login on the next attempt.
				sessionId = null;
				isValidRangerCookie = false;
			} else if (response.getStatus() == HttpServletResponse.SC_NO_CONTENT
					|| response.getStatus() == HttpServletResponse.SC_OK) {
				isValidRangerCookie = true;
			}
		}
		if (LOG.isDebugEnabled()) {
			LOG.debug("<== uploadTagsWithCookie");
		}
		return response;
	}

	/**
	 * Starts the background upload thread (daemon, so it never blocks JVM exit).
	 */
	@Override
	public boolean start() {

		myThread = new Thread(this);
		myThread.setDaemon(true);
		myThread.start();

		return true;
	}

	/**
	 * Interrupts the background upload thread, if running.
	 */
	@Override
	public void stop() {
		if (myThread != null && myThread.isAlive()) {
			myThread.interrupt();
		}
	}

	/**
	 * Background loop: takes work items off the queue and retries each upload
	 * (sleeping rangerAdminConnectionCheckInterval millis between attempts)
	 * until it succeeds. Exits on interruption.
	 */
	@Override
	public void run() {
		if (LOG.isDebugEnabled()) {
			LOG.debug("==> TagAdminRESTSink.run()");
		}
		while (true) {
			UploadWorkItem uploadWorkItem;

			try {
				uploadWorkItem = uploadWorkItems.take();

				ServiceTags toUpload = uploadWorkItem.getServiceTags();

				boolean doRetry;

				do {
					doRetry = false;

					try {
						ServiceTags uploaded = doUpload(toUpload);
						if (uploaded == null) { // Treat this as if an Exception is thrown by doUpload
							doRetry = true;
							Thread.sleep(rangerAdminConnectionCheckInterval);
						} else {
							// ServiceTags uploaded successfully
							uploadWorkItem.uploadCompleted(uploaded);
						}
					} catch (InterruptedException interrupted) {
						LOG.error("Caught exception..: ", interrupted);
						return;
					} catch (Exception exception) {
						doRetry = true;
						Thread.sleep(rangerAdminConnectionCheckInterval);
					}
				} while (doRetry);

			} catch (InterruptedException exception) {
				LOG.error("Interrupted..: ", exception);
				return;
			}
		}
	}

	/**
	 * A single pending upload: pairs the tags to send with a one-slot queue the
	 * producer blocks on until the background thread reports completion.
	 */
	static class UploadWorkItem {
		private ServiceTags serviceTags;
		// Capacity-1 handoff queue used as a completion signal.
		private BlockingQueue<ServiceTags> uploadedServiceTags;

		ServiceTags getServiceTags() {
			return serviceTags;
		}

		// Blocks the caller of upload() until the tags have been delivered.
		ServiceTags waitForUpload() throws InterruptedException {
			return uploadedServiceTags.take();
		}

		void uploadCompleted(ServiceTags uploaded) throws InterruptedException {
			// ServiceTags uploaded successfully
			uploadedServiceTags.put(uploaded);
		}

		UploadWorkItem(ServiceTags serviceTags) {
			setServiceTags(serviceTags);

			uploadedServiceTags = new ArrayBlockingQueue<ServiceTags>(1);
		}

		void setServiceTags(ServiceTags serviceTags) {
			this.serviceTags = serviceTags;
		}
	}
}
/*
 * ******************************************************************************
 *   Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
 *   Licensed under the Apache License, Version 2.0 (the "License"). You may not use
 *   this file except in compliance with the License. A copy of the License is located at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 *   or in the "license" file accompanying this file.
 *   This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 *   CONDITIONS OF ANY KIND, either express or implied. See the License for the
 *   specific language governing permissions and limitations under the License.
 * ****************************************************************************
 */

package com.spectralogic.ds3autogen.utils;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.spectralogic.ds3autogen.api.models.apispec.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collection;
import java.util.Map;

/**
 * Null-safe emptiness predicates plus helpers for pruning the Ds3 type map down
 * to the set of types transitively reachable from a list of Ds3Requests.
 * Stateless utility class; all members are static.
 */
public final class ConverterUtil {

    private static final Logger LOG = LoggerFactory.getLogger(ConverterUtil.class);

    /** Prefix identifying contract (Spectra) defined type names. */
    private static final String CONTRACT_DEFINED_TYPE = "com.spectralogic.";

    private ConverterUtil() {
        // utility class: no instances
    }

    /** @return true if the collection is non-null and non-empty */
    public static boolean hasContent(final Collection<?> collection) {
        return !isEmpty(collection);
    }

    /** @return true if the map is non-null and non-empty */
    public static boolean hasContent(final Map<?, ?> map) {
        return !isEmpty(map);
    }

    /** @return true if the string is non-null and non-empty */
    public static boolean hasContent(final String string) {
        return !isEmpty(string);
    }

    /** @return true if the collection is null or empty */
    public static boolean isEmpty(final Collection<?> collection) {
        return collection == null || collection.isEmpty();
    }

    /** @return true if the map is null or empty */
    public static boolean isEmpty(final Map<?, ?> map) {
        return map == null || map.isEmpty();
    }

    /** @return true if the string is null or empty */
    public static boolean isEmpty(final String string) {
        return string == null || string.isEmpty();
    }

    /**
     * Checks if two enums of the same class have the same value
     * @param leftEnum An enum
     * @param rightEnum An enum
     * @param <E> The class of leftEnum and rightEnum
     * @return True if both enums are of the same non-null value. False is returned if either
     *         enum is null or if the enums do not have the same non-null value.
     */
    protected static <E extends Enum<E>> boolean enumsEqual(final E leftEnum, final E rightEnum) {
        if (leftEnum == null || rightEnum == null) {
            return false;
        }
        // Reference comparison is valid: enum constants are singletons.
        return leftEnum == rightEnum;
    }

    /**
     * Removes all unused types from the Ds3Type map. Types are considered to be used if
     * they are used within a Ds3Request, and/or if they are used within another type that
     * is also used.
     * @param types A Ds3Type map
     * @param requests A list of Ds3Requests
     * @return A map containing only the entries of {@code types} that are reachable
     *         from {@code requests}; empty if either input is empty
     */
    public static ImmutableMap<String, Ds3Type> removeUnusedTypes(
            final ImmutableMap<String, Ds3Type> types,
            final ImmutableList<Ds3Request> requests) {
        if (isEmpty(types) || isEmpty(requests)) {
            return ImmutableMap.of();
        }

        // Seed with types referenced directly by requests, then expand transitively.
        final ImmutableSet.Builder<String> usedTypesBuilder = ImmutableSet.builder();
        usedTypesBuilder.addAll(getUsedTypesFromRequests(requests));
        usedTypesBuilder.addAll(getUsedTypesFromAllTypes(types, usedTypesBuilder.build()));
        final ImmutableSet<String> usedTypes = usedTypesBuilder.build();

        final ImmutableMap.Builder<String, Ds3Type> builder = ImmutableMap.builder();
        for (final Map.Entry<String, Ds3Type> entry : types.entrySet()) {
            if (usedTypes.contains(entry.getKey())) {
                builder.put(entry.getKey(), entry.getValue());
            }
        }
        return builder.build();
    }

    /**
     * Gets a set of type names used within a list of Ds3Types. Recurses until the
     * set of used types stops growing (transitive closure over element types).
     */
    protected static ImmutableSet<String> getUsedTypesFromAllTypes(
            final ImmutableMap<String, Ds3Type> typeMap,
            final ImmutableSet<String> usedTypes) {
        if (isEmpty(usedTypes) || isEmpty(typeMap)) {
            return ImmutableSet.of();
        }
        final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        builder.addAll(usedTypes);
        for (final String type : usedTypes) {
            final Ds3Type ds3Type = typeMap.get(type);
            if (ds3Type != null) {
                builder.addAll(getUsedTypesFromType(ds3Type));
            } else {
                // Log but do not throw an exception because there are cases where a type
                // doesn't need to be generated. Especially true during testing.
                LOG.error("Could not find used type in Type Map: {}", type);
            }
        }
        final ImmutableSet<String> newUsedTypes = builder.build();
        if (newUsedTypes.size() > usedTypes.size()) {
            // New types were discovered this pass: recurse to pick up their dependencies.
            return getUsedTypesFromAllTypes(typeMap, newUsedTypes);
        }
        return newUsedTypes;
    }

    /**
     * Gets a set of type names used within a Ds3Type. Enums and element-less
     * types contribute nothing.
     */
    protected static ImmutableSet<String> getUsedTypesFromType(final Ds3Type ds3Type) {
        if (isEnum(ds3Type) || isEmpty(ds3Type.getElements())) {
            return ImmutableSet.of();
        }
        final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        for (final Ds3Element ds3Element : ds3Type.getElements()) {
            if (includeType(ds3Element.getType())) {
                builder.add(ds3Element.getType());
            }
            if (hasContent(ds3Element.getComponentType())
                    && includeType(ds3Element.getComponentType())) {
                builder.add(ds3Element.getComponentType());
            }
        }
        return builder.build();
    }

    /**
     * Determines if a Ds3Type is describing an Enum
     */
    public static boolean isEnum(final Ds3Type ds3Type) {
        return hasContent(ds3Type.getEnumConstants());
    }

    /**
     * Determines if a type name is a Spectra defined type
     */
    protected static boolean includeType(final String type) {
        return hasContent(type) && type.startsWith(CONTRACT_DEFINED_TYPE);
    }

    /**
     * Gets a set of type names used within a list of Ds3Requests. This includes all Spectra defined
     * parameter types and response types used within the requests.
     */
    protected static ImmutableSet<String> getUsedTypesFromRequests(final ImmutableList<Ds3Request> requests) {
        if (isEmpty(requests)) {
            return ImmutableSet.of();
        }
        final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        for (final Ds3Request request : requests) {
            builder.addAll(getUsedTypesFromParams(request.getRequiredQueryParams()));
            builder.addAll(getUsedTypesFromParams(request.getOptionalQueryParams()));
            builder.addAll(getUsedTypesFromResponseCodes(request.getDs3ResponseCodes()));
        }
        return builder.build();
    }

    /**
     * Gets a set of type names used within a list of Ds3Params
     */
    protected static ImmutableSet<String> getUsedTypesFromParams(final ImmutableList<Ds3Param> params) {
        if (isEmpty(params)) {
            return ImmutableSet.of();
        }
        final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        for (final Ds3Param param : params) {
            if (includeType(param.getType())) {
                builder.add(param.getType());
            }
        }
        return builder.build();
    }

    /**
     * Gets a set of type names used within a list of Ds3ResponseCodes
     */
    protected static ImmutableSet<String> getUsedTypesFromResponseCodes(final ImmutableList<Ds3ResponseCode> responseCodes) {
        if (isEmpty(responseCodes)) {
            return ImmutableSet.of();
        }
        final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        for (final Ds3ResponseCode responseCode : responseCodes) {
            if (hasContent(responseCode.getDs3ResponseTypes())) {
                builder.addAll(getUsedTypesFromResponseTypes(responseCode.getDs3ResponseTypes()));
            }
        }
        return builder.build();
    }

    /**
     * Gets a set of type names used within a list of Ds3ResponseTypes
     */
    protected static ImmutableSet<String> getUsedTypesFromResponseTypes(final ImmutableList<Ds3ResponseType> responseTypes) {
        if (isEmpty(responseTypes)) {
            return ImmutableSet.of();
        }
        final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        for (final Ds3ResponseType responseType : responseTypes) {
            if (includeResponseType(responseType)) {
                builder.add(getTypeFromResponseType(responseType));
            }
        }
        return builder.build();
    }

    /**
     * Determines if a Response Type contains a Spectra defined type.
     */
    protected static boolean includeResponseType(final Ds3ResponseType responseType) {
        return includeType(responseType.getType()) || includeType(responseType.getComponentType());
    }

    /**
     * Retrieves the Spectra defined type within the Response Type. Throws an error if
     * neither type nor component type is a Spectra defined type.
     * @throws IllegalArgumentException if neither the type nor component type is Spectra defined
     */
    protected static String getTypeFromResponseType(final Ds3ResponseType responseType) {
        if (includeType(responseType.getType())) {
            return responseType.getType();
        }
        if (includeType(responseType.getComponentType())) {
            return responseType.getComponentType();
        }
        throw new IllegalArgumentException("Cannot get Spectra type name if neither the Response Type nor the Response Component Type is a Spectra defined type");
    }
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.model;

import org.eclipse.core.runtime.FileLocator;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.utils.CommonUtils;
import org.osgi.framework.Bundle;
import org.osgi.framework.FrameworkUtil;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;

/**
 * DBIcon
 *
 * A named image reference (token + bundle-relative path). Static constants
 * declare the standard icon set; loadIcons() reflectively collects all static
 * DBIcon fields of a class, resolves their paths to platform URLs, verifies the
 * image files exist, and registers them in a token -> image map.
 */
public class DBIcon implements DBPImage {

    private static final Log log = Log.getLog(DBIcon.class);

    // ---- Tree (navigator) icons ----
    public static final DBIcon TREE_ADMIN = new DBIcon("admin", "tree/admin.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_DATABASE = new DBIcon("database", "tree/database.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_DATABASE_CATEGORY = new DBIcon("database_category", "tree/database_category.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_SCHEMA = new DBIcon("schema", "tree/schema.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_SCHEMA_SYSTEM = new DBIcon("schema_system", "tree/schema_system.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_SCHEMA_UTIL = new DBIcon("schema_util", "tree/schema_util.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_TABLE = new DBIcon("table", "tree/table.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_TABLE_ALIAS = new DBIcon("table_alias", "tree/table_alias.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_TABLE_LINK = new DBIcon("table_link", "tree/table_link.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_TABLE_INDEX = new DBIcon("table_index", "tree/table_index.png"); //$NON-NLS-1$ //$NON-NLS-2$
    // NOTE(review): reuses tree/table_index.png — possibly intended to be tree/table_system.png; confirm against icon resources.
    public static final DBIcon TREE_TABLE_SYSTEM = new DBIcon("table_system", "tree/table_index.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_VIEW = new DBIcon("view", "tree/view.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FUNCTION = new DBIcon("function", "tree/function.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_PROCEDURE = new DBIcon("procedure", "tree/procedure.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_COLUMNS = new DBIcon("columns", "tree/columns.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_COLUMN = new DBIcon("column", "tree/column.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_INDEX = new DBIcon("index", "tree/index.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_CONSTRAINT = new DBIcon("constraint", "tree/constraint.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_UNIQUE_KEY = new DBIcon("unique-key", "tree/unique_constraint.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOREIGN_KEY = new DBIcon("foreign-key", "tree/foreign_key.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOREIGN_KEY_COLUMN = new DBIcon("foreign-key-column", "tree/foreign_key_column.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_REFERENCE = new DBIcon("reference", "tree/reference.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_TRIGGER = new DBIcon("trigger", "tree/trigger.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_USER = new DBIcon("user", "tree/user.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_USER_GROUP = new DBIcon("user_group", "tree/group.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_PERMISSIONS = new DBIcon("permissions", "tree/permissions.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_PAGE = new DBIcon("page", "tree/page.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_INFO = new DBIcon("info", "tree/info.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_SESSIONS = new DBIcon("sessions", "tree/sessions.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_LOCKS = new DBIcon("locks", "tree/locks.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_PACKAGE = new DBIcon("package", "tree/package.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_DATA_TYPE = new DBIcon("data_type", "tree/data_type.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_SEQUENCE = new DBIcon("sequence", "tree/sequence.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_SYNONYM = new DBIcon("synonym", "tree/synonym.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_TABLESPACE = new DBIcon("tablespace", "tree/tablespace.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_PARTITION = new DBIcon("partition", "tree/partition.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_ATTRIBUTE = new DBIcon("attribute", "tree/attribute.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_ARGUMENT = new DBIcon("argument", "tree/argument.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_JAVA_CLASS = new DBIcon("javaClass", "tree/java_class.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_JAVA_INTERFACE = new DBIcon("javaInterface", "tree/java_interface.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_LINK = new DBIcon("link", "tree/link.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FILE = new DBIcon("file", "tree/data_file.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_CLASS = new DBIcon("class", "tree/class.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_ASSOCIATION = new DBIcon("association", "tree/association.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_SERVER = new DBIcon("server", "tree/server.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_SERVERS = new DBIcon("servers", "tree/servers.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_RECYCLE_BIN = new DBIcon("recycle_bin", "tree/recycle_bin.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_EVENT = new DBIcon("event", "tree/event.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER = new DBIcon("folder", "tree/folder.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_LINK = new DBIcon("folder_link", "tree/folder_link.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_DATABASE = new DBIcon("folder_database", "tree/folder_database.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_SCHEMA = new DBIcon("folder_schema", "tree/folder_schema.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_TABLE = new DBIcon("folder_table", "tree/folder_table.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_VIEW = new DBIcon("folder_view", "tree/folder_view.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_USER = new DBIcon("folder_user", "tree/folder_user.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_ADMIN = new DBIcon("folder_admin", "tree/folder_admin.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_CONSTRAINT = new DBIcon("constraints", "tree/folder_constraint.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_INFO = new DBIcon("folder_info", "tree/folder_info.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_KEY = new DBIcon("key", "tree/key.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TREE_FOLDER_KEY = new DBIcon("folder_key", "tree/folder_key.png"); //$NON-NLS-1$ //$NON-NLS-2$

    public static final DBIcon PROJECT = new DBIcon("project", "project.png"); //$NON-NLS-1$ //$NON-NLS-2$

    // ---- Data type icons ----
    public static final DBIcon TYPE_BOOLEAN = new DBIcon("boolean", "types/boolean.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_NUMBER = new DBIcon("number", "types/number.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_STRING = new DBIcon("string", "types/string.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_DATETIME = new DBIcon("datetime", "types/datetime.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_BINARY = new DBIcon("binary", "types/binary.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_TEXT = new DBIcon("text", "types/text.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_JSON = new DBIcon("json", "types/json.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_XML = new DBIcon("xml", "types/xml.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_LOB = new DBIcon("lob", "types/lob.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_ARRAY = new DBIcon("array", "types/array.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_STRUCT = new DBIcon("struct", "types/struct.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_DOCUMENT = new DBIcon("document", "types/document.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_OBJECT = new DBIcon("object", "types/object.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_IMAGE = new DBIcon("image", "types/image.png"); //$NON-NLS-1$ //$NON-NLS-2$
    // NOTE(review): token "reference" duplicates TREE_REFERENCE's token; in iconMap the later-registered entry wins — confirm intended.
    public static final DBIcon TYPE_REFERENCE = new DBIcon("reference", "types/reference.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_ROWID = new DBIcon("rowid", "types/rowid.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_ANY = new DBIcon("any", "types/any.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_UUID = new DBIcon("uuid", "types/uuid.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon TYPE_UNKNOWN = new DBIcon("unknown", "types/unknown.png"); //$NON-NLS-1$ //$NON-NLS-2$

    // ---- Overlay icons ----
    public static final DBIcon OVER_ADD = new DBIcon("over_add", "over/add_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_SUCCESS = new DBIcon("over_success", "over/success_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_FAILED = new DBIcon("over_failed", "over/failed_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_ERROR = new DBIcon("over_error", "over/error_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_UNKNOWN = new DBIcon("over_condition", "over/conditional_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_LAMP = new DBIcon("over_lamp", "over/lamp_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_KEY = new DBIcon("over_key", "over/key_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_LOCK = new DBIcon("over_lock", "over/lock_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_EXTERNAL = new DBIcon("over_external", "over/external_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_REFERENCE = new DBIcon("over_reference", "over/reference_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon OVER_FOLDER = new DBIcon("over_folder", "over/folder_ovr.png"); //$NON-NLS-1$ //$NON-NLS-2$

    // ---- Connection icons ----
    public static final DBIcon DATABASE_DEFAULT = new DBIcon("database_icon_default", "connection/database_icon.png"); //$NON-NLS-1$ //$NON-NLS-2$
    public static final DBIcon DATABASE_BIG_DEFAULT = new DBIcon("database_icon_big_default", "connection/database_icon_big.png"); //$NON-NLS-1$ //$NON-NLS-2$

    // Registry of validated icons keyed by token; populated by loadIcons().
    private static Map<String, DBPImage> iconMap = new HashMap<>();

    static {
        // Register this class's own icon constants at class-load time.
        loadIcons(DBIcon.class);
    }

    // Unique icon id (may be null for ad-hoc path-only icons).
    private final String token;
    // Bundle-relative path; rewritten to a platform: URL by loadIcons().
    private String path;

    /**
     * Reflectively scans aClass for static DBIcon fields, resolves each icon's
     * path against the class's OSGi bundle, verifies the image file exists on
     * disk, and registers valid icons in the token map. Failures are logged and
     * skipped rather than thrown, so one bad icon does not break the rest.
     */
    static public void loadIcons(Class<?> aClass) {
        Bundle iconBundle = FrameworkUtil.getBundle(aClass);
        if (iconBundle == null) {
            log.error("Can't find bundle for class '" + aClass.getName() + "'");
            return;
        }
        for (Field field : aClass.getDeclaredFields()) {
            // Only static fields of type DBIcon are icon constants.
            if ((field.getModifiers() & Modifier.STATIC) == 0 || field.getType() != DBIcon.class) {
                continue;
            }
            try {
                DBIcon icon = (DBIcon) field.get(null);
                if (!icon.path.startsWith("platform:")) {
                    // Resolve bundle-relative path to an absolute platform URL (mutates icon.path once).
                    icon.path = "platform:/plugin/" + iconBundle.getSymbolicName() + "/icons/" + icon.path;
                }
                URL fileURL = FileLocator.toFileURL(new URL(icon.path));
                try {
                    URI filePath = GeneralUtils.makeURIFromFilePath(fileURL.toString());
                    File file = new File(filePath);
                    if (!file.exists()) {
                        // Missing image: warn and skip registration.
                        log.warn("Bad image '" + icon.getToken() + "' location: " + icon.getLocation());
                        continue;
                    }
                    DBIcon.iconMap.put(icon.getToken(), icon);
                } catch (URISyntaxException e) {
                    throw new IOException("Bad local file path: " + fileURL, e);
                }
            } catch (Exception e) {
                // Per-field failures are logged so remaining icons still load.
                log.error(e);
            }
        }
    }

    /** Creates a token-less icon from a path only (not registered in the token map). */
    public DBIcon(String path) {
        this.token = null;
        this.path = path;
    }

    /** Creates an icon with a unique token id and a bundle-relative image path. */
    public DBIcon(String token, String path) {
        this.token = token;
        this.path = path;
    }

    /** Looks up a previously registered icon by its token; null if unknown. */
    public static DBPImage getImageById(String token) {
        return iconMap.get(token);
    }

    /**
     * Token is icon id. It can be used to refer on icons in plugin extensions
     * @return unique token
     */
    public String getToken() {
        return token;
    }

    @Override
    public String getLocation() {
        return path;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof DBIcon) {
            // Equal when both token and path match (null-safe comparison).
            return CommonUtils.equalObjects(token, ((DBIcon) obj).token)
                && CommonUtils.equalObjects(path, ((DBIcon) obj).path);
        } else {
            return false;
        }
    }

    @Override
    public String toString() {
        return token + ":" + path;
    }
}
package org.jenkinsci.plugins.ghprb; import hudson.util.CopyOnWriteMap.Hash; import org.joda.time.DateTime; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.kohsuke.github.GHCommitPointer; import org.kohsuke.github.GHCommitState; import org.kohsuke.github.GHIssueComment; import org.kohsuke.github.GHIssueState; import org.kohsuke.github.GHPullRequest; import org.kohsuke.github.GHPullRequestCommitDetail; import org.kohsuke.github.GHRateLimit; import org.kohsuke.github.GHRepository; import org.kohsuke.github.GHUser; import org.kohsuke.github.GitHub; import org.kohsuke.github.PagedIterable; import org.kohsuke.github.PagedIterator; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.runners.MockitoJUnitRunner; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static org.kohsuke.github.GHCommitState.PENDING; import static org.kohsuke.github.GHIssueState.OPEN; import static org.mockito.BDDMockito.given; import static org.mockito.BDDMockito.doReturn; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Matchers.isNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.only; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; /** * Unit tests for {@link GhprbRepository}. 
*/ @RunWith(MockitoJUnitRunner.class) public class GhprbRepositoryTest { private static final String TEST_USER_NAME = "test-user"; private static final String TEST_REPO_NAME = "test-repo"; private static final Date UPDATE_DATE = new Date(); @Mock private GitHub gt; @Mock private GHRepository ghRepository; @Mock private GhprbGitHub gitHub; @Mock private Ghprb helper; @Mock private GHRateLimit ghRateLimit; @Mock private GHPullRequest ghPullRequest; @Mock private GHCommitPointer base; @Mock private GHCommitPointer head; @Mock private GHUser ghUser; private GhprbRepository ghprbRepository; private ConcurrentMap<Integer,GhprbPullRequest> pulls; private GhprbPullRequest ghprbPullRequest; @Before public void setUp() throws IOException { initGHPRWithTestData(); // Mock github API given(helper.getGitHub()).willReturn(gitHub); given(gitHub.get()).willReturn(gt); given(gt.getRepository(anyString())).willReturn(ghRepository); // Mock rate limit given(gt.getRateLimit()).willReturn(ghRateLimit); increaseRateLimitToDefaults(); } @Test public void testCheckMethodWhenUsingGitHubEnterprise() throws IOException { // GIVEN given(gt.getRateLimit()).willThrow(new FileNotFoundException()); List<GHPullRequest> ghPullRequests = createListWithMockPR(); given(ghRepository.getPullRequests(eq(GHIssueState.OPEN))).willReturn(ghPullRequests); mockHeadAndBase(); mockCommitList(); given(helper.ifOnlyTriggerPhrase()).willReturn(true); pulls.put(1, ghprbPullRequest); given(ghPullRequest.getUpdatedAt()).willReturn(UPDATE_DATE); given(ghPullRequest.getNumber()).willReturn(1); // WHEN ghprbRepository.check(); // THEN verifyGetGithub(1); } @Test public void testCheckMethodWithOnlyExistingPRs() throws IOException { // GIVEN List<GHPullRequest> ghPullRequests = createListWithMockPR(); given(ghRepository.getPullRequests(eq(GHIssueState.OPEN))).willReturn(ghPullRequests); mockHeadAndBase(); mockCommitList(); given(helper.ifOnlyTriggerPhrase()).willReturn(true); pulls.put(1, ghprbPullRequest); 
given(ghPullRequest.getUpdatedAt()).willReturn(UPDATE_DATE); given(ghPullRequest.getNumber()).willReturn(1); // WHEN ghprbRepository.check(); // THEN verifyGetGithub(1); /** GH Repo verifications */ verify(ghRepository, only()).getPullRequests(OPEN); // Call to Github API verifyNoMoreInteractions(ghRepository); /** GH PR verifications */ verify(ghPullRequest, times(3)).getHead(); verify(ghPullRequest, times(1)).getBase(); verify(ghPullRequest, times(2)).getNumber(); verify(ghPullRequest, times(1)).getUpdatedAt(); verify(ghPullRequest, times(1)).getBody(); verifyNoMoreInteractions(ghPullRequest); verify(helper).ifOnlyTriggerPhrase(); verify(helper).getWhiteListTargetBranches(); verifyNoMoreInteractions(helper); verifyNoMoreInteractions(gt); verifyZeroInteractions(ghUser); } @Test public void testCheckMethodWithNewPR() throws IOException { // GIVEN List<GHPullRequest> ghPullRequests = createListWithMockPR(); ghPullRequests.add(ghPullRequest); GhprbBuilds builds = mockBuilds(); mockHeadAndBase(); mockCommitList(); given(ghRepository.getPullRequests(eq(GHIssueState.OPEN))).willReturn(ghPullRequests); given(ghPullRequest.getUpdatedAt()).willReturn(UPDATE_DATE); given(ghPullRequest.getNumber()).willReturn(100); given(ghPullRequest.getMergeable()).willReturn(true); given(ghPullRequest.getTitle()).willReturn("title"); given(ghPullRequest.getUser()).willReturn(ghUser); given(ghPullRequest.getUrl()).willReturn(new URL("https://github.com/org/repo/pull/100")); given(ghUser.getEmail()).willReturn("email"); given(helper.ifOnlyTriggerPhrase()).willReturn(false); given(helper.isWhitelisted(ghUser)).willReturn(true); // WHEN ghprbRepository.check(); // THEN verifyGetGithub(1); verifyNoMoreInteractions(gt); /** GH PR verifications */ verify(builds, times(1)).build(any(GhprbPullRequest.class), any(GHUser.class), any(String.class)); verify(ghRepository, times(1)).getPullRequests(OPEN); // Call to Github API verify(ghRepository, times(1)) .createCommitStatus(eq("head sha"), 
eq(PENDING), isNull(String.class), eq("msg")); // Call to Github API verifyNoMoreInteractions(ghRepository); verify(ghPullRequest, times(1)).getTitle(); verify(ghPullRequest, times(2)).getUser(); verify(ghPullRequest, times(1)).getMergeable(); // Call to Github API verify(ghPullRequest, times(8)).getHead(); verify(ghPullRequest, times(3)).getBase(); verify(ghPullRequest, times(5)).getNumber(); verify(ghPullRequest, times(3)).getUpdatedAt(); verify(ghPullRequest, times(1)).getUrl(); verify(ghPullRequest, times(1)).listCommits(); verify(ghPullRequest, times(2)).getBody(); verifyNoMoreInteractions(ghPullRequest); verify(helper, times(1)).isWhitelisted(eq(ghUser)); // Call to Github API verify(helper, times(2)).ifOnlyTriggerPhrase(); verify(helper, times(1)).getBuilds(); verify(helper, times(2)).getWhiteListTargetBranches(); verifyNoMoreInteractions(helper); verify(ghUser, times(1)).getEmail(); // Call to Github API verify(ghUser, times(1)).getLogin(); verifyNoMoreInteractions(ghUser); } private GhprbBuilds mockBuilds() throws IOException { GhprbBuilds builds = mock(GhprbBuilds.class); given(helper.getBuilds()).willReturn(builds); given(builds.build(any(GhprbPullRequest.class), any(GHUser.class), any(String.class))).willReturn("msg"); given(ghRepository.createCommitStatus(anyString(), any(GHCommitState.class), anyString(), anyString())).willReturn(null); return builds; } @Test public void testCheckMethodWhenPrWasUpdatedWithNonKeyPhrase() throws IOException { // GIVEN List<GHPullRequest> ghPullRequests = createListWithMockPR(); mockComments("comment body"); mockHeadAndBase(); mockCommitList(); GhprbBuilds builds = mockBuilds(); given(ghRepository.getPullRequests(eq(GHIssueState.OPEN))).willReturn(ghPullRequests); given(ghPullRequest.getUpdatedAt()).willReturn(new Date()).willReturn(new DateTime().plusDays(1).toDate()); given(ghPullRequest.getNumber()).willReturn(100); given(ghPullRequest.getMergeable()).willReturn(true); 
given(ghPullRequest.getTitle()).willReturn("title"); given(ghPullRequest.getUser()).willReturn(ghUser); given(ghPullRequest.getUrl()).willReturn(new URL("https://github.com/org/repo/pull/100")); given(ghUser.getEmail()).willReturn("email"); given(ghUser.getLogin()).willReturn("login"); given(helper.ifOnlyTriggerPhrase()).willReturn(false); given(helper.isWhitelisted(ghUser)).willReturn(true); // WHEN ghprbRepository.check(); // PR was created ghprbRepository.check(); // PR was updated // THEN verifyGetGithub(2); verifyNoMoreInteractions(gt); /** GH PR verifications */ verify(builds, times(1)).build(any(GhprbPullRequest.class), any(GHUser.class), any(String.class)); verify(ghRepository, times(2)).getPullRequests(eq(OPEN)); // Call to Github API verify(ghRepository, times(1)) .createCommitStatus(eq("head sha"), eq(PENDING), isNull(String.class), eq("msg")); // Call to Github API verifyNoMoreInteractions(ghRepository); verify(ghPullRequest, times(2)).getTitle(); verify(ghPullRequest, times(2)).getUser(); verify(ghPullRequest, times(1)).getMergeable(); // Call to Github API verify(ghPullRequest, times(8)).getHead(); verify(ghPullRequest, times(3)).getBase(); verify(ghPullRequest, times(5)).getNumber(); verify(ghPullRequest, times(1)).getUrl(); verify(ghPullRequest, times(4)).getUpdatedAt(); verify(ghPullRequest, times(1)).getComments(); verify(ghPullRequest, times(1)).listCommits(); verify(ghPullRequest, times(2)).getBody(); verifyNoMoreInteractions(ghPullRequest); verify(helper, times(1)).isWhitelisted(eq(ghUser)); // Call to Github API verify(helper, times(2)).ifOnlyTriggerPhrase(); verify(helper, times(1)).getBuilds(); verify(helper, times(2)).getWhiteListTargetBranches(); verify(helper).isWhitelistPhrase(eq("comment body")); verify(helper).isOktotestPhrase(eq("comment body")); verify(helper).isRetestPhrase(eq("comment body")); verify(helper).isTriggerPhrase(eq("comment body")); verifyNoMoreInteractions(helper); verify(ghUser, times(1)).getEmail(); // Call to Github 
API verify(ghUser, times(2)).getLogin(); verifyNoMoreInteractions(ghUser); } private List<GHPullRequest> createListWithMockPR() { List<GHPullRequest> ghPullRequests = new ArrayList<GHPullRequest>(); ghPullRequests.add(ghPullRequest); return ghPullRequests; } @Test public void testCheckMethodWhenPrWasUpdatedWithRetestPhrase() throws IOException { // GIVEN List<GHPullRequest> ghPullRequests = createListWithMockPR(); mockComments("test this please"); mockHeadAndBase(); mockCommitList(); GhprbBuilds builds = mockBuilds(); given(ghRepository.getPullRequests(eq(GHIssueState.OPEN))).willReturn(ghPullRequests); given(ghPullRequest.getUpdatedAt()).willReturn(new Date()).willReturn(new Date()).willReturn(new DateTime().plusDays(1).toDate()); given(ghPullRequest.getNumber()).willReturn(100); given(ghPullRequest.getMergeable()).willReturn(true); given(ghPullRequest.getTitle()).willReturn("title"); given(ghPullRequest.getUser()).willReturn(ghUser); given(ghPullRequest.getUrl()).willReturn(new URL("https://github.com/org/repo/pull/100")); given(ghUser.getEmail()).willReturn("email"); given(ghUser.getLogin()).willReturn("login"); given(helper.ifOnlyTriggerPhrase()).willReturn(false); given(helper.isRetestPhrase(eq("test this please"))).willReturn(true); given(helper.isWhitelisted(ghUser)).willReturn(true); // WHEN ghprbRepository.check(); // PR was created ghprbRepository.check(); // PR was updated // THEN verifyGetGithub(2); verifyNoMoreInteractions(gt); /** GH PR verifications */ verify(builds, times(2)).build(any(GhprbPullRequest.class), any(GHUser.class), any(String.class)); verify(ghRepository, times(2)).getPullRequests(eq(OPEN)); // Call to Github API verify(ghRepository, times(2)) .createCommitStatus(eq("head sha"), eq(PENDING), isNull(String.class), eq("msg")); // Call to Github API verifyNoMoreInteractions(ghRepository); verify(ghPullRequest, times(2)).getTitle(); verify(ghPullRequest, times(2)).getUser(); verify(ghPullRequest, times(2)).getMergeable(); // Call to Github 
API verify(ghPullRequest, times(8)).getHead(); verify(ghPullRequest, times(3)).getBase(); verify(ghPullRequest, times(5)).getNumber(); verify(ghPullRequest, times(4)).getUpdatedAt(); verify(ghPullRequest, times(1)).getUrl(); verify(ghPullRequest, times(1)).getComments(); verify(ghPullRequest, times(2)).listCommits(); verify(ghPullRequest, times(2)).getBody(); verifyNoMoreInteractions(ghPullRequest); verify(helper, times(2)).isWhitelisted(eq(ghUser)); // Call to Github API verify(helper, times(2)).ifOnlyTriggerPhrase(); verify(helper, times(2)).getBuilds(); verify(helper, times(2)).getWhiteListTargetBranches(); verify(helper).isWhitelistPhrase(eq("test this please")); verify(helper).isOktotestPhrase(eq("test this please")); verify(helper).isRetestPhrase(eq("test this please")); verify(helper).isAdmin(eq("login")); verifyNoMoreInteractions(helper); verify(ghUser, times(1)).getEmail(); // Call to Github API verify(ghUser, times(2)).getLogin(); verifyNoMoreInteractions(ghUser); verify(builds, times(2)).build(any(GhprbPullRequest.class), any(GHUser.class), any(String.class)); verifyNoMoreInteractions(builds); } private void mockComments(String commentBody) throws IOException { GHIssueComment comment = mock(GHIssueComment.class); given(comment.getUpdatedAt()).willReturn(new DateTime().plusDays(3).toDate()); given(comment.getUser()).willReturn(ghUser); given(comment.getBody()).willReturn(commentBody); List<GHIssueComment> comments = new ArrayList<GHIssueComment>(); comments.add(comment); given(ghPullRequest.getComments()).willReturn(comments); } private void mockHeadAndBase() { /** Mock head\base */ given(ghPullRequest.getHead()).willReturn(head); given(base.getSha()).willReturn("base sha"); given(ghPullRequest.getBase()).willReturn(base); given(head.getSha()).willReturn("head sha"); } private void mockCommitList() { PagedIterator itr = Mockito.mock(PagedIterator.class); PagedIterable pagedItr = Mockito.mock(PagedIterable.class); 
Mockito.when(ghPullRequest.listCommits()).thenReturn(pagedItr); Mockito.when(pagedItr.iterator()).thenReturn(itr); Mockito.when(itr.hasNext()).thenReturn(false); } @Test public void testCheckMethodWithNoPR() throws IOException { // GIVEN List<GHPullRequest> ghPullRequests = new ArrayList<GHPullRequest>(); given(ghRepository.getPullRequests(eq(GHIssueState.OPEN))).willReturn(ghPullRequests); // WHEN ghprbRepository.check(); // THEN verifyGetGithub(1); verifyNoMoreInteractions(gt); verify(ghRepository, times(1)).getPullRequests(OPEN); // Call to Github API verifyNoMoreInteractions(ghRepository); verifyZeroInteractions(helper); } @Test public void testExceedRateLimit() throws IOException { // GIVEN ghRateLimit.remaining = 0; // WHEN ghprbRepository.check(); // THEN verify(helper, only()).getGitHub(); verify(gitHub, only()).get(); verify(gt, only()).getRateLimit(); verifyZeroInteractions(ghRepository); verifyZeroInteractions(gitHub); verifyZeroInteractions(gt); } private void initGHPRWithTestData() throws IOException { /** Mock PR data */ given(ghPullRequest.getUser()).willReturn(ghUser); given(ghUser.getEmail()).willReturn("email"); given(helper.isWhitelisted(ghUser)).willReturn(true); given(ghPullRequest.getUpdatedAt()).willReturn(UPDATE_DATE); /** Mock head\base */ given(base.getSha()).willReturn("base sha"); given(ghPullRequest.getBase()).willReturn(base); given(ghPullRequest.getHead()).willReturn(head); given(head.getSha()).willReturn("head sha"); pulls = new ConcurrentHashMap<Integer, GhprbPullRequest>(); ghprbRepository = new GhprbRepository(TEST_USER_NAME, TEST_REPO_NAME, helper, pulls); ghprbPullRequest = new GhprbPullRequest(ghPullRequest, helper, ghprbRepository); // Reset mocks not to mix init data invocations with tests reset(ghPullRequest, ghUser, helper, head, base); } private void increaseRateLimitToDefaults() { ghRateLimit.remaining = 5000; } // Verifications private void verifyGetGithub(int callsCount) throws IOException { verify(helper, 
times(callsCount)).getGitHub(); verify(gitHub, times(callsCount)).get(); // Call to Github API (once, than cached) verify(gt, times(1)).getRepository(anyString()); // Call to Github API verify(gt, times(callsCount)).getRateLimit(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.logging.log4j.core.async.perftest;

import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;

import org.apache.logging.log4j.core.util.Loader;

/**
 * Single-threaded performance test. Usually invoked from PerfTestDriver as part of a series of tests.
 * <p>
 * To run a single instance of this class for the log4j2 test runner:<br>
 * java -Dlog4j.configurationFile=mylog4j2.xml org.apache.logging.log4j.core.async.perftest.PerfTest \
 * org.apache.logging.log4j.core.async.perftest.RunLog4j2 &lt;name&gt; &lt;resultfile.txt&gt; &lt;-verbose&gt; &lt;-throughput&gt;
 */
public class PerfTest {

    /** 100-character payload line used to build test messages. */
    private static final String LINE100 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890!\"#$%&'()-=^~|\\@`[]{};:+*,.<>/?_123456";

    /** 500-character payload line (five copies of {@link #LINE100}). */
    public static final String LINE500 = LINE100 + LINE100 + LINE100 + LINE100 + LINE100;

    /** When {@code true}, progress messages are printed to stdout (set by the {@code -verbose} flag). */
    static boolean verbose = false;

    /** When {@code true}, measure throughput instead of latency (set by the {@code -throughput} flag). */
    static boolean throughput;

    /**
     * Determines how long a single {@code System.nanoTime()} call takes on average, so latency
     * measurements can subtract the timer overhead.
     *
     * @return the average cost of one {@code System.nanoTime()} call, in nanoseconds
     * @throws IllegalStateException if the clock did not advance during the calibration loop
     */
    static long calcNanoTimeCost() {
        final long iterations = 10000000;
        final long start = System.nanoTime();
        long finish = start;
        for (int i = 0; i < iterations; i++) {
            finish = System.nanoTime();
        }
        if (finish <= start) {
            throw new IllegalStateException();
        }
        // take one final timestamp so the measured interval covers the whole loop
        finish = System.nanoTime();
        return (finish - start) / iterations;
    }

    /**
     * Creates a histogram with 31 exponentially growing buckets (upper bounds 2, 4, 8, ...,
     * with the last bucket catching everything up to {@code Long.MAX_VALUE}).
     *
     * @return a new, empty histogram suitable for recording nanosecond latencies
     */
    static Histogram createHistogram() {
        final long[] intervals = new long[31];
        long intervalUpperBound = 1L;
        for (int i = 0, size = intervals.length - 1; i < size; i++) {
            intervalUpperBound *= 2;
            intervals[i] = intervalUpperBound;
        }
        intervals[intervals.length - 1] = Long.MAX_VALUE;
        return new Histogram(intervals);
    }

    public static void main(final String[] args) throws Exception {
        new PerfTest().doMain(args);
    }

    /**
     * Parses the command line and runs the test.
     * <p>
     * Expected arguments: {@code <runnerClass> <name> [resultFile] [-verbose] [-throughput]}.
     * Flags may appear anywhere in the argument list.
     *
     * @param args command-line arguments; {@code args[0]} is the {@link IPerfTestRunner}
     *            implementation class, {@code args[1]} the test name
     */
    public void doMain(final String[] args) throws Exception {
        final String runnerClass = args[0];
        final IPerfTestRunner runner = Loader.newCheckedInstanceOf(runnerClass, IPerfTestRunner.class);
        final String name = args[1];
        final String resultFile = args.length > 2 ? args[2] : null;
        for (final String arg : args) {
            if (verbose && throughput) {
                break; // both flags already seen; no need to scan further
            }
            if ("-verbose".equalsIgnoreCase(arg)) {
                verbose = true;
            }
            if ("-throughput".equalsIgnoreCase(arg)) {
                throughput = true;
            }
        }
        final int threadCount = 1;
        printf("Starting %s %s (%d)...%n", getClass().getSimpleName(), name, threadCount);
        runTestAndPrintResult(runner, name, threadCount, resultFile);
        runner.shutdown();
        System.exit(0);
    }

    /**
     * Warms up the JVM for at least 10 seconds, waits for buffers to drain, then runs the
     * measured single-threaded test and reports the result.
     *
     * @param runner the logging implementation under test
     * @param name label used in the report
     * @param threadCount reported thread count (always 1 here)
     * @param resultFile file to append results to, or {@code null} for stdout only
     */
    public void runTestAndPrintResult(final IPerfTestRunner runner, final String name,
            final int threadCount, final String resultFile) throws Exception {
        final Histogram warmupHist = createHistogram();

        // ThreadContext.put("aKey", "mdcVal");
        println("Warming up the JVM...");
        final long t1 = System.nanoTime();

        // warmup at least 10 seconds
        final int LINES = 50000;
        int iterations = 0;
        final long stop = System.nanoTime() + TimeUnit.SECONDS.toNanos(10);
        do {
            runTest(runner, LINES, null, warmupHist, 1);
            iterations++;
        } while (System.nanoTime() - stop < 0);

        printf("Warmup complete in %.1f seconds (%d iterations)%n",
                (System.nanoTime() - t1) / (1000.0 * 1000.0 * 1000.0), iterations);
        println("Waiting 10 seconds for buffers to drain warmup data...");
        // total wait is 3 + 7 = 10 seconds; the split marks where forceRemap used to run
        Thread.sleep(3000);
        //forceRemap(LINES, iterations, runner);
        Thread.sleep(7000);

        println("Starting the main test...");
        runSingleThreadedTest(runner, LINES, name, resultFile);
        Thread.sleep(1000);
    }

    /**
     * Log some extra bytes to fill the memory mapped buffer to force it to remap.
     * Currently unused (the call in {@link #runTestAndPrintResult} is commented out).
     *
     * @param linesPerIteration lines logged per warmup iteration
     * @param iterations number of warmup iterations that were run
     * @param runner the logging implementation under test
     */
    private void forceRemap(final int linesPerIteration, final int iterations,
            final IPerfTestRunner runner) {
        final int LINESEP = System.lineSeparator().getBytes(Charset.defaultCharset()).length;
        // use the same explicit charset as LINESEP so both byte counts agree on all platforms
        final int bytesPerLine = IPerfTestRunner.THROUGHPUT_MSG.getBytes(Charset.defaultCharset()).length;
        final int bytesWritten = bytesPerLine * linesPerIteration * iterations;
        final int threshold = 1073741824; // magic number: defined in perf9MMapLocation.xml
        int todo = threshold - bytesWritten;
        if (todo <= 0) {
            return;
        }
        final byte[] filler = new byte[4096];
        Arrays.fill(filler, (byte) 'X');
        final String str = new String(filler, Charset.defaultCharset());
        do {
            runner.log(str);
        } while ((todo -= (4096 + LINESEP)) > 0);
    }

    /**
     * Runs the measured latency/throughput test on a single thread and reports the result.
     *
     * @return the number of lines logged
     */
    private int runSingleThreadedTest(final IPerfTestRunner runner, final int LINES,
            final String name, final String resultFile) throws IOException {
        final Histogram latency = createHistogram();
        runTest(runner, LINES, "end", latency, 1);
        reportResult(resultFile, name, latency);
        return LINES;
    }

    /**
     * Prints the sampling report and, if {@code file} is non-null, appends it to that file.
     */
    static void reportResult(final String file, final String name, final Histogram histogram)
            throws IOException {
        final String result = createSamplingReport(name, histogram);
        println(result);

        if (file != null) {
            try (final FileWriter writer = new FileWriter(file, true)) {
                writer.write(result);
                writer.write(System.lineSeparator());
            }
        }
    }

    /** printf to stdout, but only in verbose mode. */
    static void printf(final String msg, final Object... objects) {
        if (verbose) {
            System.out.printf(msg, objects);
        }
    }

    /** println to stdout, but only in verbose mode. */
    static void println(final String msg) {
        if (verbose) {
            System.out.println(msg);
        }
    }

    /**
     * Formats the histogram as a one-line report: ops/second in throughput mode, otherwise
     * average and percentile latencies.
     */
    static String createSamplingReport(final String name, final Histogram histogram) {
        final Histogram data = histogram;
        if (throughput) {
            return data.getMax() + " operations/second";
        }
        final String result = String.format("avg=%.0f 99%%=%d 99.99%%=%d sampleCount=%d", //
                data.getMean(), //
                data.getTwoNinesUpperBound(), //
                data.getFourNinesUpperBound(), //
                data.getCount() //
        );
        return result;
    }

    /**
     * Runs one test iteration: a throughput or latency run depending on the {@code throughput}
     * flag, optionally followed by logging {@code finalMessage}.
     */
    public void runTest(final IPerfTestRunner runner, final int lines, final String finalMessage,
            final Histogram histogram, final int threadCount) {
        if (throughput) {
            runner.runThroughputTest(lines, histogram);
        } else {
            final long nanoTimeCost = calcNanoTimeCost();
            runner.runLatencyTest(lines, histogram, nanoTimeCost, threadCount);
        }
        if (finalMessage != null) {
            runner.log(finalMessage);
        }
    }
}
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
package org.apache.ivy.util.url;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.NTCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.config.Lookup;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.HttpClientConnectionManager;
import org.apache.http.conn.routing.HttpRoutePlanner;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.auth.BasicSchemeFactory;
import org.apache.http.impl.auth.DigestSchemeFactory;
import org.apache.http.impl.auth.NTLMSchemeFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.impl.conn.SystemDefaultRoutePlanner;
import org.apache.ivy.core.settings.TimeoutConstraint;
import org.apache.ivy.util.CopyProgressListener;
import org.apache.ivy.util.FileUtil;
import org.apache.ivy.util.HostUtil;
import org.apache.ivy.util.Message;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.ProxySelector;
import java.net.URL;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;

/**
 * A {@link TimeoutConstrainedURLHandler} backed by Apache HttpClient. A single pooled
 * {@link CloseableHttpClient} instance is shared for all requests issued by this handler.
 */
public class HttpClientHandler extends AbstractURLHandler
        implements TimeoutConstrainedURLHandler, AutoCloseable {

    // SimpleDateFormat is NOT thread-safe; this shared instance must only be used
    // while synchronized on it (see getLastModified(HttpResponse)).
    private static final SimpleDateFormat LAST_MODIFIED_FORMAT = new SimpleDateFormat(
            "EEE, d MMM yyyy HH:mm:ss z", Locale.US);

    // A instance of the HttpClientHandler which gets registered to be closed
    // when the JVM exits
    static final HttpClientHandler DELETE_ON_EXIT_INSTANCE;

    static {
        DELETE_ON_EXIT_INSTANCE = new HttpClientHandler();
        final Thread shutdownHook = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    DELETE_ON_EXIT_INSTANCE.close();
                } catch (Exception e) {
                    // ignore since this is anyway happening during shutdown of the JVM
                }
            }
        });
        shutdownHook.setName("ivy-httpclient-shutdown-handler");
        shutdownHook.setDaemon(true);
        Runtime.getRuntime().addShutdownHook(shutdownHook);
    }

    private final CloseableHttpClient httpClient;

    public HttpClientHandler() {
        this.httpClient = buildUnderlyingClient();
    }

    /** Builds the pooled, proxy-aware, auth-enabled HTTP client used for all requests. */
    private CloseableHttpClient buildUnderlyingClient() {
        return HttpClients.custom()
                .setConnectionManager(createConnectionManager())
                .setRoutePlanner(createProxyRoutePlanner())
                .setUserAgent(this.getUserAgent())
                .setDefaultAuthSchemeRegistry(createAuthSchemeRegistry())
                .setDefaultCredentialsProvider(new IvyCredentialsProvider())
                .build();
    }

    private static HttpRoutePlanner createProxyRoutePlanner() {
        // use the standard JRE ProxySelector to get proxy information
        Message.verbose("Using JRE standard ProxySelector for configuring HTTP proxy");
        return new SystemDefaultRoutePlanner(ProxySelector.getDefault());
    }

    /** Registers the supported authentication schemes: Digest, Basic and NTLM. */
    private static Lookup<AuthSchemeProvider> createAuthSchemeRegistry() {
        return RegistryBuilder.<AuthSchemeProvider>create()
                .register(AuthSchemes.DIGEST, new DigestSchemeFactory())
                .register(AuthSchemes.BASIC, new BasicSchemeFactory())
                .register(AuthSchemes.NTLM, new NTLMSchemeFactory())
                .build();
    }

    private static HttpClientConnectionManager createConnectionManager() {
        return new PoolingHttpClientConnectionManager();
    }

    /** Preferred auth scheme order, strongest first. */
    private static List<String> getAuthSchemePreferredOrder() {
        return Arrays.asList(AuthSchemes.DIGEST, AuthSchemes.BASIC, AuthSchemes.NTLM);
    }

    @Override
    public InputStream openStream(final URL url) throws IOException {
        return this.openStream(url, null);
    }

    /**
     * Opens a (decoding) stream on the GET response body for {@code url}.
     *
     * @throws IOException if the request fails or returns an unsuccessful status
     */
    @Override
    public InputStream openStream(final URL url, final TimeoutConstraint timeoutConstraint)
            throws IOException {
        final int connectionTimeout = (timeoutConstraint == null
                || timeoutConstraint.getConnectionTimeout() < 0) ? 0
                        : timeoutConstraint.getConnectionTimeout();
        final int readTimeout = (timeoutConstraint == null
                || timeoutConstraint.getReadTimeout() < 0) ? 0
                        : timeoutConstraint.getReadTimeout();
        final CloseableHttpResponse response = doGet(url, connectionTimeout, readTimeout);
        this.requireSuccessStatus(HttpGet.METHOD_NAME, url, response);
        final Header encoding = this.getContentEncoding(response);
        return getDecodingInputStream(encoding == null ? null : encoding.getValue(),
                response.getEntity().getContent());
    }

    @Override
    public void download(final URL src, final File dest, final CopyProgressListener l)
            throws IOException {
        this.download(src, dest, l, null);
    }

    /**
     * Downloads {@code src} to {@code dest}, preserving the server-reported last-modified time.
     *
     * @throws IOException if the request fails or returns an unsuccessful status
     */
    @Override
    public void download(final URL src, final File dest, final CopyProgressListener listener,
            final TimeoutConstraint timeoutConstraint) throws IOException {
        final int connectionTimeout = (timeoutConstraint == null
                || timeoutConstraint.getConnectionTimeout() < 0) ? 0
                        : timeoutConstraint.getConnectionTimeout();
        final int readTimeout = (timeoutConstraint == null
                || timeoutConstraint.getReadTimeout() < 0) ? 0
                        : timeoutConstraint.getReadTimeout();
        try (final CloseableHttpResponse response = doGet(src, connectionTimeout, readTimeout)) {
            // We can only figure the content we got is want we want if the status is success.
            this.requireSuccessStatus(HttpGet.METHOD_NAME, src, response);
            final Header encoding = this.getContentEncoding(response);
            try (final InputStream is = getDecodingInputStream(
                    encoding == null ? null : encoding.getValue(),
                    response.getEntity().getContent())) {
                FileUtil.copy(is, dest, listener);
            }
            dest.setLastModified(getLastModified(response));
        }
    }

    @Override
    public void upload(final File src, final URL dest, final CopyProgressListener l)
            throws IOException {
        this.upload(src, dest, l, null);
    }

    /**
     * Uploads {@code src} to {@code dest} with an HTTP PUT (Expect: 100-continue enabled).
     *
     * @throws IOException if the request fails or the status code is not acceptable
     */
    @Override
    public void upload(final File src, final URL dest, final CopyProgressListener listener,
            final TimeoutConstraint timeoutConstraint) throws IOException {
        final int connectionTimeout = (timeoutConstraint == null
                || timeoutConstraint.getConnectionTimeout() < 0) ? 0
                        : timeoutConstraint.getConnectionTimeout();
        final int readTimeout = (timeoutConstraint == null
                || timeoutConstraint.getReadTimeout() < 0) ? 0
                        : timeoutConstraint.getReadTimeout();
        final RequestConfig requestConfig = RequestConfig.custom()
                .setSocketTimeout(readTimeout)
                .setConnectTimeout(connectionTimeout)
                .setAuthenticationEnabled(hasCredentialsConfigured(dest))
                .setTargetPreferredAuthSchemes(getAuthSchemePreferredOrder())
                .setProxyPreferredAuthSchemes(getAuthSchemePreferredOrder())
                .setExpectContinueEnabled(true)
                .build();
        final HttpPut put = new HttpPut(normalizeToString(dest));
        put.setConfig(requestConfig);
        put.setEntity(new FileEntity(src));
        try (final CloseableHttpResponse response = this.httpClient.execute(put)) {
            validatePutStatusCode(dest, response.getStatusLine().getStatusCode(),
                    response.getStatusLine().getReasonPhrase());
        }
    }

    @SuppressWarnings("deprecation")
    @Override
    public URLInfo getURLInfo(final URL url) {
        return this.getURLInfo(url, null);
    }

    @SuppressWarnings("deprecation")
    @Override
    public URLInfo getURLInfo(final URL url, final int timeout) {
        return this.getURLInfo(url, createTimeoutConstraints(timeout));
    }

    @SuppressWarnings("deprecation")
    @Override
    public boolean isReachable(final URL url, final TimeoutConstraint timeoutConstraint) {
        return this.getURLInfo(url, timeoutConstraint).isReachable();
    }

    @SuppressWarnings("deprecation")
    @Override
    public long getContentLength(final URL url, final TimeoutConstraint timeoutConstraint) {
        return this.getURLInfo(url, timeoutConstraint).getContentLength();
    }

    @SuppressWarnings("deprecation")
    @Override
    public long getLastModified(final URL url, final TimeoutConstraint timeoutConstraint) {
        return this.getURLInfo(url, timeoutConstraint).getLastModified();
    }

    /**
     * Fetches metadata for {@code url} using either a HEAD or GET request (depending on the
     * configured request method). Returns {@code UNAVAILABLE} on any failure.
     */
    @SuppressWarnings("deprecation")
    @Override
    public URLInfo getURLInfo(final URL url, final TimeoutConstraint timeoutConstraint) {
        final int connectionTimeout = (timeoutConstraint == null
                || timeoutConstraint.getConnectionTimeout() < 0) ? 0
                        : timeoutConstraint.getConnectionTimeout();
        final int readTimeout = (timeoutConstraint == null
                || timeoutConstraint.getReadTimeout() < 0) ? 0
                        : timeoutConstraint.getReadTimeout();
        CloseableHttpResponse response = null;
        try {
            final String httpMethod;
            if (getRequestMethod() == TimeoutConstrainedURLHandler.REQUEST_METHOD_HEAD) {
                httpMethod = HttpHead.METHOD_NAME;
                response = doHead(url, connectionTimeout, readTimeout);
            } else {
                httpMethod = HttpGet.METHOD_NAME;
                response = doGet(url, connectionTimeout, readTimeout);
            }
            if (checkStatusCode(httpMethod, url, response)) {
                final HttpEntity responseEntity = response.getEntity();
                // the Content-Type header may carry no charset parameter, in which case
                // getCharset() returns null; guard against a NullPointerException here
                final Charset charSet = ContentType.getOrDefault(responseEntity).getCharset();
                return new URLInfo(true,
                        responseEntity == null ? 0 : responseEntity.getContentLength(),
                        getLastModified(response),
                        charSet == null ? null : charSet.name());
            }
        } catch (IOException | IllegalArgumentException e) {
            // IllegalArgumentException is thrown by HttpClient library to indicate the URL is not valid,
            // this happens for instance when trying to download a dynamic version (cfr IVY-390)
            Message.error("HttpClientHandler: " + e.getMessage() + " url=" + url);
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    // ignore
                }
            }
        }
        return UNAVAILABLE;
    }

    /**
     * Returns {@code true} if the response status is considered successful for the given method
     * (200 OK, or 204 for HEAD requests — see IVY-1328); logs diagnostics otherwise.
     */
    private boolean checkStatusCode(final String httpMethod, final URL sourceURL,
            final HttpResponse response) {
        final int status = response.getStatusLine().getStatusCode();
        if (status == HttpStatus.SC_OK) {
            return true;
        }
        // IVY-1328: some servers return a 204 on a HEAD request
        if (HttpHead.METHOD_NAME.equals(httpMethod) && (status == 204)) {
            return true;
        }
        Message.debug("HTTP response status: " + status + " url=" + sourceURL);
        if (status == HttpStatus.SC_PROXY_AUTHENTICATION_REQUIRED) {
            Message.warn("Your proxy requires authentication.");
        } else if (String.valueOf(status).startsWith("4")) {
            Message.verbose("CLIENT ERROR: " + response.getStatusLine().getReasonPhrase()
                    + " url=" + sourceURL);
        } else if (String.valueOf(status).startsWith("5")) {
            Message.error("SERVER ERROR: " + response.getStatusLine().getReasonPhrase()
                    + " url=" + sourceURL);
        }
        return false;
    }

    /**
     * Checks the status code of the response and if it's considered as successful response, then
     * this method just returns back. Else it {@link CloseableHttpResponse#close() closes the
     * response} and throws an {@link IOException} for the unsuccessful response.
     *
     * @param httpMethod The HTTP method that was used for the source request
     * @param sourceURL  The URL of the source request
     * @param response   The response to the source request
     * @throws IOException Thrown if the response was considered unsuccessful
     */
    private void requireSuccessStatus(final String httpMethod, final URL sourceURL,
            final CloseableHttpResponse response) throws IOException {
        if (this.checkStatusCode(httpMethod, sourceURL, response)) {
            return;
        }
        // this is now considered an unsuccessful response, so close the response and throw an exception
        try {
            response.close();
        } catch (Exception e) {
            // log and move on
            Message.debug("Could not close the HTTP response for url=" + sourceURL, e);
        }
        throw new IOException("Failed response to request '" + httpMethod + " " + sourceURL + "' "
                + response.getStatusLine().getStatusCode() + " - '"
                + response.getStatusLine().getReasonPhrase());
    }

    private Header getContentEncoding(final HttpResponse response) {
        return response.getFirstHeader("Content-Encoding");
    }

    /**
     * Parses the response's {@code last-modified} header; falls back to the current time when the
     * header is absent or unparseable.
     */
    private long getLastModified(final HttpResponse response) {
        final Header header = response.getFirstHeader("last-modified");
        if (header == null) {
            return System.currentTimeMillis();
        }
        final String lastModified = header.getValue();
        try {
            // SimpleDateFormat is not thread-safe and this handler is shared between
            // threads, so serialize access to the shared formatter instance
            synchronized (LAST_MODIFIED_FORMAT) {
                return LAST_MODIFIED_FORMAT.parse(lastModified).getTime();
            }
        } catch (ParseException e) {
            // ignored
        }
        return System.currentTimeMillis();
    }

    /** Issues a GET request for {@code url} with gzip/deflate accepted. Caller closes the response. */
    private CloseableHttpResponse doGet(final URL url, final int connectionTimeout,
            final int readTimeout) throws IOException {
        final RequestConfig requestConfig = RequestConfig.custom()
                .setSocketTimeout(readTimeout)
                .setConnectTimeout(connectionTimeout)
                .setAuthenticationEnabled(hasCredentialsConfigured(url))
                .setTargetPreferredAuthSchemes(getAuthSchemePreferredOrder())
                .setProxyPreferredAuthSchemes(getAuthSchemePreferredOrder())
                .build();
        final HttpGet httpGet = new HttpGet(normalizeToString(url));
        httpGet.setConfig(requestConfig);
        httpGet.addHeader("Accept-Encoding", "gzip,deflate");
        return this.httpClient.execute(httpGet);
    }

    /** Issues a HEAD request for {@code url}. Caller closes the response. */
    private CloseableHttpResponse doHead(final URL url, final int connectionTimeout,
            final int readTimeout) throws IOException {
        final RequestConfig requestConfig = RequestConfig.custom()
                .setSocketTimeout(readTimeout)
                .setConnectTimeout(connectionTimeout)
                .setAuthenticationEnabled(hasCredentialsConfigured(url))
                .setTargetPreferredAuthSchemes(getAuthSchemePreferredOrder())
                .setProxyPreferredAuthSchemes(getAuthSchemePreferredOrder())
                .build();
        final HttpHead httpHead = new HttpHead(normalizeToString(url));
        httpHead.setConfig(requestConfig);
        return this.httpClient.execute(httpHead);
    }

    private boolean hasCredentialsConfigured(final URL url) {
        return CredentialsStore.INSTANCE.hasCredentials(url.getHost());
    }

    @Override
    public void close() throws Exception {
        if (this.httpClient != null) {
            this.httpClient.close();
        }
    }

    /**
     * Bridges HttpClient's credential lookup to Ivy's {@link CredentialsStore}. NTLM-style
     * {@code domain\\user} names are split into domain and user parts.
     */
    private static class IvyCredentialsProvider implements CredentialsProvider {

        private final ConcurrentHashMap<AuthScope, Credentials> cachedCreds = new ConcurrentHashMap<>();

        @Override
        public void setCredentials(final AuthScope authscope, final Credentials credentials) {
            if (authscope == null) {
                throw new IllegalArgumentException("AuthScope cannot be null");
            }
            this.cachedCreds.put(authscope, credentials);
        }

        @Override
        public Credentials getCredentials(final AuthScope authscope) {
            if (authscope == null) {
                return null;
            }
            final String realm = authscope.getRealm();
            final String host = authscope.getHost();
            final org.apache.ivy.util.Credentials ivyConfiguredCred =
                    CredentialsStore.INSTANCE.getCredentials(realm, host);
            if (ivyConfiguredCred == null) {
                return null;
            }
            return createCredentials(ivyConfiguredCred.getUserName(),
                    ivyConfiguredCred.getPasswd());
        }

        @Override
        public void clear() {
            this.cachedCreds.clear();
        }

        private static Credentials createCredentials(final String username, final String password) {
            final String user;
            final String domain;
            int backslashIndex = username.indexOf('\\');
            if (backslashIndex >= 0) {
                user = username.substring(backslashIndex + 1);
                domain = username.substring(0, backslashIndex);
            } else {
                user = username;
                domain = System.getProperty("http.auth.ntlm.domain", "");
            }
            return new NTCredentials(user, password, HostUtil.getLocalHostName(), domain);
        }
    }
}
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Copyright 2008 Google Inc.  All Rights Reserved.
package org.openqa.selenium.remote;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.openqa.selenium.WebDriverException;

import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Utility class for converting between JSON and Java Objects.
 */
public class BeanToJsonConverter {

  private static final int MAX_DEPTH = 5;

  /**
   * Convert an object that may or may not be a JSONArray or JSONObject into
   * its JSON string representation, handling the case where it is neither in a
   * graceful way.
   *
   * @param object which needs conversion
   * @return the JSON string representation of object
   */
  public String convert(Object object) {
    if (object == null) {
      return null;
    }

    try {
      Object json = convertObject(object, MAX_DEPTH);
      return (json instanceof JSONObject || json instanceof JSONArray)
          ? json.toString()
          : String.valueOf(object);
    } catch (JSONException e) {
      throw new WebDriverException("Unable to convert: " + object, e);
    }
  }

  /**
   * Convert a JSON[Array|Object] into the equivalent Java Collection type
   * (that is, List|Map) returning other objects untouched. This method is used
   * for preparing values for use by the HttpCommandExecutor
   *
   * @param o Object to convert
   * @return a Map, List or the unconverted Object.
   */
  private Object convertUnknownObjectFromJson(Object o) {
    if (o instanceof JSONArray) {
      return convertJsonArray((JSONArray) o);
    }

    return (o instanceof JSONObject) ? convertJsonObject((JSONObject) o) : o;
  }

  /** Recursively turn a JSONObject into a plain Map keyed by its string keys. */
  private Map<String, Object> convertJsonObject(JSONObject jsonObject) {
    Map<String, Object> result = new HashMap<String, Object>();
    for (Iterator<?> keys = jsonObject.keys(); keys.hasNext();) {
      String key = (String) keys.next();
      try {
        result.put(key, convertUnknownObjectFromJson(jsonObject.get(key)));
      } catch (JSONException e) {
        throw new IllegalStateException("Unable to access key: " + key, e);
      }
    }
    return result;
  }

  /** Recursively turn a JSONArray into a plain List, element by element. */
  private List<Object> convertJsonArray(JSONArray jsonArray) {
    List<Object> result = new ArrayList<Object>();
    for (int i = 0; i < jsonArray.length(); i++) {
      try {
        result.add(convertUnknownObjectFromJson(jsonArray.get(i)));
      } catch (JSONException e) {
        throw new IllegalStateException("Cannot convert object at index: " + i, e);
      }
    }
    return result;
  }

  /**
   * Convert a Java object to its JSON counterpart: primitives and strings pass
   * through, enums become their string form, maps/collections/arrays are
   * converted element-wise, and anything else is bean-introspected.
   */
  @SuppressWarnings({"unchecked"})
  private Object convertObject(Object toConvert, int maxDepth) throws JSONException {
    if (toConvert == null) {
      return null;
    }

    if (toConvert instanceof Boolean
        || toConvert instanceof CharSequence
        || toConvert instanceof Number) {
      return toConvert;
    }

    if (toConvert.getClass().isEnum() || toConvert instanceof Enum) {
      return toConvert.toString();
    }

    if (toConvert instanceof Map) {
      JSONObject asJson = new JSONObject();
      for (Object rawEntry : ((Map) toConvert).entrySet()) {
        Map.Entry<String, Object> entry = (Map.Entry) rawEntry;
        asJson.put(entry.getKey(), convertObject(entry.getValue(), maxDepth - 1));
      }
      return asJson;
    }

    // note: must come after the Map branch so plain maps are still recursed into
    if (toConvert instanceof JSONObject) {
      return toConvert;
    }

    if (toConvert instanceof Collection) {
      JSONArray asArray = new JSONArray();
      for (Object item : (Collection) toConvert) {
        asArray.put(convertObject(item, maxDepth - 1));
      }
      return asArray;
    }

    if (toConvert.getClass().isArray()) {
      JSONArray asArray = new JSONArray();
      int length = Array.getLength(toConvert);
      for (int i = 0; i < length; i++) {
        asArray.put(convertObject(Array.get(toConvert, i), maxDepth - 1));
      }
      return asArray;
    }

    // Context and SessionId both serialize as {"value": toString()}
    if (toConvert instanceof Context || toConvert instanceof SessionId) {
      JSONObject wrapped = new JSONObject();
      wrapped.put("value", toConvert.toString());
      return wrapped;
    }

    try {
      return mapObject(toConvert, maxDepth - 1);
    } catch (Exception e) {
      throw new WebDriverException(e);
    }
  }

  /**
   * Serialize an arbitrary bean by introspecting its readable properties.
   * Returns null once the depth budget is exhausted.
   */
  private Object mapObject(Object toConvert, int maxDepth) throws Exception {
    if (maxDepth == 0) {
      return null;
    }

    // Raw object via reflection? Nope, not needed
    JSONObject mapped = new JSONObject();
    BeanInfo beanInfo = Introspector.getBeanInfo(toConvert.getClass());
    for (PropertyDescriptor property : beanInfo.getPropertyDescriptors()) {
      if ("class".equals(property.getName())) {
        mapped.put("class", toConvert.getClass().getName());
        continue;
      }

      Method reader = property.getReadMethod();
      if (reader == null) {
        continue;
      }

      reader.setAccessible(true);
      mapped.put(property.getName(), convertObject(reader.invoke(toConvert), maxDepth - 1));
    }
    return mapped;
  }
}
package com.fsck.k9.mail; import java.io.IOException; import java.util.Collections; import java.util.Date; import java.util.EnumSet; import java.util.Set; import android.util.Log; import com.fsck.k9.mail.filter.CountingOutputStream; import com.fsck.k9.mail.filter.EOLConvertingOutputStream; import static com.fsck.k9.mail.K9MailLib.LOG_TAG; public abstract class Message implements Part, CompositeBody { public enum RecipientType { TO, CC, BCC, } protected String mUid; private Set<Flag> mFlags = EnumSet.noneOf(Flag.class); private Date mInternalDate; protected Folder mFolder; public boolean olderThan(Date earliestDate) { if (earliestDate == null) { return false; } Date myDate = getSentDate(); if (myDate == null) { myDate = getInternalDate(); } if (myDate != null) { return myDate.before(earliestDate); } return false; } @Override public boolean equals(Object o) { if (o == null || !(o instanceof Message)) { return false; } Message other = (Message)o; return (getUid().equals(other.getUid()) && getFolder().getName().equals(other.getFolder().getName())); } @Override public int hashCode() { final int MULTIPLIER = 31; int result = 1; result = MULTIPLIER * result + mFolder.getName().hashCode(); result = MULTIPLIER * result + mUid.hashCode(); return result; } public String getUid() { return mUid; } public void setUid(String uid) { this.mUid = uid; } public Folder getFolder() { return mFolder; } public abstract String getSubject(); public abstract void setSubject(String subject) throws MessagingException; public Date getInternalDate() { return mInternalDate; } public void setInternalDate(Date internalDate) { this.mInternalDate = internalDate; } public abstract Date getSentDate(); public abstract void setSentDate(Date sentDate, boolean hideTimeZone) throws MessagingException; public abstract Address[] getRecipients(RecipientType type) throws MessagingException; public abstract void setRecipients(RecipientType type, Address[] addresses) throws MessagingException; public void 
setRecipient(RecipientType type, Address address) throws MessagingException { setRecipients(type, new Address[] { address }); } public abstract Address[] getFrom(); public abstract void setFrom(Address from) throws MessagingException; public abstract Address[] getReplyTo(); public abstract void setReplyTo(Address[] from) throws MessagingException; public abstract String getMessageId() throws MessagingException; public abstract void setInReplyTo(String inReplyTo) throws MessagingException; public abstract String[] getReferences() throws MessagingException; public abstract void setReferences(String references) throws MessagingException; @Override public abstract Body getBody(); @Override public abstract void addHeader(String name, String value) throws MessagingException; @Override public abstract void addRawHeader(String name, String raw) throws MessagingException; @Override public abstract void setHeader(String name, String value) throws MessagingException; @Override public abstract String[] getHeader(String name) throws MessagingException; public abstract Set<String> getHeaderNames() throws MessagingException; @Override public abstract void removeHeader(String name) throws MessagingException; @Override public abstract void setBody(Body body); public abstract long getId(); public abstract boolean hasAttachments(); public abstract int getSize(); public void delete(String trashFolderName) throws MessagingException {} /* * TODO Refactor Flags at some point to be able to store user defined flags. */ public Set<Flag> getFlags() { return Collections.unmodifiableSet(mFlags); } /** * @param flag * Flag to set. Never <code>null</code>. * @param set * If <code>true</code>, the flag is added. If <code>false</code> * , the flag is removed. 
* @throws MessagingException */ public void setFlag(Flag flag, boolean set) throws MessagingException { if (set) { mFlags.add(flag); } else { mFlags.remove(flag); } } /** * This method calls setFlag(Flag, boolean) * @param flags * @param set */ public void setFlags(final Set<Flag> flags, boolean set) throws MessagingException { for (Flag flag : flags) { setFlag(flag, set); } } public boolean isSet(Flag flag) { return mFlags.contains(flag); } public void destroy() throws MessagingException {} @Override public abstract void setEncoding(String encoding) throws MessagingException; public abstract void setCharset(String charset) throws MessagingException; public long calculateSize() { try { CountingOutputStream out = new CountingOutputStream(); EOLConvertingOutputStream eolOut = new EOLConvertingOutputStream(out); writeTo(eolOut); eolOut.flush(); return out.getCount(); } catch (IOException e) { Log.e(LOG_TAG, "Failed to calculate a message size", e); } catch (MessagingException e) { Log.e(LOG_TAG, "Failed to calculate a message size", e); } return 0; } /** * Copy the contents of this object into another {@code Message} object. * * @param destination The {@code Message} object to receive the contents of this instance. */ protected void copy(Message destination) { destination.mUid = mUid; destination.mInternalDate = mInternalDate; destination.mFolder = mFolder; // mFlags contents can change during the object lifetime, so copy the Set destination.mFlags = EnumSet.copyOf(mFlags); } /** * Creates a new {@code Message} object with the same content as this object. * * <p> * <strong>Note:</strong> * This method was introduced as a hack to prevent {@code ConcurrentModificationException}s. It * shouldn't be used unless absolutely necessary. See the comment in * {@link com.fsck.k9.activity.MessageView.Listener#loadMessageForViewHeadersAvailable(com.fsck.k9.Account, String, String, Message)} * for more information. * </p> */ @Override public abstract Message clone(); }
package org.altbeacon.beacon.service; import android.app.job.JobInfo; import android.app.job.JobScheduler; import android.bluetooth.le.ScanResult; import android.content.ComponentName; import android.content.Context; import android.os.Build; import android.os.PersistableBundle; import android.os.SystemClock; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.RequiresApi; import org.altbeacon.beacon.BeaconLocalBroadcastProcessor; import org.altbeacon.beacon.BeaconManager; import org.altbeacon.beacon.logging.LogManager; import java.util.ArrayList; import java.util.List; /** * Schedules two types of ScanJobs: * 1. Periodic, which are set to go every scanPeriod+betweenScanPeriod * 2. Immediate, which go right now. * * Immediate ScanJobs are used when the app is in the foreground and wants to get immediate results * or when beacons have been detected with background scan filters and delivered via Intents and * a scan needs to run in a timely manner to collect data about those beacons known to be newly * in the vicinity despite the app being in the background. * * Created by dyoung on 6/7/17. 
* @hide
*/
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public class ScanJobScheduler {
    private static final String TAG = ScanJobScheduler.class.getSimpleName();
    private static final Object SINGLETON_LOCK = new Object();
    // Debounce window so a burst of background wakeups only schedules one immediate job.
    private static final long MIN_MILLIS_BETWEEN_SCAN_JOB_SCHEDULING = 10000L;

    @Nullable
    private static volatile ScanJobScheduler sInstance = null;

    // Wall-clock millis of the last immediate-job scheduling; guarded by "this"
    // in scheduleAfterBackgroundWakeup. Primitive long instead of a boxed Long.
    private long mScanJobScheduleTime = 0L;
    @NonNull
    private List<ScanResult> mBackgroundScanResultQueue = new ArrayList<>();
    @Nullable
    private BeaconLocalBroadcastProcessor mBeaconNotificationProcessor;

    /** Double-checked-locking singleton accessor. */
    @NonNull
    public static ScanJobScheduler getInstance() {
        ScanJobScheduler instance = sInstance;
        if (instance == null) {
            synchronized (SINGLETON_LOCK) {
                instance = sInstance;
                if (instance == null) {
                    sInstance = instance = new ScanJobScheduler();
                }
            }
        }
        return instance;
    }

    private ScanJobScheduler() {
    }

    // Lazily create and register the local broadcast processor exactly once.
    private void ensureNotificationProcessorSetup(Context context) {
        if (mBeaconNotificationProcessor == null) {
            mBeaconNotificationProcessor = new BeaconLocalBroadcastProcessor(context);
            mBeaconNotificationProcessor.register();
        }
    }

    /**
     * @return previously queued scan results delivered in the background
     */
    List<ScanResult> dumpBackgroundScanResultQueue() {
        List<ScanResult> retval = mBackgroundScanResultQueue;
        mBackgroundScanResultQueue = new ArrayList<>();
        return retval;
    }

    private void applySettingsToScheduledJob(Context context, BeaconManager beaconManager,
                                             ScanState scanState) {
        scanState.applyChanges(beaconManager);
        LogManager.d(TAG, "Applying scan job settings with background mode "
                + scanState.getBackgroundMode());
        schedule(context, scanState, false);
    }

    /** Re-reads persisted scan state and reschedules the jobs to match current settings. */
    public void applySettingsToScheduledJob(Context context, BeaconManager beaconManager) {
        LogManager.d(TAG, "Applying settings to ScanJob");
        ScanState scanState = ScanState.restore(context);
        applySettingsToScheduledJob(context, beaconManager, scanState);
    }

    // This method appears to be never used, because it is only used by Android O APIs, which
    // must exist on another branch until the SDKs are released.
    public void scheduleAfterBackgroundWakeup(Context context, List<ScanResult> scanResults) {
        if (scanResults != null) {
            mBackgroundScanResultQueue.addAll(scanResults);
        }
        synchronized (this) {
            // We typically get a bunch of calls in a row here, separated by a few millis.
            // Only do this once per debounce window.
            if (System.currentTimeMillis() - mScanJobScheduleTime > MIN_MILLIS_BETWEEN_SCAN_JOB_SCHEDULING) {
                LogManager.d(TAG, "scheduling an immediate scan job because last ran "
                        + (System.currentTimeMillis() - mScanJobScheduleTime) + " millis ago.");
                mScanJobScheduleTime = System.currentTimeMillis();
            } else {
                LogManager.d(TAG, "Not scheduling an immediate scan job because we just did recently.");
                return;
            }
        }
        ScanState scanState = ScanState.restore(context);
        schedule(context, scanState, true);
    }

    /** Schedules the next (non-immediate) scan cycle from persisted scan state. */
    public void forceScheduleNextScan(Context context) {
        ScanState scanState = ScanState.restore(context);
        schedule(context, scanState, false);
    }

    /**
     * Schedules both job flavors:
     * an immediate one-shot ScanJob (when in the foreground or woken by a background
     * detection) and the recurring periodic ScanJob.
     *
     * @param backgroundWakeup true when triggered by a background scan result delivery
     */
    private void schedule(Context context, ScanState scanState, boolean backgroundWakeup) {
        ensureNotificationProcessorSetup(context);
        long betweenScanPeriod = scanState.getScanJobIntervalMillis() - scanState.getScanJobRuntimeMillis();

        long millisToNextJobStart;
        if (backgroundWakeup) {
            LogManager.d(TAG, "We just woke up in the background based on a new scan result. Start scan job immediately.");
            millisToNextJobStart = 0;
        } else {
            if (betweenScanPeriod > 0) {
                // If we pause between scans, then we need to start scanning on a normalized time
                millisToNextJobStart = (SystemClock.elapsedRealtime() % scanState.getScanJobIntervalMillis());
            } else {
                millisToNextJobStart = 0;
            }

            if (millisToNextJobStart < 50) {
                // always wait a little bit to start scanning in case settings keep changing
                // from the user restarting settings and scanning. 50ms should be fine
                millisToNextJobStart = 50;
            }
        }

        JobScheduler jobScheduler = (JobScheduler) context.getSystemService(Context.JOB_SCHEDULER_SERVICE);
        if (backgroundWakeup || !scanState.getBackgroundMode()) {
            // If we are in the foreground, and we want to start a scan soon, we will schedule an
            // immediate job
            if (millisToNextJobStart < scanState.getScanJobIntervalMillis() - 50) {
                // If the next time we want to scan is less than 50ms from the periodic scan cycle,
                // then we schedule it for that specific time.
                LogManager.d(TAG, "Scheduling immediate ScanJob to run in " + millisToNextJobStart + " millis");
                JobInfo immediateJob = new JobInfo.Builder(ScanJob.IMMEDIATE_SCAN_JOB_ID, new ComponentName(context, ScanJob.class))
                        .setPersisted(true) // This makes it restart after reboot
                        .setExtras(new PersistableBundle())
                        .setMinimumLatency(millisToNextJobStart)
                        .setOverrideDeadline(millisToNextJobStart).build();
                int error = jobScheduler.schedule(immediateJob);
                if (error < 0) {
                    LogManager.e(TAG, "Failed to schedule scan job. Beacons will not be detected. Error: " + error);
                }
            } else {
                LogManager.d(TAG, "Not scheduling immediate scan, assuming periodic is about to run");
            }
        } else {
            LogManager.d(TAG, "Not scheduling an immediate scan because we are in background mode. Cancelling existing immediate scan.");
            jobScheduler.cancel(ScanJob.IMMEDIATE_SCAN_JOB_ID);
        }

        JobInfo.Builder periodicJobBuilder = new JobInfo.Builder(ScanJob.PERIODIC_SCAN_JOB_ID, new ComponentName(context, ScanJob.class))
                .setPersisted(true) // This makes it restart after reboot
                .setExtras(new PersistableBundle());

        if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            // On Android N+ we specify a tolerance of 0ms (capped at 5% by the OS) to ensure
            // our scans happen within 5% of the scheduled time.
            // (The stray .build() whose result was discarded has been removed; the
            // builder is built once below.)
            periodicJobBuilder.setPeriodic(scanState.getScanJobIntervalMillis(), 0L);
        } else {
            periodicJobBuilder.setPeriodic(scanState.getScanJobIntervalMillis());
        }
        // NOTE(review): On Android N/O the OS clamps the requested period, e.g.:
        //   W/JobInfo: Specified interval for 1 is +5m10s0ms. Clamped to +15m0s0ms
        //   W/JobInfo: Specified flex for 1 is 0. Clamped to +5m0s0ms
        // so periodic jobs effectively run about every 15 minutes +/- 5 minutes in the
        // background, with some observed slop (see
        // https://stackoverflow.com/questions/38344220/job-scheduler-not-running-on-android-n).
        final JobInfo jobInfo = periodicJobBuilder.build();
        LogManager.d(TAG, "Scheduling ScanJob " + jobInfo + " to run every " + scanState.getScanJobIntervalMillis() + " millis");
        int error = jobScheduler.schedule(jobInfo);
        if (error < 0) {
            LogManager.e(TAG, "Failed to schedule scan job. Beacons will not be detected. Error: " + error);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.scalding; import com.twitter.scalding.ScaldingILoop; import org.apache.hadoop.security.UserGroupInformation; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterResult.Code; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.scheduler.Scheduler; import org.apache.zeppelin.scheduler.SchedulerFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import scala.Console; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Properties; /** * Scalding interpreter for Zeppelin. Based off the Spark interpreter code. 
* */ public class ScaldingInterpreter extends Interpreter { Logger logger = LoggerFactory.getLogger(ScaldingInterpreter.class); static final String ARGS_STRING = "args.string"; static final String ARGS_STRING_DEFAULT = "--local --repl"; static final String MAX_OPEN_INSTANCES = "max.open.instances"; static final String MAX_OPEN_INSTANCES_DEFAULT = "50"; public static final List NO_COMPLETION = Collections.unmodifiableList(new ArrayList<>()); static int numOpenInstances = 0; private ScaldingILoop interpreter; private ByteArrayOutputStream out; public ScaldingInterpreter(Properties property) { super(property); out = new ByteArrayOutputStream(); } @Override public void open() { numOpenInstances = numOpenInstances + 1; String maxOpenInstancesStr = property.getProperty(MAX_OPEN_INSTANCES, MAX_OPEN_INSTANCES_DEFAULT); int maxOpenInstances = 50; try { maxOpenInstances = Integer.valueOf(maxOpenInstancesStr); } catch (Exception e) { logger.error("Error reading max.open.instances", e); } logger.info("max.open.instances = {}", maxOpenInstances); if (numOpenInstances > maxOpenInstances) { logger.error("Reached maximum number of open instances"); return; } logger.info("Opening instance {}", numOpenInstances); logger.info("property: {}", property); String argsString = property.getProperty(ARGS_STRING, ARGS_STRING_DEFAULT); String[] args; if (argsString == null) { args = new String[0]; } else { args = argsString.split(" "); } logger.info("{}", Arrays.toString(args)); PrintWriter printWriter = new PrintWriter(out, true); interpreter = ZeppelinScaldingShell.getRepl(args, printWriter); interpreter.createInterpreter(); } @Override public void close() { interpreter.intp().close(); } @Override public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) { String user = contextInterpreter.getAuthenticationInfo().getUser(); logger.info("Running Scalding command: user: {} cmd: '{}'", user, cmd); if (interpreter == null) { logger.error( "interpreter == null, open 
may not have been called because max.open.instances reached"); return new InterpreterResult(Code.ERROR, "interpreter == null\n" + "open may not have been called because max.open.instances reached" ); } if (cmd == null || cmd.trim().length() == 0) { return new InterpreterResult(Code.SUCCESS); } InterpreterResult interpreterResult = new InterpreterResult(Code.ERROR); if (property.getProperty(ARGS_STRING).contains("hdfs")) { UserGroupInformation ugi = null; try { ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser()); } catch (IOException e) { logger.error("Error creating UserGroupInformation", e); return new InterpreterResult(Code.ERROR, e.getMessage()); } try { // Make variables final to avoid "local variable is accessed from within inner class; // needs to be declared final" exception in JDK7 final String cmd1 = cmd; final InterpreterContext contextInterpreter1 = contextInterpreter; PrivilegedExceptionAction<InterpreterResult> action = new PrivilegedExceptionAction<InterpreterResult>() { public InterpreterResult run() throws Exception { return interpret(cmd1.split("\n"), contextInterpreter1); } }; interpreterResult = ugi.doAs(action); } catch (Exception e) { logger.error("Error running command with ugi.doAs", e); return new InterpreterResult(Code.ERROR, e.getMessage()); } } else { interpreterResult = interpret(cmd.split("\n"), contextInterpreter); } return interpreterResult; } public InterpreterResult interpret(String[] lines, InterpreterContext context) { synchronized (this) { InterpreterResult r = interpretInput(lines); return r; } } public InterpreterResult interpretInput(String[] lines) { // add print("") to make sure not finishing with comment // see https://github.com/NFLabs/zeppelin/issues/151 String[] linesToRun = new String[lines.length + 1]; for (int i = 0; i < lines.length; i++) { linesToRun[i] = lines[i]; } linesToRun[lines.length] = "print(\"\")"; out.reset(); // Moving two lines below from open() to this function. 
// If they are in open output is incomplete. PrintStream printStream = new PrintStream(out, true); Console.setOut(printStream); Code r = null; String incomplete = ""; boolean inComment = false; for (int l = 0; l < linesToRun.length; l++) { String s = linesToRun[l]; // check if next line starts with "." (but not ".." or "./") it is treated as an invocation if (l + 1 < linesToRun.length) { String nextLine = linesToRun[l + 1].trim(); boolean continuation = false; if (nextLine.isEmpty() || nextLine.startsWith("//") // skip empty line or comment || nextLine.startsWith("}") || nextLine.startsWith("object")) { // include "} object" for Scala companion object continuation = true; } else if (!inComment && nextLine.startsWith("/*")) { inComment = true; continuation = true; } else if (inComment && nextLine.lastIndexOf("*/") >= 0) { inComment = false; continuation = true; } else if (nextLine.length() > 1 && nextLine.charAt(0) == '.' && nextLine.charAt(1) != '.' // ".." && nextLine.charAt(1) != '/') { // "./" continuation = true; } else if (inComment) { continuation = true; } if (continuation) { incomplete += s + "\n"; continue; } } scala.tools.nsc.interpreter.Results.Result res = null; try { res = interpreter.intp().interpret(incomplete + s); } catch (Exception e) { logger.error("Interpreter exception: ", e); return new InterpreterResult(Code.ERROR, e.getMessage()); } r = getResultCode(res); if (r == Code.ERROR) { Console.flush(); return new InterpreterResult(r, out.toString()); } else if (r == Code.INCOMPLETE) { incomplete += s + "\n"; } else { incomplete = ""; } } if (r == Code.INCOMPLETE) { return new InterpreterResult(r, "Incomplete expression"); } else { Console.flush(); return new InterpreterResult(r, out.toString()); } } private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) { if (r instanceof scala.tools.nsc.interpreter.Results.Success$) { return Code.SUCCESS; } else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) { return 
Code.INCOMPLETE; } else { return Code.ERROR; } } @Override public void cancel(InterpreterContext context) { // not implemented } @Override public FormType getFormType() { return FormType.NATIVE; } @Override public int getProgress(InterpreterContext context) { // fine-grained progress not implemented - return 0 return 0; } @Override public Scheduler getScheduler() { return SchedulerFactory.singleton().createOrGetFIFOScheduler( ScaldingInterpreter.class.getName() + this.hashCode()); } @Override public List<InterpreterCompletion> completion(String buf, int cursor) { return NO_COMPLETION; } }
package gov.va.vinci.leo.cr;

/*
 * #%L
 * Leo Client
 * %%
 * Copyright (C) 2010 - 2014 Department of Veterans Affairs
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import gov.va.vinci.leo.descriptors.LeoConfigurationParameter;
import gov.va.vinci.leo.model.DataQueryInformation;
import gov.va.vinci.leo.model.DatabaseConnectionInformation;
import org.apache.uima.collection.CollectionException;
import org.apache.uima.resource.ResourceInitializationException;

import java.security.SecureRandom;
import java.util.HashSet;

/**
 * Database CollectionReader that pulls the data in batches of a configurable size. Assumes the query provided returns
 * one record per row of results.
 *
 * The configured query template must contain the placeholders {@code {min}} and {@code {max}}, which are substituted
 * with the row boundaries of each batch (see {@link #getQuery(int)}).
 *
 * User: Thomas Ginter
 * Date: 7/18/14
 * Time: 11:44
 */
public class BatchDatabaseCollectionReader extends DatabaseCollectionReader {

    /**
     * Min record ID.
     */
    @LeoConfigurationParameter(mandatory = true)
    protected int minRecordNumber = -1;

    /**
     * Max record ID.
     */
    @LeoConfigurationParameter(mandatory = true)
    protected int maxRecordNumber = -1;

    /**
     * Size of each batch to pull.
     */
    @LeoConfigurationParameter(mandatory = true)
    protected int batchSize = -1;

    /**
     * Number of random batches to pull.
     */
    @LeoConfigurationParameter
    protected int randomBatches = 0;

    /**
     * Query template that will be filled in with min and max values for each batch.
     */
    protected String baseQuery = null;

    /**
     * Total number of batches available to be executed in the table.
     */
    protected int totalNumberOfBatches = 0;

    /**
     * Current batch number we are executing.
     */
    protected int currentBatch = 0;

    /**
     * True if we are pulling random batches.
     */
    protected boolean isRandom = false;

    /**
     * Store the batch numbers already visited when executing random batches.
     */
    protected HashSet<Integer> usedBatches = new HashSet<>();

    /**
     * Generates the random batch number if random batches are indicated.
     */
    protected static final SecureRandom secureRandom = new SecureRandom();

    /**
     * Default constructor used during UIMA initialization.
     */
    public BatchDatabaseCollectionReader() {
        /** Do Nothing **/
    }

    /**
     * Initialize the reader using the database connection and query information provided, including the size of each
     * batch and optionally the number of random batches to execute.
     *
     * @param databaseConnectionInformation database connection information to use.
     * @param dataQueryInformation database query for retrieving results, assumes one record per row.
     * @param minRecordNumber starting row number for record retrieval.
     * @param maxRecordNumber ending row number for record retrieval.
     * @param batchSize size of each batch to retrieve at a time.  Optimal size will vary with the database environment.
     */
    public BatchDatabaseCollectionReader(DatabaseConnectionInformation databaseConnectionInformation,
                                         DataQueryInformation dataQueryInformation,
                                         int minRecordNumber, int maxRecordNumber, int batchSize) {
        this(databaseConnectionInformation.getDriver(), databaseConnectionInformation.getUrl(),
                databaseConnectionInformation.getUsername(), databaseConnectionInformation.getPassword(),
                dataQueryInformation.getQuery(), dataQueryInformation.getIdColumn(),
                dataQueryInformation.getNoteColumn(), minRecordNumber, maxRecordNumber, batchSize);
    }

    /**
     * Initialize the reader using the provided connection and query information, including the size of each batch and
     * optionally the number of random batches to execute.
     *
     * @param driver JDBC driver class
     * @param url JDBC connection URL
     * @param username database user name.
     * @param password database user password.
     * @param query SQL query used to retrieve the data, one record per row of results.
     * @param idColumn name of the ID column in the SQL query.  Assumes only one column to use as the record ID.
     *                 Additional ID fields are propagated through the row results String array of the CSI annotation.
     * @param noteColumn name of the note column in the SQL query.  Assumes only one note column.
     * @param minRecordNumber starting row number for record retrieval.
     * @param maxRecordNumber ending row number for record retrieval.
     * @param batchSize size of each batch to retrieve at a time.  Optimal size will vary with the database environment.
     */
    public BatchDatabaseCollectionReader(String driver, String url, String username, String password, String query,
                                         String idColumn, String noteColumn, int minRecordNumber, int maxRecordNumber,
                                         int batchSize) {
        super(driver, url, username, password, query, idColumn, noteColumn);
        validateParams(minRecordNumber, maxRecordNumber, batchSize);
        this.minRecordNumber = minRecordNumber;
        this.maxRecordNumber = maxRecordNumber;
        this.batchSize = batchSize;
        // Preserve the {min}/{max} template; this.query is overwritten per batch in hasNext().
        this.baseQuery = this.query;
    }

    /**
     * Return the minumum row number.
     *
     * @return minimum row number
     */
    public int getMinRecordNumber() {
        return minRecordNumber;
    }

    /**
     * Set the minimum row number.
     *
     * @param minRecordNumber minimum row number
     * @return reference to this reader instance
     */
    public BatchDatabaseCollectionReader setMinRecordNumber(int minRecordNumber) {
        this.minRecordNumber = minRecordNumber;
        return this;
    }

    /**
     * Return the maximum row number.
     *
     * @return maximum row number
     */
    public int getMaxRecordNumber() {
        return maxRecordNumber;
    }

    /**
     * Set the maximum row number.
     *
     * @param maxRecordNumber max row number
     * @return reference to this reader instance
     */
    public BatchDatabaseCollectionReader setMaxRecordNumber(int maxRecordNumber) {
        this.maxRecordNumber = maxRecordNumber;
        return this;
    }

    /**
     * Get the batch size.
     *
     * @return batch size
     */
    public int getBatchSize() {
        return batchSize;
    }

    /**
     * Set the batch size.
     *
     * @param batchSize batch size
     * @return reference to this reader instance
     */
    public BatchDatabaseCollectionReader setBatchSize(int batchSize) {
        this.batchSize = batchSize;
        return this;
    }

    /**
     * Get the number of random batches, defaults to zero.
     *
     * @return number of random batches
     */
    public int getRandomBatches() {
        return randomBatches;
    }

    /**
     * Set the number of random batches.
     *
     * @param randomBatches number of random batches
     * @return reference to this reader instance
     */
    public BatchDatabaseCollectionReader setRandomBatches(int randomBatches) {
        this.randomBatches = randomBatches;
        return this;
    }

    /**
     * Validate the parameters that will initialize this object.
     *
     * @param minRecordNumber Minimum record ID where processing will start
     * @param maxRecordNumber Maximum record ID where processing will start
     * @param batchSize Size of each batch to be executed
     * @throws IllegalArgumentException if any value is negative
     */
    protected void validateParams(int minRecordNumber, int maxRecordNumber, int batchSize) {
        if(minRecordNumber < 0) {
            throw new IllegalArgumentException("Minimum record ID must be 0 or greater!");
        }
        if(maxRecordNumber < 0) {
            throw new IllegalArgumentException("Max record ID must be 0 or greater!");
        }
        if(batchSize < 0) {
            throw new IllegalArgumentException("Batch size must be 0 or greater!");
        }
    }

    /**
     * This method is called during initialization.  Computes the total number of batches from the configured
     * record range and batch size.
     *
     * @throws org.apache.uima.resource.ResourceInitializationException if a failure occurs during initialization.
     */
    @Override
    public void initialize() throws ResourceInitializationException {
        super.initialize();
        //Calculate the total number of batches
        int length = (this.maxRecordNumber - this.minRecordNumber);
        if(length < 0) {
            totalNumberOfBatches = 0;
        } else if(length == 0) {
            totalNumberOfBatches = 1;
        } else {
            // NOTE(review): batchSize == 0 passes validateParams but makes this division produce
            // Integer.MAX_VALUE batches - confirm whether 0 should be rejected upstream.
            totalNumberOfBatches = (int) Math.ceil((double) length / (double) batchSize);
        }
        this.baseQuery = this.query;
        if(randomBatches > 0)
            isRandom = true;
    }

    /**
     * Advances through the batches, fetching the next batch of rows whenever the current result set is exhausted.
     *
     * @return true if and only if there are more elements available from this CollectionReader.
     * @throws org.apache.uima.collection.CollectionException
     */
    @Override
    public boolean hasNext() throws CollectionException {
        //if the current row set is empty or the index is still -1 then setup the query for the next set
        while(mRecordList == null || !super.hasNext()) {
            //Setup the next batch query if there is one
            if(isRandom && randomBatches > 0) {
                //Setup another random batch
                this.query = getQuery(getNextRandomBatchNumber());
                randomBatches--;
            } else if(currentBatch < totalNumberOfBatches) {
                //Setup another inline batch
                this.query = getQuery(currentBatch);
                currentBatch++;
            } else {
                //No more batches or batch data to process, return false
                return false;
            }
            getData(query);
        }
        return true;
    }

    /**
     * Get the next unused random batch number to pull.
     *
     * @return random batch number
     * @throws IllegalStateException if every batch number has already been used (previously this
     *                               condition caused an infinite loop, or an obscure
     *                               IllegalArgumentException when totalNumberOfBatches was 0)
     */
    protected int getNextRandomBatchNumber() {
        //Fail fast instead of spinning forever when no unused batch number remains.
        if(usedBatches.size() >= totalNumberOfBatches) {
            throw new IllegalStateException("All " + totalNumberOfBatches + " batches have already been used!");
        }
        int randomBatch = secureRandom.nextInt(totalNumberOfBatches);
        //make sure we have not picked this batch before
        while(usedBatches.contains(randomBatch)) {
            if(randomBatch < totalNumberOfBatches - 1) {
                //scan upward for a free slot
                randomBatch++;
            } else {
                //hit the top of the range; draw again
                randomBatch = secureRandom.nextInt(totalNumberOfBatches);
            }
        }
        usedBatches.add(randomBatch);
        return randomBatch;
    }

    /**
     * Does a string substitution on the query, replacing {min} and {max} with the appropriate
     * values for this batch.
     *
     * @param batch the batch number being queried.
     * @return the SQL statement with min/max replaced with appropriate values for this batch.
     */
    protected String getQuery(int batch) {
        //Boundaries are exclusive on both ends, hence the -1/+1 adjustments.
        int startRecord = (batch * batchSize) + minRecordNumber - 1;
        int endRecord = startRecord + batchSize + 1;
        if (endRecord > maxRecordNumber) {
            endRecord = maxRecordNumber + 1;
        }
        return this.baseQuery.replaceAll("\\{min\\}", "" + startRecord).replaceAll("\\{max\\}", "" + endRecord);
    }
}
/*
 * Copyright 2017 Axway Software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.axway.ats.uiengine.elements.swing;

import static org.fest.assertions.Assertions.assertThat;
import static org.fest.swing.edt.GuiActionRunner.execute;
import static org.fest.swing.format.Formatting.format;
import static org.fest.util.Strings.concat;
import static org.fest.util.Systems.LINE_SEPARATOR;

import java.awt.Component;
import java.awt.Container;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;

import javax.swing.JMenuItem;

import org.fest.swing.core.BasicComponentFinder;
import org.fest.swing.core.ComponentMatcher;
import org.fest.swing.core.ComponentPrinter;
import org.fest.swing.core.GenericTypeMatcher;
import org.fest.swing.core.Robot;
import org.fest.swing.driver.JMenuItemMatcher;
import org.fest.swing.edt.GuiQuery;
import org.fest.swing.edt.GuiTask;
import org.fest.swing.exception.ComponentLookupException;
import org.fest.swing.fixture.JMenuItemFixture;
import org.fest.swing.hierarchy.ComponentHierarchy;
import org.fest.swing.hierarchy.SingleComponentHierarchy;

/**
 * This class is based on {@link BasicComponentFinder},
 * the only difference is that we are not using {@link LinkedHashSet} for the founded components,
 * but {@link ArrayList} and every time the component index is the same (in the component hierarchy tree),
 * and we are able to search by index.
 */
public class SwingElementFinder {

    /**
     * Find exactly one component matching the supplied matcher.
     *
     * @param robot FEST robot providing the component hierarchy
     * @param root  container to search under, or {@code null} for the whole hierarchy
     * @param m     matcher describing the wanted component
     * @return the single matching component, cast to the matcher's supported type
     * @throws ComponentLookupException if zero or more than one component matches
     */
    public static <T extends Component> T find( Robot robot, Container root, GenericTypeMatcher<T> m ) {

        ComponentHierarchy hierarchy = robot.hierarchy();
        List<Component> found = null;
        if (root == null) {
            found = find(hierarchy, m);
        } else {
            found = find(new SingleComponentHierarchy(root, hierarchy), m);
        }

        if (found.isEmpty()) {
            throw componentNotFound(robot, hierarchy, m);
        }
        if (found.size() > 1) {
            throw multipleComponentsFound(found, m);
        }
        Component component = found.iterator().next();
        return m.supportedType().cast(component);
    }

    /**
     * Find all components matching the supplied matcher, in stable hierarchy order.
     *
     * @param hierarchy component hierarchy to search
     * @param root      container to search under, or {@code null} for the whole hierarchy
     * @param m         matcher describing the wanted components
     * @return list of matches (possibly empty)
     */
    public static List<Component> find( ComponentHierarchy hierarchy, Container root,
                                        GenericTypeMatcher<Component> m ) {

        List<Component> found = null;
        if (root == null) {
            found = find(hierarchy, m);
        } else {
            found = find(new SingleComponentHierarchy(root, hierarchy), m);
        }
        return found;
    }

    /**
     * Locate a menu item by its path (e.g. "File" &gt; "Open") and wrap it in a fixture.
     *
     * @param robot FEST robot
     * @param root  container to search under
     * @param path  menu path elements, outermost first
     * @return fixture for the found {@link JMenuItem}
     */
    public static JMenuItemFixture menuItemWithPath( Robot robot, Container root, String... path ) {

        ComponentMatcher m = new JMenuItemMatcher(path);
        Component item = robot.finder().find(root, m);
        assertThat(item).as(format(item)).isInstanceOf(JMenuItem.class);
        return new JMenuItemFixture(robot, (JMenuItem) item);
    }

    /**
     * Build (but do not throw) the "not found" exception, including a dump of the component
     * hierarchy.  Returning instead of throwing keeps the caller's
     * {@code throw componentNotFound(...)} as the statement that actually raises the exception,
     * matching this method's declared return type.
     */
    private static ComponentLookupException componentNotFound( Robot robot, ComponentHierarchy h,
                                                               ComponentMatcher m ) {

        String message = concat("Unable to find component using matcher ", m, ".");
        message = concat(message, LINE_SEPARATOR, LINE_SEPARATOR, "Component hierarchy:", LINE_SEPARATOR,
                         formattedHierarchy(robot.printer(), root(h)));
        return new ComponentLookupException(message);
    }

    /**
     * Build (but do not throw) the "multiple matches" exception listing every matched component.
     */
    private static ComponentLookupException multipleComponentsFound( Collection<Component> found,
                                                                     ComponentMatcher m ) {

        StringBuilder message = new StringBuilder();
        message.append("Found more than one component using matcher ")
               .append(m)
               .append(".")
               .append(LINE_SEPARATOR)
               .append(LINE_SEPARATOR)
               .append("Found:");
        appendComponents(message, found);
        if (!found.isEmpty()) {
            message.append(LINE_SEPARATOR);
        }
        return new ComponentLookupException(message.toString(), found);
    }

    /** Append a formatted line per component; runs on the EDT because format() touches components. */
    private static void appendComponents( final StringBuilder message, final Collection<Component> found ) {

        execute(new GuiTask() {
            protected void executeInEDT() {

                for (Component c : found)
                    message.append(LINE_SEPARATOR).append(format(c));
            }
        });
    }

    /** Render the component hierarchy under {@code root} as a printable string. */
    private static String formattedHierarchy( ComponentPrinter printer, Container root ) {

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        PrintStream printStream = new PrintStream(out, true);
        printer.printComponents(printStream, root);
        printStream.flush();
        return new String(out.toByteArray());
    }

    /** @return the root container of a single-component hierarchy, or {@code null} otherwise. */
    private static Container root( ComponentHierarchy h ) {

        if (h instanceof SingleComponentHierarchy) {
            return ((SingleComponentHierarchy) h).root();
        }
        return null;
    }

    /** Depth-first search over every root of the hierarchy. */
    private static List<Component> find( ComponentHierarchy h, ComponentMatcher m ) {

        List<Component> found = new ArrayList<Component>();
        for (Object o : rootsOf(h)) {
            find(h, m, (Component) o, found);
        }
        return found;
    }

    /**
     * Recursive post-order walk: children first, then the node itself, so list order (and thus
     * index-based lookup) is deterministic for a given hierarchy.
     */
    private static void find( ComponentHierarchy h, ComponentMatcher m, Component root,
                              List<Component> found ) {

        for (Component c : childrenOfComponent(root, h)) {
            find(h, m, c, found);
        }
        if (isMatching(root, m) && !found.contains(root)) {
            found.add(root);
        }
    }

    /** Evaluate the matcher on the EDT. */
    private static boolean isMatching( final Component c, final ComponentMatcher m ) {

        return execute(new GuiQuery<Boolean>() {
            protected Boolean executeInEDT() {

                return m.matches(c);
            }
        });
    }

    /** Fetch the hierarchy roots on the EDT. */
    private static Collection<? extends Component> rootsOf( final ComponentHierarchy h ) {

        return execute(new GuiQuery<Collection<? extends Component>>() {
            protected Collection<? extends Component> executeInEDT() {

                return h.roots();
            }
        });
    }

    /** Fetch the children of a component on the EDT. */
    private static Collection<Component> childrenOfComponent( final Component c,
                                                              final ComponentHierarchy h ) {

        return execute(new GuiQuery<Collection<Component>>() {
            protected Collection<Component> executeInEDT() {

                return h.childrenOf(c);
            }
        });
    }
}
/*******************************************************************************
 * Product of NIST/ITL Advanced Networking Technologies Division (ANTD)        *
 * Creator: O. Deruelle (deruelle@nist.gov)                                    *
 * Questions/Comments: nist-sip-dev@antd.nist.gov                              *
 *******************************************************************************/
package gov.nist.sip.instantmessaging.presence.cpimparser;

import java.io.*;
import java.util.*;
import org.xml.sax.*;
import org.xml.sax.helpers.DefaultHandler;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import gov.nist.sip.instantmessaging.*;

/**
 * SAX parser for CPIM presence XML documents.  Builds a {@link PresenceTag} tree
 * (presentity, tuples, status/basic values, contacts, notes) from either a file or
 * an in-memory string.
 */
public class XMLcpimParser extends DefaultHandler {

    private PresenceTag presenceTag;
    private PresentityTag presentityTag;
    private StatusTag statusTag;
    private TupleTag tupleTag;
    private ContactTag contactTag;
    private ValueTag valueTag;
    private NoteTag noteTag;

    // Name of the element currently being parsed; consulted by characters().
    private String element;

    private XMLReader saxParser;

    /**
     * Create a parser and immediately parse the given file.
     *
     * @param fileLocation path of the XML file to parse
     */
    public XMLcpimParser(String fileLocation) {
        try {
            SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
            this.saxParser = saxParserFactory.newSAXParser().getXMLReader();
            saxParser.setContentHandler(this);
            saxParser.setFeature("http://xml.org/sax/features/validation", true);
            // parse the xml specification for the event tags.
            saxParser.parse(fileLocation);
        } catch (SAXParseException spe) {
            spe.printStackTrace();
        } catch (SAXException sxe) {
            sxe.printStackTrace();
        } catch (IOException ioe) {
            // I/O error
            ioe.printStackTrace();
        } catch (Exception pce) {
            // Parser with specified options can't be built
            pce.printStackTrace();
        }
    }

    /**
     * Create a parser without parsing anything yet; use {@link #parseCPIMString(String)}
     * to feed it a document.
     */
    public XMLcpimParser() {
        try {
            SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
            this.saxParser = saxParserFactory.newSAXParser().getXMLReader();
            saxParser.setContentHandler(this);
            saxParser.setFeature("http://xml.org/sax/features/validation", true);
            // parse the xml specification for the event tags.
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Parse a CPIM presence document held in a string.
     *
     * @param body the XML document text
     */
    public void parseCPIMString(String body) {
        try {
            StringReader stringReader = new StringReader(body);
            InputSource inputSource = new InputSource(stringReader);
            this.saxParser.parse(inputSource);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * @return the root presence tag built by the last parse, or null if nothing was parsed
     */
    public PresenceTag getPresenceTag() {
        return presenceTag;
    }

    //===========================================================
    // SAX DocumentHandler methods
    //===========================================================

    public void startDocument() throws SAXException {
        try {
            DebugIM.println("Parsing XML cpim string");
        } catch (Exception e) {
            throw new SAXException("XMLcpimParser error", e);
        }
    }

    public void endDocument() throws SAXException {
        try {
            DebugIM.println("XML cpim string parsed successfully!!!");
        } catch (Exception e) {
            throw new SAXException("XMLcpimParser error", e);
        }
    }

    public void startElement(String namespaceURI,
                             String lName,  // local name
                             String qName,  // qualified name
                             Attributes attrs) throws SAXException {
        element = qName;
        System.out.println("StartElement:" + element);
        if (element.compareToIgnoreCase("presence") == 0) {
            presenceTag = new PresenceTag();
            // NOTE(review): getValue() returns null when the attribute is absent, so a
            // missing "entity" attribute would NPE here - confirm "entity" is mandatory.
            String entity = attrs.getValue("entity").trim();
            presenceTag.setEntity(entity);
        }
        if (element.compareToIgnoreCase("presentity") == 0) {
            presentityTag = new PresentityTag();
            String id = attrs.getValue("id").trim();
            presentityTag.setId(id);
        }
        if (element.compareToIgnoreCase("tuple") == 0) {
            tupleTag = new TupleTag();
            String id = attrs.getValue("id").trim();
            tupleTag.setId(id);
        }
        if (element.compareToIgnoreCase("status") == 0) {
            statusTag = new StatusTag();
        }
        if (element.compareToIgnoreCase("basic") == 0) {
            valueTag = new ValueTag();
        }
        if (element.compareToIgnoreCase("contact") == 0) {
            contactTag = new ContactTag();
            // Check for null BEFORE dereferencing: "priority" is optional and
            // getValue() returns null when it is absent.  (Previously the code called
            // .trim() first, so the null check could never take effect.)
            String priority = attrs.getValue("priority");
            if (priority != null) {
                try {
                    contactTag.setPriority(Float.parseFloat(priority.trim()));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        if (element.compareToIgnoreCase("note") == 0) {
            noteTag = new NoteTag();
        }
    }

    public void endElement(String namespaceURI,
                           String sName,  // simple name
                           String qName   // qualified name
                          ) throws SAXException {
        String element = qName;
        if (element.compareToIgnoreCase("presence") == 0) {
            // Root element: nothing to attach.
        }
        if (element.compareToIgnoreCase("presentity") == 0) {
            presenceTag.setPresentityTag(presentityTag);
        }
        if (element.compareToIgnoreCase("tuple") == 0) {
            presenceTag.addTupleTag(tupleTag);
        }
        if (element.compareToIgnoreCase("status") == 0) {
            tupleTag.setStatusTag(statusTag);
        }
        if (element.compareToIgnoreCase("basic") == 0) {
            statusTag.setValueTag(valueTag);
        }
        if (element.compareToIgnoreCase("contact") == 0) {
            tupleTag.setContactTag(contactTag);
        }
        if (element.compareToIgnoreCase("note") == 0) {
            tupleTag.setNoteTag(noteTag);
        }
    }

    public void characters(char buf[], int offset, int len) throws SAXException {
        String str = new String(buf, offset, len);
        // Ignore whitespace-only text nodes; route character data to the tag whose
        // start element we saw most recently.
        if (str != null && !str.trim().equals("")) {
            if (element.compareToIgnoreCase("basic") == 0) {
                valueTag.setValue(str);
            }
            if (element.compareToIgnoreCase("contact") == 0) {
                contactTag.setContact(str);
            }
            if (element.compareToIgnoreCase("note") == 0) {
                noteTag.setNote(str);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.salesforce; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.net.URI; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import org.apache.gobblin.salesforce.SalesforceConfigurationKeys; import org.apache.http.HttpEntity; import org.apache.http.NameValuePair; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.utils.URIBuilder; import org.apache.http.message.BasicNameValuePair; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.sforce.async.AsyncApiException; import com.sforce.async.BatchInfo; import com.sforce.async.BatchInfoList; import com.sforce.async.BatchStateEnum; import com.sforce.async.BulkConnection; import 
com.sforce.async.ConcurrencyMode; import com.sforce.async.ContentType; import com.sforce.async.JobInfo; import com.sforce.async.OperationEnum; import com.sforce.async.QueryResultList; import com.sforce.soap.partner.PartnerConnection; import com.sforce.ws.ConnectorConfig; import org.apache.gobblin.configuration.ConfigurationKeys; import org.apache.gobblin.configuration.WorkUnitState; import org.apache.gobblin.password.PasswordManager; import org.apache.gobblin.source.extractor.DataRecordException; import org.apache.gobblin.source.extractor.exception.HighWatermarkException; import org.apache.gobblin.source.extractor.exception.RecordCountException; import org.apache.gobblin.source.extractor.exception.RestApiClientException; import org.apache.gobblin.source.extractor.exception.RestApiConnectionException; import org.apache.gobblin.source.extractor.exception.SchemaException; import org.apache.gobblin.source.extractor.extract.Command; import org.apache.gobblin.source.extractor.extract.CommandOutput; import org.apache.gobblin.source.extractor.partition.Partitioner; import org.apache.gobblin.source.jdbc.SqlQueryUtils; import org.apache.gobblin.source.extractor.extract.restapi.RestApiCommand; import org.apache.gobblin.source.extractor.extract.restapi.RestApiCommand.RestApiCommandType; import org.apache.gobblin.source.extractor.extract.restapi.RestApiConnector; import org.apache.gobblin.source.extractor.extract.restapi.RestApiExtractor; import org.apache.gobblin.source.extractor.resultset.RecordSet; import org.apache.gobblin.source.extractor.resultset.RecordSetList; import org.apache.gobblin.source.extractor.schema.Schema; import org.apache.gobblin.source.extractor.utils.InputStreamCSVReader; import org.apache.gobblin.source.extractor.utils.Utils; import org.apache.gobblin.source.extractor.watermark.Predicate; import org.apache.gobblin.source.extractor.watermark.WatermarkType; import org.apache.gobblin.source.workunit.WorkUnit; import lombok.Data; import 
lombok.extern.slf4j.Slf4j; /** * An implementation of salesforce extractor for extracting data from SFDC */ @Slf4j public class SalesforceExtractor extends RestApiExtractor { private static final String SOQL_RESOURCE = "/queryAll"; public static final String SALESFORCE_TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss'.000Z'"; private static final String SALESFORCE_DATE_FORMAT = "yyyy-MM-dd"; private static final String SALESFORCE_HOUR_FORMAT = "HH"; private static final String SALESFORCE_SOAP_SERVICE = "/services/Soap/u"; private static final Gson GSON = new Gson(); private static final int MAX_PK_CHUNKING_SIZE = 250000; private static final int MIN_PK_CHUNKING_SIZE = 100000; private static final int DEFAULT_PK_CHUNKING_SIZE = 200000; private static final String ENABLE_PK_CHUNKING_KEY = "salesforce.enablePkChunking"; private static final String PK_CHUNKING_SIZE_KEY = "salesforce.pkChunkingSize"; private static final int MAX_RETRY_INTERVAL_SECS = 600; // avoid using too many bulk API calls by only allowing PK chunking only if max partitions is configured <= this private static final int PK_CHUNKING_MAX_PARTITIONS_LIMIT = 3; private static final String FETCH_RETRY_LIMIT_KEY = "salesforce.fetchRetryLimit"; private static final int DEFAULT_FETCH_RETRY_LIMIT = 5; private boolean pullStatus = true; private String nextUrl; private BulkConnection bulkConnection = null; private boolean bulkApiInitialRun = true; private JobInfo bulkJob = new JobInfo(); private BufferedReader bulkBufferedReader = null; private List<BatchIdAndResultId> bulkResultIdList = Lists.newArrayList(); private int bulkResultIdCount = 0; private boolean bulkJobFinished = true; private List<String> bulkRecordHeader; private int bulkResultColumCount; private boolean newBulkResultSet = true; private int bulkRecordCount = 0; private int prevBulkRecordCount = 0; private List<String> csvRecord; private final boolean pkChunking; private final int pkChunkingSize; private final SalesforceConnector sfConnector; private 
final int fetchRetryLimit; private final int batchSize; public SalesforceExtractor(WorkUnitState state) { super(state); this.sfConnector = (SalesforceConnector) this.connector; // don't allow pk chunking if max partitions too high or have user specified partitions if (state.getPropAsBoolean(Partitioner.HAS_USER_SPECIFIED_PARTITIONS, false) || state.getPropAsInt(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, ConfigurationKeys.DEFAULT_MAX_NUMBER_OF_PARTITIONS) > PK_CHUNKING_MAX_PARTITIONS_LIMIT) { if (state.getPropAsBoolean(ENABLE_PK_CHUNKING_KEY, false)) { log.warn("Max partitions too high, so PK chunking is not enabled"); } this.pkChunking = false; } else { this.pkChunking = state.getPropAsBoolean(ENABLE_PK_CHUNKING_KEY, false); } this.pkChunkingSize = Math.max(MIN_PK_CHUNKING_SIZE, Math.min(MAX_PK_CHUNKING_SIZE, state.getPropAsInt(PK_CHUNKING_SIZE_KEY, DEFAULT_PK_CHUNKING_SIZE))); // Get batch size from .pull file int tmpBatchSize = state.getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_FETCH_SIZE, ConfigurationKeys.DEFAULT_SOURCE_FETCH_SIZE); this.batchSize = tmpBatchSize == 0 ? 
ConfigurationKeys.DEFAULT_SOURCE_FETCH_SIZE : tmpBatchSize;
  // Tail of the constructor (its opening precedes this chunk): fall back to the
  // default fetch size when no batch size is configured, then read retry limit.
  this.fetchRetryLimit = state.getPropAsInt(FETCH_RETRY_LIMIT_KEY, DEFAULT_FETCH_RETRY_LIMIT);
}

/** Creates the Salesforce-specific REST connector for this work unit. */
@Override
protected RestApiConnector getConnector(WorkUnitState state) {
  return new SalesforceConnector(state);
}

/**
 * Sets whether a further pull is required (true) or not (false).
 */
public void setPullStatus(boolean pullStatus) {
  this.pullStatus = pullStatus;
}

/**
 * Sets the URL for the next pull from Salesforce.
 */
public void setNextUrl(String nextUrl) {
  this.nextUrl = nextUrl;
}

// True once every bulk result set has been fully streamed.
private boolean isBulkJobFinished() {
  return this.bulkJobFinished;
}

private void setBulkJobFinished(boolean bulkJobFinished) {
  this.bulkJobFinished = bulkJobFinished;
}

// True while the current bulk result set has not had its CSV header consumed yet.
public boolean isNewBulkResultSet() {
  return this.newBulkResultSet;
}

public void setNewBulkResultSet(boolean newBulkResultSet) {
  this.newBulkResultSet = newBulkResultSet;
}

/** Delegates authentication to the underlying REST connector. */
@Override
public HttpEntity getAuthentication() throws RestApiConnectionException {
  log.debug("Authenticating salesforce");
  return this.connector.getAuthentication();
}

/**
 * Builds the REST command that fetches the describe (schema) metadata for the entity.
 */
@Override
public List<Command> getSchemaMetadata(String schema, String entity) throws SchemaException {
  log.debug("Build url to retrieve schema");
  return constructGetCommand(this.sfConnector.getFullUri("/sobjects/" + entity.trim() + "/describe"));
}

/**
 * Parses the describe response into a JsonArray of Schema objects,
 * mapping Salesforce field types to the extractor's data types.
 *
 * @param response REST command output whose first value is the describe JSON
 * @throws SchemaException if the response is empty or cannot be parsed
 */
@Override
public JsonArray getSchema(CommandOutput<?, ?> response) throws SchemaException {
  log.info("Get schema from salesforce");
  String output;
  Iterator<String> itr = (Iterator<String>) response.getResults().values().iterator();
  if (itr.hasNext()) {
    output = itr.next();
  } else {
    throw new SchemaException("Failed to get schema from salesforce; REST response has no output");
  }
  JsonArray fieldJsonArray = new JsonArray();
  JsonElement element = GSON.fromJson(output, JsonObject.class);
  JsonObject jsonObject = element.getAsJsonObject();
  try {
    JsonArray array = jsonObject.getAsJsonArray("fields");
    for (JsonElement columnElement : array) {
      JsonObject field = columnElement.getAsJsonObject();
      Schema schema = new Schema();
      schema.setColumnName(field.get("name").getAsString());
      String dataType = field.get("type").getAsString();
      String elementDataType = "string";
      List<String> mapSymbols = null;
      // Translate the Salesforce type into the extractor's canonical type representation.
      JsonObject newDataType =
          this.convertDataType(field.get("name").getAsString(), dataType, elementDataType, mapSymbols);
      log.debug("ColumnName:" + field.get("name").getAsString() + "; old datatype:" + dataType + "; new datatype:"
          + newDataType);
      schema.setDataType(newDataType);
      schema.setLength(field.get("length").getAsLong());
      schema.setPrecision(field.get("precision").getAsInt());
      schema.setScale(field.get("scale").getAsInt());
      schema.setNullable(field.get("nillable").getAsBoolean());
      schema.setFormat(null);
      schema.setComment((field.get("label").isJsonNull() ? null : field.get("label").getAsString()));
      schema
          .setDefaultValue((field.get("defaultValue").isJsonNull() ? null : field.get("defaultValue").getAsString()));
      schema.setUnique(field.get("unique").getAsBoolean());
      // Round-trip through JSON to convert the Schema POJO into a JsonObject element.
      String jsonStr = GSON.toJson(schema);
      JsonObject obj = GSON.fromJson(jsonStr, JsonObject.class).getAsJsonObject();
      fieldJsonArray.add(obj);
    }
  } catch (Exception e) {
    throw new SchemaException("Failed to get schema from salesforce; error - " + e.getMessage(), e);
  }
  return fieldJsonArray;
}

/**
 * Builds the SOQL command that fetches the maximum watermark value,
 * reusing any WHERE clause from the configured query plus the given predicates.
 */
@Override
public List<Command> getHighWatermarkMetadata(String schema, String entity, String watermarkColumn,
    List<Predicate> predicateList) throws HighWatermarkException {
  log.debug("Build url to retrieve high watermark");
  String query = "SELECT " + watermarkColumn + " FROM " + entity;
  String defaultPredicate = " " + watermarkColumn + " != null";
  String defaultSortOrder = " ORDER BY " + watermarkColumn + " desc LIMIT 1";
  String existingPredicate = "";
  // Carry over the WHERE clause of the user-configured query, if any.
  if (this.updatedQuery != null) {
    String queryLowerCase = this.updatedQuery.toLowerCase();
    int startIndex = queryLowerCase.indexOf(" where ");
    if (startIndex > 0) {
      existingPredicate = this.updatedQuery.substring(startIndex);
    }
  }
  query = query + existingPredicate;
  // Strip any LIMIT clause; the default sort order re-adds LIMIT 1.
  String limitString = getLimitFromInputQuery(query);
  query = query.replace(limitString, "");
  Iterator<Predicate> i = predicateList.listIterator();
  while (i.hasNext()) {
    Predicate predicate = i.next();
    query = SqlQueryUtils.addPredicate(query, predicate.getCondition());
  }
  query = SqlQueryUtils.addPredicate(query, defaultPredicate);
  query = query + defaultSortOrder;
  log.info("QUERY: " + query);
  try {
    return constructGetCommand(this.sfConnector.getFullUri(getSoqlUrl(query)));
  } catch (Exception e) {
    throw new HighWatermarkException("Failed to get salesforce url for high watermark; error - " + e.getMessage(), e);
  }
}

/**
 * Extracts the high watermark from the REST response.
 * When a format is given the value is parsed as a date and normalized to yyyyMMddHHmmss.
 *
 * @return the watermark as a long, or -1 when the query returned no records
 */
@Override
public long getHighWatermark(CommandOutput<?, ?> response, String watermarkColumn, String format)
    throws HighWatermarkException {
  log.info("Get high watermark from salesforce");
  String output;
  Iterator<String> itr = (Iterator<String>) response.getResults().values().iterator();
  if (itr.hasNext()) {
    output = itr.next();
  } else {
    throw new HighWatermarkException("Failed to get high watermark from salesforce; REST response has no output");
  }
  JsonElement element = GSON.fromJson(output, JsonObject.class);
  long high_ts;
  try {
    JsonObject jsonObject = element.getAsJsonObject();
    JsonArray jsonArray = jsonObject.getAsJsonArray("records");
    if (jsonArray.size() == 0) {
      return -1;
    }
    String value = jsonObject.getAsJsonArray("records").get(0).getAsJsonObject().get(watermarkColumn).getAsString();
    if (format != null) {
      SimpleDateFormat inFormat = new SimpleDateFormat(format);
      Date date = null;
      try {
        date = inFormat.parse(value);
      } catch (ParseException e) {
        // NOTE(review): a parse failure is only logged; the following format(date)
        // will then NPE and be wrapped by the outer catch — confirm this is intended.
        log.error("ParseException: " + e.getMessage(), e);
      }
      SimpleDateFormat outFormat = new SimpleDateFormat("yyyyMMddHHmmss");
      high_ts = Long.parseLong(outFormat.format(date));
    } else {
      high_ts = Long.parseLong(value);
    }
  } catch (Exception e) {
    throw new HighWatermarkException("Failed to get high watermark from salesforce; error - " + e.getMessage(), e);
  }
  return high_ts;
}

/**
 * Builds the SOQL COUNT() command for the source record count,
 * reusing the configured query's WHERE clause and applying the predicates.
 */
@Override
public List<Command> getCountMetadata(String schema, String entity, WorkUnit workUnit, List<Predicate> predicateList)
    throws RecordCountException {
  log.debug("Build url to retrieve source record count");
  String existingPredicate = "";
  if (this.updatedQuery != null) {
    String queryLowerCase = this.updatedQuery.toLowerCase();
    int startIndex = queryLowerCase.indexOf(" where ");
    if (startIndex > 0) {
      existingPredicate = this.updatedQuery.substring(startIndex);
    }
  }
  String query = "SELECT COUNT() FROM " + entity + existingPredicate;
  String limitString = getLimitFromInputQuery(query);
  query = query.replace(limitString, "");
  try {
    if (isNullPredicate(predicateList)) {
      log.info("QUERY with null predicate: " + query);
      return constructGetCommand(this.sfConnector.getFullUri(getSoqlUrl(query)));
    }
    Iterator<Predicate> i = predicateList.listIterator();
    while (i.hasNext()) {
      Predicate predicate = i.next();
      query = SqlQueryUtils.addPredicate(query, predicate.getCondition());
    }
    // Re-apply the LIMIT from the originally configured query, if it had one.
    query = query + getLimitFromInputQuery(this.updatedQuery);
    log.info("QUERY: " + query);
    return constructGetCommand(this.sfConnector.getFullUri(getSoqlUrl(query)));
  } catch (Exception e) {
    throw new RecordCountException("Failed to get salesforce url for record count; error - " + e.getMessage(), e);
  }
}

/**
 * Reads the record count from the "totalSize" field of the COUNT() response.
 */
@Override
public long getCount(CommandOutput<?, ?> response) throws RecordCountException {
  log.info("Get source record count from salesforce");
  String output;
  Iterator<String> itr = (Iterator<String>) response.getResults().values().iterator();
  if (itr.hasNext()) {
    output = itr.next();
  } else {
    throw new RecordCountException("Failed to get count from salesforce; REST response has no output");
  }
  JsonElement element = GSON.fromJson(output, JsonObject.class);
  long count;
  try {
    JsonObject jsonObject = element.getAsJsonObject();
    count = jsonObject.get("totalSize").getAsLong();
  } catch (Exception e) {
    throw new RecordCountException("Failed to get record count from salesforce; error - " + e.getMessage(), e);
  }
  return count;
}

/**
 * Builds the command that fetches the next page of data records:
 * either the pagination URL from the previous response, or a fresh SOQL query.
 */
@Override
public List<Command> getDataMetadata(String schema, String entity, WorkUnit workUnit, List<Predicate> predicateList)
    throws DataRecordException {
  log.debug("Build url to retrieve data records");
  String query = this.updatedQuery;
  String url = null;
  try {
    // Continue a paged pull using the nextRecordsUrl captured from the last response.
    if (this.getNextUrl() != null && this.pullStatus == true) {
      url = this.getNextUrl();
    } else {
      if (isNullPredicate(predicateList)) {
        log.info("QUERY:" + query);
        return constructGetCommand(this.sfConnector.getFullUri(getSoqlUrl(query)));
      }
      String limitString = getLimitFromInputQuery(query);
      query = query.replace(limitString, "");
      Iterator<Predicate> i = predicateList.listIterator();
      while (i.hasNext()) {
        Predicate predicate = i.next();
        query = SqlQueryUtils.addPredicate(query, predicate.getCondition());
      }
      // When the specific API (queryAll / soft deletes) is active, restrict to deleted rows.
      if (Boolean.valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_SPECIFIC_API_ACTIVE))) {
        query = SqlQueryUtils.addPredicate(query, "IsDeleted = true");
      }
      query = query + limitString;
      log.info("QUERY: " + query);
      url = this.sfConnector.getFullUri(getSoqlUrl(query));
    }
    return constructGetCommand(url);
  } catch (Exception e) {
    throw new DataRecordException("Failed to get salesforce url for data records; error - " + e.getMessage(), e);
  }
}

/**
 * Returns the trailing " limit ..." clause of the query (case-insensitive match),
 * or the empty string when the query has none.
 */
private static String getLimitFromInputQuery(String query) {
  String inputQuery = query.toLowerCase();
  int limitIndex = inputQuery.indexOf(" limit");
  if (limitIndex > 0) {
    return query.substring(limitIndex);
  }
  return "";
}

/**
 * Converts one REST response page into an iterator of records.
 * Side effects: updates pull status / next URL for pagination and strips the
 * "attributes" element from each record.
 */
@Override
public Iterator<JsonElement> getData(CommandOutput<?, ?> response) throws DataRecordException {
  log.debug("Get data records from response");
  String output;
  Iterator<String> itr = (Iterator<String>) response.getResults().values().iterator();
  if (itr.hasNext()) {
    output = itr.next();
  } else {
    throw new DataRecordException("Failed to get data from salesforce; REST response has no output");
  }
  List<JsonElement> rs = Lists.newArrayList();
  JsonElement element = GSON.fromJson(output, JsonObject.class);
  JsonArray partRecords;
  try {
    JsonObject jsonObject = element.getAsJsonObject();
    partRecords = jsonObject.getAsJsonArray("records");
    if (jsonObject.get("done").getAsBoolean()) {
      // No more pages: stop pulling.
      setPullStatus(false);
    } else {
      // Remember the follow-up URL for the next page.
      setNextUrl(this.sfConnector.getFullUri(
          jsonObject.get("nextRecordsUrl").getAsString().replaceAll(this.sfConnector.getServicesDataEnvPath(), "")));
    }
    JsonArray array = Utils.removeElementFromJsonArray(partRecords, "attributes");
    Iterator<JsonElement> li = array.iterator();
    while (li.hasNext()) {
      JsonElement recordElement = li.next();
      rs.add(recordElement);
    }
    return rs.iterator();
  } catch (Exception e) {
    throw new DataRecordException("Failed to get records from salesforce; error - " + e.getMessage(), e);
  }
}

@Override
public boolean getPullStatus() {
  return this.pullStatus;
}

@Override
public String getNextUrl() {
  return this.nextUrl;
}

/**
 * Builds the relative SOQL query URL ("<SOQL_RESOURCE>/?q=<query>").
 */
public static String getSoqlUrl(String soqlQuery) throws RestApiClientException {
  String path = SOQL_RESOURCE + "/";
  NameValuePair pair = new BasicNameValuePair("q", soqlQuery);
  List<NameValuePair> qparams = new ArrayList<>();
  qparams.add(pair);
  return buildUrl(path, qparams);
}

/**
 * Assembles path + query parameters into a properly encoded URL string.
 */
private static String buildUrl(String path, List<NameValuePair> qparams) throws RestApiClientException {
  URIBuilder builder = new URIBuilder();
  builder.setPath(path);
  ListIterator<NameValuePair> i = qparams.listIterator();
  while (i.hasNext()) {
    NameValuePair keyValue = i.next();
    builder.setParameter(keyValue.getName(), keyValue.getValue());
  }
  URI uri;
  try {
    uri = builder.build();
  } catch (Exception e) {
    throw new RestApiClientException("Failed to build url; error - " + e.getMessage(), e);
  }
  return new HttpGet(uri).getURI().toString();
}

// True when there are no predicates to apply (null or empty list).
private static boolean isNullPredicate(List<Predicate> predicateList) {
  if (predicateList == null || predicateList.size() == 0) {
    return true;
  }
  return false;
}

/**
 * Returns the date/time pattern used for watermark values of the given type,
 * or null for unsupported types.
 */
@Override
public String getWatermarkSourceFormat(WatermarkType watermarkType) {
  switch (watermarkType) {
    case TIMESTAMP:
      return "yyyy-MM-dd'T'HH:mm:ss";
    case DATE:
      return "yyyy-MM-dd";
    default:
      return null;
  }
}
/**
 * Builds an hour-granularity predicate ("column op value") with the value
 * converted to the Salesforce hour format.
 */
@Override
public String getHourPredicateCondition(String column, long value, String valueFormat, String operator) {
  log.info("Getting hour predicate from salesforce");
  String Formattedvalue = Utils.toDateTimeFormat(Long.toString(value), valueFormat, SALESFORCE_HOUR_FORMAT);
  return column + " " + operator + " " + Formattedvalue;
}

/**
 * Builds a date-granularity predicate with the value converted to the
 * Salesforce date format.
 */
@Override
public String getDatePredicateCondition(String column, long value, String valueFormat, String operator) {
  log.info("Getting date predicate from salesforce");
  String Formattedvalue = Utils.toDateTimeFormat(Long.toString(value), valueFormat, SALESFORCE_DATE_FORMAT);
  return column + " " + operator + " " + Formattedvalue;
}

/**
 * Builds a timestamp-granularity predicate with the value converted to the
 * Salesforce timestamp format.
 */
@Override
public String getTimestampPredicateCondition(String column, long value, String valueFormat, String operator) {
  log.info("Getting timestamp predicate from salesforce");
  String Formattedvalue = Utils.toDateTimeFormat(Long.toString(value), valueFormat, SALESFORCE_TIMESTAMP_FORMAT);
  return column + " " + operator + " " + Formattedvalue;
}

/**
 * Maps Salesforce field type names to the extractor's canonical type names.
 * Most exotic Salesforce types collapse to "string".
 */
@Override
public Map<String, String> getDataTypeMap() {
  Map<String, String> dataTypeMap = ImmutableMap.<String, String> builder().put("url", "string")
      .put("textarea", "string").put("reference", "string").put("phone", "string").put("masterrecord", "string")
      .put("location", "string").put("id", "string").put("encryptedstring", "string").put("email", "string")
      .put("DataCategoryGroupReference", "string").put("calculated", "string").put("anyType", "string")
      .put("address", "string").put("blob", "string").put("date", "date").put("datetime", "timestamp")
      .put("time", "time").put("object", "string").put("string", "string").put("int", "int").put("long", "long")
      .put("double", "double").put("percent", "double").put("currency", "double").put("decimal", "double")
      .put("boolean", "boolean").put("picklist", "string").put("multipicklist", "string").put("combobox", "string")
      .put("list", "string").put("set", "string").put("map", "string").put("enum", "string").build();
  return dataTypeMap;
}

/**
 * Pulls records through the Salesforce Bulk API. On the first call it creates
 * the bulk job and collects result-set ids; subsequent calls stream batches.
 * Falls back to REST for soft-deleted records once the bulk job is done.
 */
@Override
public Iterator<JsonElement> getRecordSetFromSourceApi(String schema, String entity, WorkUnit workUnit,
    List<Predicate> predicateList) throws IOException {
  log.debug("Getting salesforce data using bulk api");
  RecordSet<JsonElement> rs = null;
  try {
    //Get query result ids in the first run
    //result id is used to construct url while fetching data
    if (this.bulkApiInitialRun == true) {
      // set finish status to false before starting the bulk job
      this.setBulkJobFinished(false);
      this.bulkResultIdList = getQueryResultIds(entity, predicateList);
      log.info("Number of bulk api resultSet Ids:" + this.bulkResultIdList.size());
    }
    // Get data from input stream
    // If bulk load is not finished, get data from the stream
    // Skip empty result sets since they will cause the extractor to terminate early
    while (!this.isBulkJobFinished() && (rs == null || rs.isEmpty())) {
      rs = getBulkData();
    }
    // Set bulkApiInitialRun to false after the completion of first run
    this.bulkApiInitialRun = false;
    // If bulk job is finished, get soft deleted records using Rest API
    boolean isSoftDeletesPullDisabled = Boolean.valueOf(this.workUnit
        .getProp(SalesforceConfigurationKeys.SOURCE_QUERYBASED_SALESFORCE_IS_SOFT_DELETES_PULL_DISABLED));
    if (rs == null || rs.isEmpty()) {
      // Get soft delete records only if IsDeleted column exists and soft deletes pull is not disabled
      if (this.columnList.contains("IsDeleted") && !isSoftDeletesPullDisabled) {
        return this.getSoftDeletedRecords(schema, entity, workUnit, predicateList);
      }
      log.info("Ignoring soft delete records");
    }
    return rs.iterator();
  } catch (Exception e) {
    throw new IOException("Failed to get records using bulk api; error - " + e.getMessage(), e);
  }
}

/**
 * Get soft deleted records using Rest Api
 * @return iterator with deleted records
 */
private Iterator<JsonElement> getSoftDeletedRecords(String schema, String entity, WorkUnit workUnit,
    List<Predicate> predicateList) throws DataRecordException {
  return this.getRecordSet(schema, entity, workUnit, predicateList);
}

/**
 * Authenticates against the Salesforce Bulk API, preferring an existing
 * access token from the REST connector and falling back to SOAP
 * username/password+token login; on success stores {@code bulkConnection}.
 *
 * @return login status (true on success)
 */
public boolean bulkApiLogin() throws Exception {
  log.info("Authenticating salesforce bulk api");
  boolean success = false;
  String hostName = this.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_HOST_NAME);
  String apiVersion = this.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_VERSION);
  if (Strings.isNullOrEmpty(apiVersion)) {
    // Default API version when none configured.
    apiVersion = "29.0";
  }
  String soapAuthEndPoint = hostName + SALESFORCE_SOAP_SERVICE + "/" + apiVersion;
  try {
    ConnectorConfig partnerConfig = new ConnectorConfig();
    if (super.workUnitState.contains(ConfigurationKeys.SOURCE_CONN_USE_PROXY_URL)
        && !super.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_USE_PROXY_URL).isEmpty()) {
      partnerConfig.setProxy(super.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_USE_PROXY_URL),
          super.workUnitState.getPropAsInt(ConfigurationKeys.SOURCE_CONN_USE_PROXY_PORT));
    }
    // Re-use an OAuth access token from the REST connector when available.
    String accessToken = sfConnector.getAccessToken();
    if (accessToken == null) {
      boolean isConnectSuccess = sfConnector.connect();
      if (isConnectSuccess) {
        accessToken = sfConnector.getAccessToken();
      }
    }
    if (accessToken != null) {
      String serviceEndpoint = sfConnector.getInstanceUrl() + SALESFORCE_SOAP_SERVICE + "/" + apiVersion;
      partnerConfig.setSessionId(accessToken);
      partnerConfig.setServiceEndpoint(serviceEndpoint);
    } else {
      // Fall back to username + password + security token SOAP login.
      String securityToken = this.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_SECURITY_TOKEN);
      String password = PasswordManager.getInstance(this.workUnitState)
          .readPassword(this.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_PASSWORD));
      partnerConfig.setUsername(this.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_USERNAME));
      partnerConfig.setPassword(password + securityToken);
    }
    partnerConfig.setAuthEndpoint(soapAuthEndPoint);
    // Side effect only: performing the login populates partnerConfig's session/endpoint.
    new PartnerConnection(partnerConfig);
    String soapEndpoint = partnerConfig.getServiceEndpoint();
    // Derive the async (bulk) endpoint from the SOAP endpoint.
    String restEndpoint = soapEndpoint.substring(0, soapEndpoint.indexOf("Soap/")) + "async/" + apiVersion;
    ConnectorConfig config = new ConnectorConfig();
    config.setSessionId(partnerConfig.getSessionId());
    config.setRestEndpoint(restEndpoint);
    config.setCompression(true);
    config.setTraceFile("traceLogs.txt");
    config.setTraceMessage(false);
    config.setPrettyPrintXml(true);
    if (super.workUnitState.contains(ConfigurationKeys.SOURCE_CONN_USE_PROXY_URL)
        && !super.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_USE_PROXY_URL).isEmpty()) {
      config.setProxy(super.workUnitState.getProp(ConfigurationKeys.SOURCE_CONN_USE_PROXY_URL),
          super.workUnitState.getPropAsInt(ConfigurationKeys.SOURCE_CONN_USE_PROXY_PORT));
    }
    this.bulkConnection = new BulkConnection(config);
    success = true;
  } catch (RuntimeException e) {
    throw new RuntimeException("Failed to connect to salesforce bulk api; error - " + e, e);
  }
  return success;
}

/**
 * Get Record set using salesforce specific API(Bulk API)
 * Creates a bulk query job (optionally with PK chunking), waits for its
 * batches to complete, and returns the (batchId, resultId) pairs.
 * @param entity/tablename
 * @param predicateList of all predicate conditions
 * @return iterator with batch of records
 */
private List<BatchIdAndResultId> getQueryResultIds(String entity, List<Predicate> predicateList) throws Exception {
  if (!bulkApiLogin()) {
    throw new IllegalArgumentException("Invalid Login");
  }
  try {
    boolean usingPkChunking = false;
    // Set bulk job attributes
    this.bulkJob.setObject(entity);
    this.bulkJob.setOperation(OperationEnum.query);
    this.bulkJob.setConcurrencyMode(ConcurrencyMode.Parallel);
    // use pk chunking if pk chunking is configured and the expected record count is larger than the pk chunking size
    if (this.pkChunking && getExpectedRecordCount() > this.pkChunkingSize) {
      log.info("Enabling pk chunking with size {}", this.pkChunkingSize);
      this.bulkConnection.addHeader("Sforce-Enable-PKChunking", "chunkSize=" + this.pkChunkingSize);
      usingPkChunking = true;
    }
    // Result type as CSV
    this.bulkJob.setContentType(ContentType.CSV);
    this.bulkJob = this.bulkConnection.createJob(this.bulkJob);
    this.bulkJob = this.bulkConnection.getJobStatus(this.bulkJob.getId());
    // Construct query with the predicates
    String query = this.updatedQuery;
    if (!isNullPredicate(predicateList)) {
      String limitString = getLimitFromInputQuery(query);
      query = query.replace(limitString, "");
      Iterator<Predicate> i = predicateList.listIterator();
      while (i.hasNext()) {
        Predicate predicate = i.next();
        query = SqlQueryUtils.addPredicate(query, predicate.getCondition());
      }
      query = query + limitString;
    }
    log.info("QUERY:" + query);
    ByteArrayInputStream bout = new ByteArrayInputStream(query.getBytes(ConfigurationKeys.DEFAULT_CHARSET_ENCODING));
    BatchInfo bulkBatchInfo = this.bulkConnection.createBatchFromStream(this.bulkJob, bout);
    // Scale the polling interval with the expected batch size, capped at MAX_RETRY_INTERVAL_SECS.
    long expectedSizePerBatch = usingPkChunking ? this.pkChunkingSize : this.getExpectedRecordCount();
    int retryInterval = Math.min(MAX_RETRY_INTERVAL_SECS,
        30 + (int) Math.ceil((float) expectedSizePerBatch / 10000) * 2);
    log.info("Salesforce bulk api retry interval in seconds:" + retryInterval);
    // Get batch info with complete resultset (info id - refers to the resultset id corresponding to entire resultset)
    bulkBatchInfo = this.bulkConnection.getBatchInfo(this.bulkJob.getId(), bulkBatchInfo.getId());
    // wait for completion, failure, or formation of PK chunking batches
    while ((bulkBatchInfo.getState() != BatchStateEnum.Completed)
        && (bulkBatchInfo.getState() != BatchStateEnum.Failed)
        && (!usingPkChunking || bulkBatchInfo.getState() != BatchStateEnum.NotProcessed)) {
      Thread.sleep(retryInterval * 1000);
      bulkBatchInfo = this.bulkConnection.getBatchInfo(this.bulkJob.getId(), bulkBatchInfo.getId());
      log.debug("Bulk Api Batch Info:" + bulkBatchInfo);
      log.info("Waiting for bulk resultSetIds");
    }
    // Wait for pk chunking batches
    BatchInfoList batchInfoList = this.bulkConnection.getBatchInfoList(this.bulkJob.getId());
    if (usingPkChunking && bulkBatchInfo.getState() == BatchStateEnum.NotProcessed) {
      bulkBatchInfo = waitForPkBatches(batchInfoList, retryInterval);
    }
    if (bulkBatchInfo.getState() == BatchStateEnum.Failed) {
      log.error("Bulk batch failed: " + bulkBatchInfo.toString());
      throw new RuntimeException("Failed to get bulk batch info for jobId " + bulkBatchInfo.getJobId()
          + " error - " + bulkBatchInfo.getStateMessage());
    }
    // Get resultset ids of all the batches from the batch info list
    List<BatchIdAndResultId> batchIdAndResultIdList = Lists.newArrayList();
    for (BatchInfo bi : batchInfoList.getBatchInfo()) {
      QueryResultList list = this.bulkConnection.getQueryResultList(this.bulkJob.getId(), bi.getId());
      for (String result : list.getResult()) {
        batchIdAndResultIdList.add(new BatchIdAndResultId(bi.getId(), result));
      }
    }
    log.info("QueryResultList: " + batchIdAndResultIdList);
    return batchIdAndResultIdList;
  } catch (RuntimeException | AsyncApiException | InterruptedException e) {
    throw new RuntimeException(
        "Failed to get query result ids from salesforce using bulk api; error - " + e.getMessage(), e);
  }
}

/**
 * Get a buffered reader wrapping the query result stream for the result with the specified index
 * @param index index the {@link #bulkResultIdList}
 * @return a {@link BufferedReader}
 * @throws AsyncApiException
 */
private BufferedReader getBulkBufferedReader(int index) throws AsyncApiException {
  return new BufferedReader(new InputStreamReader(
      this.bulkConnection.getQueryResultStream(this.bulkJob.getId(), this.bulkResultIdList.get(index).getBatchId(),
          this.bulkResultIdList.get(index).getResultId()),
      ConfigurationKeys.DEFAULT_CHARSET_ENCODING));
}

/**
 * Fetch records into a {@link RecordSetList} up to the configured batch size {@link #batchSize}. This batch is not
 * the entire Salesforce result batch. It is an internal batch in the extractor for buffering a subset of the result
 * stream that comes from a Salesforce batch for more efficient processing.
 * @param rs the record set to fetch into
 * @param initialRecordCount Initial record count to use. This should correspond to the number of records already in rs.
 *        This is used to limit the number of records returned in rs to {@link #batchSize}.
 * @throws DataRecordException
 * @throws IOException
 */
private void fetchResultBatch(RecordSetList<JsonElement> rs, int initialRecordCount)
    throws DataRecordException, IOException {
  int recordCount = initialRecordCount;
  // Stream the resultset through CSV reader to identify columns in each record
  InputStreamCSVReader reader = new InputStreamCSVReader(this.bulkBufferedReader);
  // Get header if it is first run of a new resultset
  if (this.isNewBulkResultSet()) {
    this.bulkRecordHeader = reader.nextRecord();
    this.bulkResultColumCount = this.bulkRecordHeader.size();
    this.setNewBulkResultSet(false);
  }
  // Get record from CSV reader stream
  while ((this.csvRecord = reader.nextRecord()) != null) {
    // Convert CSV record to JsonObject
    JsonObject jsonObject = Utils.csvToJsonObject(this.bulkRecordHeader, this.csvRecord, this.bulkResultColumCount);
    rs.add(jsonObject);
    recordCount++;
    this.bulkRecordCount++;
    // Insert records in record set until it reaches the batch size
    if (recordCount >= batchSize) {
      log.info("Total number of records processed so far: " + this.bulkRecordCount);
      break;
    }
  }
}

/**
 * Reinitialize the state of {@link #bulkBufferedReader} to handle network disconnects
 * @throws IOException
 * @throws AsyncApiException
 */
private void reinitializeBufferedReader() throws IOException, AsyncApiException {
  // close reader and get a new input stream to reconnect to resolve intermittent network errors
  this.bulkBufferedReader.close();
  this.bulkBufferedReader = getBulkBufferedReader(this.bulkResultIdCount - 1);
  // if the result set is partially processed then we need to skip over processed records
  if (!isNewBulkResultSet()) {
    List<String> lastCsvRecord = null;
    InputStreamCSVReader reader = new InputStreamCSVReader(this.bulkBufferedReader);
    // skip header
    reader.nextRecord();
    int recordsToSkip = this.bulkRecordCount - this.prevBulkRecordCount;
    log.info("Skipping {} records on retry: ", recordsToSkip);
    for (int i = 0; i < recordsToSkip; i++) {
      lastCsvRecord = reader.nextRecord();
    }
    // make sure the last record processed before the error was the last record skipped so that the next
    // unprocessed record is processed in the next call to fetchResultBatch()
    if (recordsToSkip > 0) {
      if (!this.csvRecord.equals(lastCsvRecord)) {
        throw new RuntimeException("Repositioning after reconnecting did not point to the expected record");
      }
    }
  }
}

/**
 * Fetch a result batch with retry for network errors
 * @param rs the {@link RecordSetList} to fetch into
 */
private void fetchResultBatchWithRetry(RecordSetList<JsonElement> rs)
    throws AsyncApiException, DataRecordException, IOException {
  boolean success = false;
  int retryCount = 0;
  int recordCountBeforeFetch = this.bulkRecordCount;
  do {
    try {
      // reinitialize the reader to establish a new connection to handle transient network errors
      if (retryCount > 0) {
        reinitializeBufferedReader();
      }
      // on retries there may already be records in rs, so pass the number of records as the initial count
      fetchResultBatch(rs, this.bulkRecordCount - recordCountBeforeFetch);
      success = true;
    } catch (IOException e) {
      if (retryCount < this.fetchRetryLimit) {
        log.info("Exception while fetching data, retrying: " + e.getMessage(), e);
        retryCount++;
      } else {
        log.error("Exception while fetching data: " + e.getMessage(), e);
        throw e;
      }
    }
  } while (!success);
}

/**
 * Get data from the bulk api input stream
 * @return record set with each record as a JsonObject
 */
private RecordSet<JsonElement> getBulkData() throws DataRecordException {
  log.debug("Processing bulk api batch...");
  RecordSetList<JsonElement> rs = new RecordSetList<>();
  try {
    // if Buffer is empty then get stream for the new resultset id
    if (this.bulkBufferedReader == null || !this.bulkBufferedReader.ready()) {
      // log the number of records from each result set after it is processed (bulkResultIdCount > 0)
      if (this.bulkResultIdCount > 0) {
        log.info("Result set {} had {} records", this.bulkResultIdCount,
            this.bulkRecordCount - this.prevBulkRecordCount);
      }
      // if there is unprocessed resultset id then get result stream for that id
      if (this.bulkResultIdCount < this.bulkResultIdList.size()) {
        log.info("Stream resultset for resultId:" + this.bulkResultIdList.get(this.bulkResultIdCount));
        this.setNewBulkResultSet(true);
        if (this.bulkBufferedReader != null) {
          this.bulkBufferedReader.close();
        }
        this.bulkBufferedReader = getBulkBufferedReader(this.bulkResultIdCount);
        this.bulkResultIdCount++;
        this.prevBulkRecordCount = bulkRecordCount;
      } else {
        // if result stream processed for all resultset ids then finish the bulk job
        log.info("Bulk job is finished");
        this.setBulkJobFinished(true);
        return rs;
      }
    }
    // fetch a batch of results with retry for network errors
    fetchResultBatchWithRetry(rs);
  } catch (Exception e) {
    throw new DataRecordException("Failed to get records from salesforce; error - " + e.getMessage(), e);
  }
  return rs;
}

/** Closes the bulk job on the server unless it is already in the "Closed" state. */
@Override
public void closeConnection() throws Exception {
  if (this.bulkConnection != null
      && !this.bulkConnection.getJobStatus(this.bulkJob.getId()).getState().toString().equals("Closed")) {
    log.info("Closing salesforce bulk job connection");
    this.bulkConnection.closeJob(this.bulkJob.getId());
  }
}

/** Wraps a REST GET query string into a single-element command list. */
public static List<Command> constructGetCommand(String restQuery) {
  return Arrays.asList(new RestApiCommand().build(Arrays.asList(restQuery), RestApiCommandType.GET));
}

/**
 * Waits for the PK batches to complete. The wait will stop after all batches are complete or on the first failed batch
 * @param batchInfoList list of batch info
 * @param retryInterval the polling interval
 * @return the last {@link BatchInfo} processed
 * @throws InterruptedException
 * @throws AsyncApiException
 */
private BatchInfo waitForPkBatches(BatchInfoList batchInfoList, int retryInterval)
    throws InterruptedException, AsyncApiException {
  BatchInfo batchInfo = null;
  BatchInfo[] batchInfos = batchInfoList.getBatchInfo();
  // Wait for all batches other than the first one. The first one is not processed in PK chunking mode
  for (int i = 1; i < batchInfos.length; i++) {
    BatchInfo bi = batchInfos[i];
    // get refreshed job status
    bi = this.bulkConnection.getBatchInfo(this.bulkJob.getId(), bi.getId());
    while ((bi.getState() != BatchStateEnum.Completed) && (bi.getState() != BatchStateEnum.Failed)) {
      Thread.sleep(retryInterval * 1000);
      bi = this.bulkConnection.getBatchInfo(this.bulkJob.getId(), bi.getId());
      log.debug("Bulk Api Batch Info:" + bi);
      log.info("Waiting for bulk resultSetIds");
    }
    batchInfo = bi;
    // exit if there was a failure
    if (batchInfo.getState() == BatchStateEnum.Failed) {
      break;
    }
  }
  return batchInfo;
}

// Immutable pair of a bulk batch id and one of its result ids.
@Data
private static class BatchIdAndResultId {
  private final String batchId;
  private final String resultId;
}
// Closing brace of the enclosing extractor class (its declaration precedes this chunk).
}
/*
 * Copyright 2014 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.apiman.gateway.engine.policies.auth;

import io.apiman.gateway.engine.async.AsyncResultImpl;
import io.apiman.gateway.engine.async.IAsyncResult;
import io.apiman.gateway.engine.async.IAsyncResultHandler;
import io.apiman.gateway.engine.beans.ApiRequest;
import io.apiman.gateway.engine.components.IJdbcComponent;
import io.apiman.gateway.engine.components.jdbc.IJdbcClient;
import io.apiman.gateway.engine.components.jdbc.IJdbcConnection;
import io.apiman.gateway.engine.components.jdbc.IJdbcResultSet;
import io.apiman.gateway.engine.components.jdbc.JdbcOptionsBean;
import io.apiman.gateway.engine.policies.AuthorizationPolicy;
import io.apiman.gateway.engine.policies.config.basicauth.JDBCIdentitySource;
import io.apiman.gateway.engine.policies.config.basicauth.JDBCType;
import io.apiman.gateway.engine.policy.IPolicyContext;

import java.util.HashSet;
import java.util.Set;

import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;

import org.apache.commons.codec.digest.DigestUtils;

/**
 * An identity validator that uses the static information in the config
 * to validate the user.
 *
 * @author eric.wittmann@redhat.com
 */
public class JDBCIdentityValidator implements IIdentityValidator<JDBCIdentitySource> {

    /**
     * Constructor.
     */
    public JDBCIdentityValidator() {
    }

    /**
     * Validates the given credentials against the configured JDBC source.
     * The password is first hashed according to the configured algorithm,
     * then a query is run with (username, hashedPassword) as parameters;
     * any returned row means the credentials are valid.
     *
     * @see io.apiman.gateway.engine.policies.auth.IIdentityValidator#validate(String, String, ApiRequest, IPolicyContext, Object, IAsyncResultHandler)
     */
    @Override
    public void validate(final String username, final String password, final ApiRequest request,
            final IPolicyContext context, final JDBCIdentitySource config,
            final IAsyncResultHandler<Boolean> handler) {
        // Hash the password per config before comparing against the stored value.
        // NOTE(review): MD5/SHA-1 (and bare SHA-2 digests) are weak for password
        // storage; prefer bcrypt/scrypt/argon2 if the schema can be migrated.
        String sqlPwd = password;
        switch (config.getHashAlgorithm()) {
            case MD5:
                sqlPwd = DigestUtils.md5Hex(password);
                break;
            case SHA1:
                sqlPwd = DigestUtils.sha1Hex(password);
                break;
            case SHA256:
                sqlPwd = DigestUtils.sha256Hex(password);
                break;
            case SHA384:
                sqlPwd = DigestUtils.sha384Hex(password);
                break;
            case SHA512:
                sqlPwd = DigestUtils.sha512Hex(password);
                break;
            case None:
            default:
                break;
        }
        final String query = config.getQuery();
        final String queryUsername = username;
        final String queryPassword = sqlPwd;

        IJdbcClient client;
        try {
            client = createClient(context, config);
        } catch (Throwable e) {
            // Client creation failed (bad config / missing datasource): report as error.
            handler.handle(AsyncResultImpl.create(e, Boolean.class));
            return;
        }

        client.connect(new IAsyncResultHandler<IJdbcConnection>() {
            @Override
            public void handle(IAsyncResult<IJdbcConnection> result) {
                if (result.isError()) {
                    handler.handle(AsyncResultImpl.create(result.getError(), Boolean.class));
                } else {
                    validate(result.getResult(), query, queryUsername, queryPassword, context, config, handler);
                }
            }
        });
    }

    /**
     * Creates the appropriate jdbc client, either from a JNDI datasource or
     * from an explicit JDBC URL, depending on the configured type.
     *
     * @param context the policy context used to resolve the JDBC component
     * @param config the identity source configuration
     * @throws Throwable if no client can be created for the configured type
     */
    private IJdbcClient createClient(IPolicyContext context, JDBCIdentitySource config) throws Throwable {
        IJdbcComponent jdbcComponent = context.getComponent(IJdbcComponent.class);
        // A null type defaults to datasource lookup for backwards compatibility.
        if (config.getType() == JDBCType.datasource || config.getType() == null) {
            DataSource ds = lookupDatasource(config);
            return jdbcComponent.create(ds);
        }
        if (config.getType() == JDBCType.url) {
            JdbcOptionsBean options = new JdbcOptionsBean();
            options.setJdbcUrl(config.getJdbcUrl());
            options.setUsername(config.getUsername());
            options.setPassword(config.getPassword());
            options.setAutoCommit(true);
            return jdbcComponent.createStandalone(options);
        }
        throw new Exception("Unknown JDBC options."); //$NON-NLS-1$
    }

    /**
     * Runs the credential query; credentials are valid when at least one row
     * is returned. On success, optionally continues with role extraction.
     *
     * @param connection an open JDBC connection (closed by this method or its continuation)
     * @param query the parameterized credential query
     * @param username the username bound as the first query parameter
     * @param context the policy context
     * @param password the (already hashed) password bound as the second parameter
     * @param config the identity source configuration
     * @param handler receives the validation outcome
     */
    protected void validate(final IJdbcConnection connection, final String query, final String username,
            final String password, final IPolicyContext context, final JDBCIdentitySource config,
            final IAsyncResultHandler<Boolean> handler) {
        IAsyncResultHandler<IJdbcResultSet> queryHandler = new IAsyncResultHandler<IJdbcResultSet>() {
            @Override
            public void handle(IAsyncResult<IJdbcResultSet> result) {
                if (result.isError()) {
                    closeQuietly(connection);
                    handler.handle(AsyncResultImpl.create(result.getError(), Boolean.class));
                } else {
                    boolean validated = false;
                    IJdbcResultSet resultSet = result.getResult();
                    // Any row at all means the username/password pair matched.
                    if (resultSet.next()) {
                        validated = true;
                    }
                    resultSet.close();
                    if (validated && config.isExtractRoles()) {
                        // Role extraction reuses (and eventually closes) the connection.
                        extractRoles(connection, username, context, config, handler);
                    } else {
                        closeQuietly(connection);
                        handler.handle(AsyncResultImpl.create(validated));
                    }
                }
            }
        };
        connection.query(queryHandler, query, username, password);
    }

    /**
     * Runs the configured role query for the user and stores the resulting role
     * names on the policy context, then reports successful validation.
     *
     * @param connection an open JDBC connection (always closed by this method)
     * @param username the username bound as the query parameter
     * @param context the policy context receiving the extracted roles
     * @param config the identity source configuration (provides the role query)
     * @param handler receives the validation outcome
     */
    protected void extractRoles(final IJdbcConnection connection, final String username,
            final IPolicyContext context, final JDBCIdentitySource config,
            final IAsyncResultHandler<Boolean> handler) {
        String roleQuery = config.getRoleQuery();
        IAsyncResultHandler<IJdbcResultSet> roleHandler = new IAsyncResultHandler<IJdbcResultSet>() {
            @Override
            public void handle(IAsyncResult<IJdbcResultSet> result) {
                if (result.isError()) {
                    closeQuietly(connection);
                    handler.handle(AsyncResultImpl.create(result.getError(), Boolean.class));
                } else {
                    Set<String> extractedRoles = new HashSet<>();
                    IJdbcResultSet resultSet = result.getResult();
                    // Each row's first column is one role name.
                    while (resultSet.next()) {
                        String roleName = resultSet.getString(1);
                        extractedRoles.add(roleName);
                    }
                    // Fix: close the result set (was leaked; validate() closes its result set).
                    resultSet.close();
                    context.setAttribute(AuthorizationPolicy.AUTHENTICATED_USER_ROLES, extractedRoles);
                    closeQuietly(connection);
                    handler.handle(AsyncResultImpl.create(true));
                }
            }
        };
        connection.query(roleHandler, roleQuery, username);
    }

    /**
     * Closes the connection, swallowing any close-time failure (the outcome has
     * already been decided by the time this is called).
     *
     * @param connection the connection to close
     */
    protected void closeQuietly(IJdbcConnection connection) {
        try {
            connection.close();
        } catch (Exception e) {
            // TODO log this error
        }
    }

    /**
     * Lookup the datasource from JNDI. Tries the configured path first, then
     * falls back to the "java:comp/env/" prefixed form.
     *
     * @param config the identity source configuration
     */
    private DataSource lookupDatasource(JDBCIdentitySource config) {
        DataSource ds;
        try {
            InitialContext ctx = new InitialContext();
            ds = lookupDS(ctx, config.getDatasourcePath());
            if (ds == null) {
                ds = lookupDS(ctx, "java:comp/env/" + config.getDatasourcePath()); //$NON-NLS-1$
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        if (ds == null) {
            throw new RuntimeException("Datasource not found: " + config.getDatasourcePath()); //$NON-NLS-1$
        }
        return ds;
    }

    /**
     * Lookup the datasource from JNDI.
     *
     * @param ctx the JNDI context
     * @param path the lookup path
     * @return the datasource, or null when the name is not bound
     */
    private DataSource lookupDS(InitialContext ctx, String path) {
        try {
            return (DataSource) ctx.lookup(path);
        } catch (NamingException e) {
            return null;
        }
    }
}
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.progress.impl;

import com.intellij.codeWithMe.ClientId;
import com.intellij.diagnostic.ThreadDumper;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationEx;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.application.ex.ApplicationUtil;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.wm.ex.ProgressIndicatorEx;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.SystemProperties;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.containers.ConcurrentLongObjectMap;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import javax.swing.*;
import java.io.StringWriter;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;

/**
 * Base {@link ProgressManager} implementation: tracks which threads run under which
 * {@link ProgressIndicator}, routes checkCanceled() to the right indicator/hooks, runs
 * tasks synchronously or asynchronously, and supports thread prioritization.
 * Most shared state is guarded by the {@code threadsUnderIndicator} monitor.
 */
public class CoreProgressManager extends ProgressManager implements Disposable {
  private static final Logger LOG = Logger.getInstance(CoreProgressManager.class);

  // Period (ms) for polling non-standard indicators' checkCanceled().
  static final int CHECK_CANCELED_DELAY_MILLIS = 10;
  // Count of processes currently running with a null indicator (see computeUnderProgress).
  private final AtomicInteger myUnsafeProgressCount = new AtomicInteger(0);

  // When false (-Didea.ProcessCanceledException=disabled), canceled indicators never
  // switch checkCanceled() into the indicator-checking mode.
  public static final boolean ENABLED = !"disabled".equals(System.getProperty("idea.ProcessCanceledException"));
  private static CheckCanceledHook ourCheckCanceledHook;
  private ScheduledFuture<?> myCheckCancelledFuture; // guarded by threadsUnderIndicator

  // indicator -> threads which are running under this indicator.
  // THashMap is avoided here because of tombstones overhead
  private static final Map<ProgressIndicator, Set<Thread>> threadsUnderIndicator = new HashMap<>(); // guarded by threadsUnderIndicator
  // the active indicator for the thread id
  private static final ConcurrentLongObjectMap<ProgressIndicator> currentIndicators = ContainerUtil.createConcurrentLongObjectMap();
  // top-level indicators for the thread id
  private static final ConcurrentLongObjectMap<ProgressIndicator> threadTopLevelIndicators = ContainerUtil.createConcurrentLongObjectMap();
  // threads which are running under canceled indicator
  // THashSet is avoided here because of possible tombstones overhead
  static final Set<Thread> threadsUnderCanceledIndicator = new HashSet<>(); // guarded by threadsUnderIndicator

  @NotNull private static volatile CheckCanceledBehavior ourCheckCanceledBehavior = CheckCanceledBehavior.NONE;

  // What doCheckCanceled() should do: nothing, run hooks only, or also poll the indicator.
  private enum CheckCanceledBehavior {NONE, ONLY_HOOKS, INDICATOR_PLUS_HOOKS}

  /**
   * active (i.e. which have {@link #executeProcessUnderProgress(Runnable, ProgressIndicator)} method running) indicators
   * which are not inherited from {@link StandardProgressIndicator}.
   * for them an extra processing thread (see {@link #myCheckCancelledFuture}) has to be run
   * to call their non-standard {@link ProgressIndicator#checkCanceled()} method periodically.
   * Poor-man Multiset here (instead of a set) is for simplifying add/remove indicators on process-with-progress start/end with possibly identical indicators.
   * ProgressIndicator -> count of this indicator occurrences in this multiset.
   */
  private static final Map<ProgressIndicator, AtomicInteger> nonStandardIndicators = new ConcurrentHashMap<>();

  /**
   * true if running in non-cancelable section started with
   * {@link #executeNonCancelableSection(Runnable)} in this thread
   */
  private static final ThreadLocal<Boolean> isInNonCancelableSection = new ThreadLocal<>(); // do not supply initial value to conserve memory

  // Starts the periodic poller that calls checkCanceled() on non-standard indicators.
  // must be under threadsUnderIndicator lock
  private void startBackgroundNonStandardIndicatorsPing() {
    if (myCheckCancelledFuture != null) {
      return;
    }
    myCheckCancelledFuture = AppExecutorUtil.getAppScheduledExecutorService().scheduleWithFixedDelay(() -> {
      for (ProgressIndicator indicator : nonStandardIndicators.keySet()) {
        try {
          indicator.checkCanceled();
        }
        catch (ProcessCanceledException e) {
          // Translate the poll result into the regular cancellation bookkeeping.
          indicatorCanceled(indicator);
        }
      }
    }, 0, CHECK_CANCELED_DELAY_MILLIS, TimeUnit.MILLISECONDS);
  }

  // must be under threadsUnderIndicator lock
  private void stopBackgroundNonStandardIndicatorsPing() {
    if (myCheckCancelledFuture != null) {
      myCheckCancelledFuture.cancel(true);
      myCheckCancelledFuture = null;
    }
  }

  @Override
  public void dispose() {
    synchronized (threadsUnderIndicator) {
      stopBackgroundNonStandardIndicatorsPing();
    }
  }

  /** Snapshot of all indicators that currently have at least one thread running under them. */
  @NotNull
  @ApiStatus.Internal
  public static List<ProgressIndicator> getCurrentIndicators() {
    synchronized (threadsUnderIndicator) {
      return new ArrayList<>(threadsUnderIndicator.keySet());
    }
  }

  /**
   * Runs the registered check-canceled hook, if any.
   * @return true if the hook reported it did potentially time-consuming work.
   */
  @ApiStatus.Internal
  public static boolean runCheckCanceledHooks(@Nullable ProgressIndicator indicator) {
    CheckCanceledHook hook = ourCheckCanceledHook;
    return hook != null && hook.runHook(indicator);
  }

  @Override
  protected void doCheckCanceled() throws ProcessCanceledException {
    // Read the volatile once; behavior may be flipped concurrently by updateShouldCheckCanceled().
    CheckCanceledBehavior behavior = ourCheckCanceledBehavior;
    if (behavior == CheckCanceledBehavior.NONE) return;

    if (behavior == CheckCanceledBehavior.INDICATOR_PLUS_HOOKS) {
      ProgressIndicator progress = getProgressIndicator();
      if (progress != null) {
        progress.checkCanceled();
      }
    }
    else {
      runCheckCanceledHooks(null);
    }
  }

  @Override
  public boolean hasProgressIndicator() {
    return getProgressIndicator() != null;
  }

  @Override
  public boolean hasUnsafeProgressIndicator() {
    return myUnsafeProgressCount.get() > 0;
  }

  @Override
  public boolean hasModalProgressIndicator() {
    synchronized (threadsUnderIndicator) {
      return ContainerUtil.or(threadsUnderIndicator.keySet(), i -> i.isModal());
    }
  }

  // run in current thread
  @Override
  public void runProcess(@NotNull Runnable process, @Nullable ProgressIndicator progress) {
    if (progress != null) {
      assertNoOtherThreadUnder(progress);
    }
    executeProcessUnderProgress(() -> {
      try {
        try {
          // Start the indicator if the caller didn't; wrap any non-runtime failure.
          if (progress != null && !progress.isRunning()) {
            progress.start();
          }
        }
        catch (RuntimeException e) {
          throw e;
        }
        catch (Throwable e) {
          throw new RuntimeException(e);
        }
        process.run();
      }
      finally {
        // Symmetric stop: only stop what is still running, and finish Ex indicators.
        if (progress != null && progress.isRunning()) {
          progress.stop();
          if (progress instanceof ProgressIndicatorEx) {
            ((ProgressIndicatorEx)progress).processFinish();
          }
        }
      }
    }, progress);
  }

  // Logs (with a thread dump) when some thread already runs under the given indicator,
  // since starting/stopping it from here would race with that thread.
  private static void assertNoOtherThreadUnder(@NotNull ProgressIndicator progress) {
    synchronized (threadsUnderIndicator) {
      Collection<Thread> threads = threadsUnderIndicator.get(progress);
      Thread other = threads == null || threads.isEmpty() ? null : threads.iterator().next();
      if (other != null) {
        if (other == Thread.currentThread()) {
          LOG.error("This thread is already running under this indicator, starting/stopping it here might be a data race");
        }
        else {
          StringWriter dump = new StringWriter();
          ThreadDumper.dumpCallStack(other, dump, other.getStackTrace());
          LOG.error("Other thread is already running under this indicator, starting/stopping it here might be a data race. Its thread dump:\n" + dump);
        }
      }
    }
  }

  // run in the current thread (?)
  @Override
  public void executeNonCancelableSection(@NotNull Runnable runnable) {
    computeInNonCancelableSection(() -> {
      runnable.run();
      return null;
    });
  }

  // FROM EDT: bg OR calling if can't
  @Override
  public <T, E extends Exception> T computeInNonCancelableSection(@NotNull ThrowableComputable<T, E> computable) throws E {
    try {
      if (isInNonCancelableSection()) {
        // Already non-cancelable; don't re-enter the bookkeeping.
        return computable.compute();
      }
      else {
        try {
          isInNonCancelableSection.set(Boolean.TRUE);
          return computeUnderProgress(computable, NonCancelableIndicator.INSTANCE);
        }
        finally {
          isInNonCancelableSection.remove();
        }
      }
    }
    catch (ProcessCanceledException e) {
      throw new RuntimeException("PCE is not expected in non-cancellable section execution", e);
    }
  }

  @Override
  public boolean runProcessWithProgressSynchronously(@NotNull Runnable process,
                                                     @NotNull @NlsContexts.ProgressTitle String progressTitle,
                                                     boolean canBeCanceled,
                                                     @Nullable Project project) {
    return runProcessWithProgressSynchronously(process, progressTitle, canBeCanceled, project, null);
  }

  @Override
  public <T, E extends Exception> T runProcessWithProgressSynchronously(@NotNull ThrowableComputable<T, E> process,
                                                                        @NotNull String progressTitle,
                                                                        boolean canBeCanceled,
                                                                        @Nullable Project project) throws E {
    // Capture result/exception from the modal task and rethrow on the calling thread.
    AtomicReference<T> result = new AtomicReference<>();
    AtomicReference<Throwable> exception = new AtomicReference<>();
    runProcessWithProgressSynchronously(new Task.Modal(project, progressTitle, canBeCanceled) {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        try {
          T compute = process.compute();
          result.set(compute);
        }
        catch (Throwable t) {
          exception.set(t);
        }
      }
    });

    Throwable t = exception.get();
    if (t != null) {
      ExceptionUtil.rethrowUnchecked(t);
      // Anything left must be the declared checked exception type E.
      @SuppressWarnings("unchecked") E e = (E)t;
      throw e;
    }
    return result.get();
  }

  // FROM EDT: bg OR calling if can't
  @Override
  public boolean runProcessWithProgressSynchronously(@NotNull Runnable process,
                                                     @NotNull @NlsContexts.DialogTitle String progressTitle,
                                                     boolean canBeCanceled,
                                                     @Nullable Project project,
                                                     @Nullable JComponent parentComponent) {
    Task.Modal task = new Task.Modal(project, parentComponent, progressTitle, canBeCanceled) {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        process.run();
      }
    };
    return runProcessWithProgressSynchronously(task);
  }

  // bg; runnables on UI/EDT?
  @Override
  public void runProcessWithProgressAsynchronously(@NotNull Project project,
                                                   @NotNull @NlsContexts.ProgressTitle String progressTitle,
                                                   @NotNull Runnable process,
                                                   @Nullable Runnable successRunnable,
                                                   @Nullable Runnable canceledRunnable) {
    runProcessWithProgressAsynchronously(project, progressTitle, process, successRunnable, canceledRunnable, PerformInBackgroundOption.DEAF);
  }

  // bg; runnables on UI/EDT?
  @Override
  public void runProcessWithProgressAsynchronously(@NotNull Project project,
                                                   @NotNull @NlsContexts.ProgressTitle String progressTitle,
                                                   @NotNull Runnable process,
                                                   @Nullable Runnable successRunnable,
                                                   @Nullable Runnable canceledRunnable,
                                                   @NotNull PerformInBackgroundOption option) {
    runProcessWithProgressAsynchronously(new Task.Backgroundable(project, progressTitle, true, option) {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        process.run();
      }

      @Override
      public void onCancel() {
        if (canceledRunnable != null) {
          canceledRunnable.run();
        }
      }

      @Override
      public void onSuccess() {
        if (successRunnable != null) {
          successRunnable.run();
        }
      }
    });
  }

  /**
   * Different places in IntelliJ codebase behaves differently in case of headless mode.
   * <p>
   * Often, they're trying to make async parts synchronous to make it more predictable or controllable.
   * E.g. in tests or IntelliJ-based command line tools this is the usual code:
   * <p>
   * ```
   * if (ApplicationManager.getApplication().isHeadless()) {
   *   performSyncChange()
   * }
   * else {
   *   scheduleAsyncChange()
   * }
   * ```
   * <p>
   * However, sometimes headless application should behave just as regular GUI Application,
   * with all its asynchronous stuff. For that, the application must declare
   * `intellij.progress.task.ignoreHeadless` system property. And clients should modify
   * its pure `isHeadless` condition to something like
   * <p>
   * ```
   * ApplicationManager.getApplication().isHeadless() && !shouldRunHeadlessTasksAsynchronously()
   * ```
   *
   * @return true if asynchronous tasks must remain asynchronous even in headless mode
   */
  @ApiStatus.Internal
  public static boolean shouldKeepTasksAsynchronousInHeadlessMode() {
    return SystemProperties.getBooleanProperty("intellij.progress.task.ignoreHeadless", false);
  }

  // from any: bg or current if can't
  @Override
  public void run(@NotNull Task task) {
    if (task.isHeadless() && !shouldKeepTasksAsynchronousInHeadlessMode()) {
      // Headless: run synchronously; off-EDT the current thread is used directly.
      if (SwingUtilities.isEventDispatchThread()) {
        runProcessWithProgressSynchronously(task);
      }
      else {
        runProcessWithProgressInCurrentThread(task, new EmptyProgressIndicator(), ModalityState.defaultModalityState());
      }
    }
    else if (task.isModal()) {
      runProcessWithProgressSynchronously(task.asModal());
    }
    else {
      Task.Backgroundable backgroundable = task.asBackgroundable();
      if (backgroundable.isConditionalModal() && !backgroundable.shouldStartInBackground()) {
        runProcessWithProgressSynchronously(backgroundable);
      }
      else {
        runAsynchronously(backgroundable);
      }
    }
  }

  // from any: bg
  private void runAsynchronously(@NotNull Task.Backgroundable task) {
    if (ApplicationManager.getApplication().isDispatchThread()) {
      runProcessWithProgressAsynchronously(task);
    }
    else {
      // Hop to the EDT so the project-disposal check and task start are consistent.
      ApplicationManager.getApplication().invokeLater(() -> {
        Project project = task.getProject();
        if (project != null && project.isDisposed()) {
          LOG.info("Task canceled because of project disposal: " + task);
          finishTask(task, true, null);
          return;
        }

        runProcessWithProgressAsynchronously(task);
      }, ModalityState.defaultModalityState());
    }
  }

  // from any: bg
  @NotNull
  public Future<?> runProcessWithProgressAsynchronously(@NotNull Task.Backgroundable task) {
    return runProcessWithProgressAsynchronously(task, new EmptyProgressIndicator(), null);
  }

  // from any: bg
  @NotNull
  public Future<?> runProcessWithProgressAsynchronously(@NotNull Task.Backgroundable task,
                                                        @NotNull ProgressIndicator progressIndicator,
                                                        @Nullable Runnable continuation) {
    return runProcessWithProgressAsynchronously(task, progressIndicator, continuation, progressIndicator.getModalityState());
  }

  /**
   * Runs the task under the indicator, then finishes Ex indicators and runs the
   * continuation — both in finally blocks so they execute even on failure/cancel.
   */
  @Deprecated
  protected void startTask(@NotNull Task task, @NotNull ProgressIndicator indicator, @Nullable Runnable continuation) {
    try {
      task.run(indicator);
    }
    finally {
      try {
        if (indicator instanceof ProgressIndicatorEx) {
          ((ProgressIndicatorEx)indicator).finish(task);
        }
      }
      finally {
        if (continuation != null) {
          continuation.run();
        }
      }
    }
  }

  // Wraps a Disposable indicator so it can be registered on the application parent
  // without re-parenting the indicator itself.
  private static class IndicatorDisposable implements Disposable {
    @NotNull private final ProgressIndicator myIndicator;

    IndicatorDisposable(@NotNull ProgressIndicator indicator) {
      myIndicator = indicator;
    }

    @Override
    public void dispose() {
      // do nothing if already disposed
      Disposer.dispose((Disposable)myIndicator, false);
    }
  }

  // from any: bg, task.finish on "UI/EDT"
  @NotNull
  public Future<?> runProcessWithProgressAsynchronously(@NotNull Task.Backgroundable task,
                                                        @NotNull ProgressIndicator progressIndicator,
                                                        @Nullable Runnable continuation,
                                                        @NotNull ModalityState modalityState) {
    IndicatorDisposable indicatorDisposable;
    if (progressIndicator instanceof Disposable) {
      // use IndicatorDisposable instead of progressIndicator to
      // avoid re-registering progressIndicator if it was registered on some other parent before
      indicatorDisposable = new IndicatorDisposable(progressIndicator);
      Disposer.register(ApplicationManager.getApplication(), indicatorDisposable);
    }
    else {
      indicatorDisposable = null;
    }
    return runProcessWithProgressAsync(task, CompletableFuture.completedFuture(progressIndicator), continuation, indicatorDisposable, modalityState);
  }

  /**
   * Core async runner: executes the task on a pooled thread via {@link ProgressRunner},
   * then delivers onSuccess/onCancel/onThrowable callbacks in the proper modality.
   */
  @NotNull
  protected Future<?> runProcessWithProgressAsync(@NotNull Task.Backgroundable task,
                                                  @NotNull CompletableFuture<? extends @NotNull ProgressIndicator> progressIndicator,
                                                  @Nullable Runnable continuation,
                                                  @Nullable IndicatorDisposable indicatorDisposable,
                                                  @Nullable ModalityState modalityState) {
    AtomicLong elapsed = new AtomicLong();
    return new ProgressRunner<>(progress -> {
      long start = System.currentTimeMillis();
      try {
        startTask(task, progress, continuation);
      }
      finally {
        elapsed.set(System.currentTimeMillis() - start);
      }
      return null;
    }).onThread(ProgressRunner.ThreadToUse.POOLED)
      .withProgress(progressIndicator)
      .submit()
      .whenComplete(ClientId.decorateBiConsumer((result, err) -> {
        if (!result.isCanceled()) {
          notifyTaskFinished(task, elapsed.get());
        }

        ModalityState modality;
        if (modalityState != null) {
          modality = modalityState;
        }
        else {
          try {
            modality = progressIndicator.get().getModalityState();
          }
          catch (Throwable e) {
            // Indicator future failed — fall back to the non-modal state.
            modality = ModalityState.NON_MODAL;
          }
        }
        ApplicationUtil.invokeLaterSomewhere(task.whereToRunCallbacks(), modality, () -> {
          // PCE is an expected cancellation signal, not an error to report.
          finishTask(task, result.isCanceled(), result.getThrowable() instanceof ProcessCanceledException ? null : result.getThrowable());
          if (indicatorDisposable != null) {
            Disposer.dispose(indicatorDisposable);
          }
        });
      }));
  }

  // Hook for subclasses; called after a non-canceled background task finishes.
  void notifyTaskFinished(@NotNull Task.Backgroundable task, long elapsed) {

  }

  // ASSERT IS EDT->UI bg or calling if cant
  // NEW: no assert; bg or calling ...
  protected boolean runProcessWithProgressSynchronously(@NotNull Task task) {
    Ref<Throwable> exceptionRef = new Ref<>();
    Runnable taskContainer = () -> {
      try {
        startTask(task, getProgressIndicator(), null);
      }
      catch (ProcessCanceledException e) {
        throw e;
      }
      catch (Throwable e) {
        exceptionRef.set(e);
      }
    };

    ApplicationEx application = ApplicationManagerEx.getApplicationEx();
    boolean result = application.runProcessWithProgressSynchronously(taskContainer, task.getTitle(), task.isCancellable(), task.isModal(),
                                                                     task.getProject(), task.getParentComponent(), task.getCancelText());

    ApplicationUtil.invokeAndWaitSomewhere(task.whereToRunCallbacks(), application.getDefaultModalityState(),
                                           () -> finishTask(task, !result, exceptionRef.get()));
    return result;
  }

  /** Runs the task on the calling thread, then delivers callbacks in the given modality. */
  public void runProcessWithProgressInCurrentThread(@NotNull Task task,
                                                    @NotNull ProgressIndicator progressIndicator,
                                                    @NotNull ModalityState modalityState) {
    if (progressIndicator instanceof Disposable) {
      Disposer.register(ApplicationManager.getApplication(), (Disposable)progressIndicator);
    }

    boolean processCanceled = false;
    Throwable exception = null;
    try {
      runProcess(() -> startTask(task, progressIndicator, null), progressIndicator);
    }
    catch (ProcessCanceledException e) {
      processCanceled = true;
    }
    catch (Throwable e) {
      exception = e;
    }

    // The indicator may have been canceled without a PCE being thrown.
    boolean finalCanceled = processCanceled || progressIndicator.isCanceled();
    Throwable finalException = exception;

    ApplicationUtil.invokeAndWaitSomewhere(task.whereToRunCallbacks(), modalityState, () -> finishTask(task, finalCanceled, finalException));
  }

  /** Dispatches exactly one of onThrowable/onCancel/onSuccess, then always onFinished. */
  protected void finishTask(@NotNull Task task, boolean canceled, @Nullable Throwable error) {
    try {
      if (error != null) {
        task.onThrowable(error);
      }
      else if (canceled) {
        task.onCancel();
      }
      else {
        task.onSuccess();
      }
    }
    finally {
      task.onFinished();
    }
  }

  // bg
  @Override
  public void runProcessWithProgressAsynchronously(@NotNull Task.Backgroundable task, @NotNull ProgressIndicator progressIndicator) {
    runProcessWithProgressAsynchronously(task, progressIndicator, null);
  }

  @Override
  public ProgressIndicator getProgressIndicator() {
    return getCurrentIndicator(Thread.currentThread());
  }

  // run in current thread
  @Override
  public void executeProcessUnderProgress(@NotNull Runnable process, ProgressIndicator progress) throws ProcessCanceledException {
    computeUnderProgress(() -> {
      process.run();
      return null;
    }, progress);
  }

  @Override
  public boolean runInReadActionWithWriteActionPriority(@NotNull Runnable action, @Nullable ProgressIndicator indicator) {
    // Core implementation has no write-action preemption: plain read action, always "succeeds".
    ApplicationManager.getApplication().runReadAction(action);
    return true;
  }

  // Installs `progress` as the thread's current indicator for the duration of `process`,
  // restoring the previous indicator afterwards. A null progress only bumps the
  // "unsafe" counter (see hasUnsafeProgressIndicator()).
  private <V, E extends Throwable> V computeUnderProgress(@NotNull ThrowableComputable<V, E> process, ProgressIndicator progress) throws E {
    if (progress == null) myUnsafeProgressCount.incrementAndGet();

    try {
      ProgressIndicator oldIndicator = null;
      boolean set = progress != null && progress != (oldIndicator = getProgressIndicator());
      if (set) {
        Thread currentThread = Thread.currentThread();
        long threadId = currentThread.getId();
        setCurrentIndicator(threadId, progress);
        try {
          return registerIndicatorAndRun(progress, currentThread, oldIndicator, process);
        }
        finally {
          setCurrentIndicator(threadId, oldIndicator);
        }
      }
      else {
        return process.compute();
      }
    }
    finally {
      if (progress == null) myUnsafeProgressCount.decrementAndGet();
    }
  }

  // this thread
  // Registers the current thread under `indicator` and the whole chain of indicators it
  // wraps (WrappedProgressIndicator), runs the process, then unwinds the registration.
  private <V, E extends Throwable> V registerIndicatorAndRun(@NotNull ProgressIndicator indicator,
                                                             @NotNull Thread currentThread,
                                                             ProgressIndicator oldIndicator,
                                                             @NotNull ThrowableComputable<V, E> process) throws E {
    // Parallel list to the wrapping chain: the Set we added this thread to, or null if it
    // was already registered under that indicator (so we must not remove it on exit).
    List<Set<Thread>> threadsUnderThisIndicator = new ArrayList<>();
    synchronized (threadsUnderIndicator) {
      boolean oneOfTheIndicatorsIsCanceled = false;

      for (ProgressIndicator thisIndicator = indicator;
           thisIndicator != null;
           thisIndicator = thisIndicator instanceof WrappedProgressIndicator ? ((WrappedProgressIndicator)thisIndicator).getOriginalProgressIndicator() : null) {
        Set<Thread> underIndicator = threadsUnderIndicator.computeIfAbsent(thisIndicator, __ -> new HashSet<>());
        boolean alreadyUnder = !underIndicator.add(currentThread);
        threadsUnderThisIndicator.add(alreadyUnder ? null : underIndicator);

        boolean isStandard = thisIndicator instanceof StandardProgressIndicator;
        if (!isStandard) {
          // Non-standard indicators need the background poller; count occurrences.
          nonStandardIndicators.compute(thisIndicator, (__, count) -> {
            if (count == null) {
              return new AtomicInteger(1);
            }
            count.incrementAndGet();
            return count;
          });
          startBackgroundNonStandardIndicatorsPing();
        }

        oneOfTheIndicatorsIsCanceled |= thisIndicator.isCanceled();
      }

      updateThreadUnderCanceledIndicator(currentThread, oneOfTheIndicatorsIsCanceled);
    }

    try {
      return process.compute();
    }
    finally {
      synchronized (threadsUnderIndicator) {
        ProgressIndicator thisIndicator = null;
        // order doesn't matter
        for (int i = 0; i < threadsUnderThisIndicator.size(); i++) {
          thisIndicator = i == 0 ? indicator : ((WrappedProgressIndicator)thisIndicator).getOriginalProgressIndicator();
          Set<Thread> underIndicator = threadsUnderThisIndicator.get(i);
          boolean removed = underIndicator != null && underIndicator.remove(currentThread);
          if (removed && underIndicator.isEmpty()) {
            threadsUnderIndicator.remove(thisIndicator);
          }
          boolean isStandard = thisIndicator instanceof StandardProgressIndicator;
          if (!isStandard) {
            AtomicInteger newCount = nonStandardIndicators.compute(thisIndicator, (__, count) -> {
              if (count.decrementAndGet() == 0) {
                return null;
              }
              return count;
            });
            if (newCount == null) {
              stopBackgroundNonStandardIndicatorsPing();
            }
          }
          // by this time oldIndicator may have been canceled
        }
        updateThreadUnderCanceledIndicator(currentThread, oldIndicator != null && oldIndicator.isCanceled());
      }
    }
  }

  // Adds/removes the thread from the canceled set and refreshes the global behavior flag.
  private void updateThreadUnderCanceledIndicator(@NotNull Thread thread, boolean underCanceledIndicator) {
    boolean changed = underCanceledIndicator ? threadsUnderCanceledIndicator.add(thread) : threadsUnderCanceledIndicator.remove(thread);
    if (changed) {
      updateShouldCheckCanceled();
    }
  }

  // Recomputes what doCheckCanceled() should do based on hooks and canceled threads.
  final void updateShouldCheckCanceled() {
    synchronized (threadsUnderIndicator) {
      CheckCanceledHook hook = createCheckCanceledHook();
      boolean hasCanceledIndicator = !threadsUnderCanceledIndicator.isEmpty();
      ourCheckCanceledHook = hook;
      ourCheckCanceledBehavior = hook == null && !hasCanceledIndicator ? CheckCanceledBehavior.NONE :
                                 hasCanceledIndicator && ENABLED ? CheckCanceledBehavior.INDICATOR_PLUS_HOOKS :
                                 CheckCanceledBehavior.ONLY_HOOKS;
    }
  }

  @Nullable
  protected CheckCanceledHook createCheckCanceledHook() {
    return null;
  }

  @Override
  protected void indicatorCanceled(@NotNull ProgressIndicator indicator) {
    // mark threads running under this indicator as canceled
    synchronized (threadsUnderIndicator) {
      Set<Thread> threads = threadsUnderIndicator.get(indicator);
      if (threads != null) {
        for (Thread thread : threads) {
          boolean underCancelledIndicator = false;
          // Only mark the thread if the canceled indicator is in its CURRENT wrapping chain.
          for (ProgressIndicator currentIndicator = getCurrentIndicator(thread);
               currentIndicator != null;
               currentIndicator = currentIndicator instanceof WrappedProgressIndicator ? ((WrappedProgressIndicator)currentIndicator).getOriginalProgressIndicator() : null) {
            if (currentIndicator == indicator) {
              underCancelledIndicator = true;
              break;
            }
          }

          if (underCancelledIndicator) {
            threadsUnderCanceledIndicator.add(thread);
            updateShouldCheckCanceled();
          }
        }
      }
    }
  }

  @TestOnly
  public static boolean isCanceledThread(@NotNull Thread thread) {
    synchronized (threadsUnderIndicator) {
      return threadsUnderCanceledIndicator.contains(thread);
    }
  }

  @Override
  public boolean isInNonCancelableSection() {
    return isInNonCancelableSection.get() != null;
  }

  // Upper bound on how long a thread may stay prioritized (see checkLowPriorityReallyApplicable).
  private static final long MAX_PRIORITIZATION_NANOS = TimeUnit.SECONDS.toNanos(12);
  private static final Thread[] EMPTY_THREAD_ARRAY = new Thread[0];
  private final Set<Thread> myPrioritizedThreads = ContainerUtil.newConcurrentSet();
  // Published snapshot of prioritized threads (empty while deprioritizations are active).
  private volatile Thread[] myEffectivePrioritizedThreads = EMPTY_THREAD_ARRAY;
  private int myDeprioritizations; //guarded by myPrioritizationLock
  private final Object myPrioritizationLock = ObjectUtils.sentinel("myPrioritizationLock");
  private volatile long myPrioritizingStarted;

  @Override
  public <T, E extends Throwable> T computePrioritized(@NotNull ThrowableComputable<T, E> computable) throws E {
    Thread thread = Thread.currentThread();

    boolean prioritize;
    synchronized (myPrioritizationLock) {
      if (isCurrentThreadPrioritized()) {
        // Nested call: the outer computePrioritized already owns the registration.
        prioritize = false;
      }
      else {
        prioritize = true;
        if (myPrioritizedThreads.isEmpty()) {
          myPrioritizingStarted = System.nanoTime();
        }
        myPrioritizedThreads.add(thread);
        updateEffectivePrioritized();
      }
    }
    try {
      return computable.compute();
    }
    finally {
      if (prioritize) {
        synchronized (myPrioritizationLock) {
          myPrioritizedThreads.remove(thread);
          updateEffectivePrioritized();
        }
      }
    }
  }

  // Rebuilds the published snapshot and fires started/finished transitions.
  private void updateEffectivePrioritized() {
    Thread[] prev = myEffectivePrioritizedThreads;
    Thread[] current = myDeprioritizations > 0 || myPrioritizedThreads.isEmpty() ? EMPTY_THREAD_ARRAY : myPrioritizedThreads.toArray(EMPTY_THREAD_ARRAY);
    myEffectivePrioritizedThreads = current;
    if (prev.length == 0 && current.length > 0) {
      prioritizingStarted();
    }
    else if (prev.length > 0 && current.length == 0) {
      prioritizingFinished();
    }
  }

  protected void prioritizingStarted() {}

  protected void prioritizingFinished() {}

  @ApiStatus.Internal
  public boolean isCurrentThreadPrioritized() {
    return myPrioritizedThreads.contains(Thread.currentThread());
  }

  @ApiStatus.Internal
  public void suppressPrioritizing() {
    synchronized (myPrioritizationLock) {
      if (++myDeprioritizations == 100 + ForkJoinPool.getCommonPoolParallelism() * 2) {
        Attachment attachment = new Attachment("threadDump.txt", ThreadDumper.dumpThreadsToString());
        attachment.setIncluded(true);
        LOG.error("A suspiciously high nesting of suppressPrioritizing, forgot to call restorePrioritizing?", attachment);
      }
      updateEffectivePrioritized();
    }
  }

  @ApiStatus.Internal
  public void restorePrioritizing() {
    synchronized (myPrioritizationLock) {
      if (--myDeprioritizations < 0) {
        myDeprioritizations = 0;
        LOG.error("Unmatched suppressPrioritizing/restorePrioritizing");
      }
      updateEffectivePrioritized();
    }
  }

  // Parks the calling (non-prioritized) thread briefly to yield to prioritized ones.
  protected boolean sleepIfNeededToGivePriorityToAnotherThread() {
    if (!isCurrentThreadEffectivelyPrioritized() && checkLowPriorityReallyApplicable()) {
      LockSupport.parkNanos(1_000_000);
      avoidBlockingPrioritizingThread();
      return true;
    }
    return false;
  }

  private boolean isCurrentThreadEffectivelyPrioritized() {
    Thread current = Thread.currentThread();
    for (Thread prioritized : myEffectivePrioritizedThreads) {
      if (prioritized == current) {
        return true;
      }
    }
    return false;
  }

  private boolean checkLowPriorityReallyApplicable() {
    long time = System.nanoTime() - myPrioritizingStarted;
    if (time < 5_000_000) {
      return false; // don't sleep when activities are very short (e.g. empty processing of mouseMoved events)
    }

    if (avoidBlockingPrioritizingThread()) {
      return false;
    }

    if (ApplicationManager.getApplication().isDispatchThread()) {
      return false; // EDT always has high priority
    }

    if (time > MAX_PRIORITIZATION_NANOS) {
      // Don't wait forever in case someone forgot to stop prioritizing before waiting for other threads to complete
      // wait just for 12 seconds; this will be noticeable (and we'll get 2 thread dumps) but not fatal
      stopAllPrioritization();
      return false;
    }

    return true;
  }

  private boolean avoidBlockingPrioritizingThread() {
    if (isAnyPrioritizedThreadBlocked()) {
      // the current thread could hold a lock that prioritized threads are waiting for
      suppressPrioritizing();
      checkLaterThreadsAreUnblocked();
      return true;
    }
    return false;
  }

  // Re-checks periodically; restores prioritizing once no prioritized thread is blocked.
  private void checkLaterThreadsAreUnblocked() {
    try {
      AppExecutorUtil.getAppScheduledExecutorService().schedule(() -> {
        if (isAnyPrioritizedThreadBlocked()) {
          checkLaterThreadsAreUnblocked();
        }
        else {
          restorePrioritizing();
        }
      }, 5, TimeUnit.MILLISECONDS);
    }
    catch (RejectedExecutionException ignore) {
      // Scheduler is shutting down — prioritization will simply not be restored here.
    }
  }

  private void stopAllPrioritization() {
    synchronized (myPrioritizationLock) {
      myPrioritizedThreads.clear();
      updateEffectivePrioritized();
    }
  }

  private boolean isAnyPrioritizedThreadBlocked() {
    for (Thread thread : myEffectivePrioritizedThreads) {
      Thread.State state = thread.getState();
      if (state == Thread.State.WAITING || state == Thread.State.TIMED_WAITING || state == Thread.State.BLOCKED) {
        return true;
      }
    }
    return false;
  }

  /** Modality of the top-level indicator the current thread runs under, or NON_MODAL. */
  @NotNull
  public static ModalityState getCurrentThreadProgressModality() {
    ProgressIndicator indicator = threadTopLevelIndicators.get(Thread.currentThread().getId());
    ModalityState modality = indicator == null ? null : indicator.getModalityState();
    return modality != null ? modality : ModalityState.NON_MODAL;
  }

  // Maintains both the current-indicator map and the top-level map: the first indicator
  // set on a thread stays the "top-level" one (putIfAbsent) until cleared with null.
  private static void setCurrentIndicator(long threadId, ProgressIndicator indicator) {
    if (indicator == null) {
      currentIndicators.remove(threadId);
      threadTopLevelIndicators.remove(threadId);
    }
    else {
      currentIndicators.put(threadId, indicator);
      threadTopLevelIndicators.putIfAbsent(threadId, indicator);
    }
  }

  private static ProgressIndicator getCurrentIndicator(@NotNull Thread thread) {
    return currentIndicators.get(thread.getId());
  }

  @FunctionalInterface
  interface CheckCanceledHook {
    /**
     * @param indicator the indicator whose {@link ProgressIndicator#checkCanceled()} was called,
     *                  or null if {@link ProgressManager#checkCanceled()} was called (even on a thread with indicator)
     * @return true if the hook has done anything that might take some time.
     */
    boolean runHook(@Nullable ProgressIndicator indicator);
  }

  public static void assertUnderProgress(@NotNull ProgressIndicator indicator) {
    synchronized (threadsUnderIndicator) {
      Set<Thread> threads = threadsUnderIndicator.get(indicator);
      if (threads == null || !threads.contains(Thread.currentThread())) {
        LOG.error("Must be executed under progress indicator: " + indicator + ". Please see e.g. ProgressManager.runProcess()");
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jk.config;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.Hashtable;
import java.util.Vector;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.tomcat.util.IntrospectionUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

/* Naming conventions:

JK_CONF_DIR == serverRoot/work  ( XXX /jkConfig ? )

- Each vhost has a sub-dir named after the canonycal name

- For each webapp in a vhost, there is a separate WEBAPP_NAME.jkmap

- In httpd.conf ( or equivalent servers ), in each virtual host you
should "Include JK_CONF_DIR/VHOST/jk_apache.conf". The config
file will contain the Alias declarations and other rules required
for apache operation. Same for other servers.

- WebXml2Jk will be invoked by a config tool or automatically for each
webapp - it'll generate the WEBAPP.jkmap files and config fragments.

WebXml2Jk will _not_ generate anything else but mappings.
It should _not_ try to guess locations or anything else - that's
another components' job.
*/

/**
 * Read a web.xml file and generate the mappings for jk2.
 * It can be used from the command line or ant.
 *
 * In order for the web server to serve static pages, all webapps
 * must be deployed on the computer that runs Apache, IIS, etc.
 *
 * Dynamic pages can be executed on that computer or other servers
 * in a pool, but even if the main server doesn't run tomcat,
 * it must have all the static files and WEB-INF/web.xml.
 * ( you could have a script remove everything else, including jsps - if
 * security paranoia is present ).
 *
 * XXX We could have this in WEB-INF/urimap.properties.
 *
 * @author Costin Manolache
 */
public class WebXml2Jk {
    String vhost = "";
    String cpath = "";
    String docBase;
    String file;
    String worker = "lb";

    // -------------------- Settings --------------------

    // XXX We can also generate location-independent mappings.

    /** Set the canonical name of the virtual host.
     */
    public void setHost(String vhost) {
        this.vhost = vhost;
    }

    /** Set the context path where the application will be mounted.
     */
    public void setContext(String contextPath) {
        this.cpath = contextPath;
    }

    /** Set the base directory where the application is
     *  deployed ( on the web server ).
     */
    public void setDocBase(String docBase) {
        this.docBase = docBase;
    }

    // Automatically generated.
//     /** The file where the jk2 mapping will be generated
//      */
//     public void setJk2Conf( String outFile ) {
//         file=outFile;
//         type=CONFIG_JK2_URIMAP;
//     }

//     /** Backward compat: generate JkMounts for mod_jk1
//      */
//     public void setJkmountFile( String outFile ) {
//         file=outFile;
//         type=CONFIG_JK_MOUNT;
//     }

    /* By default we map to the lb - in jk2 this is automatically
     * created and includes all tomcat instances.
     *
     * This is equivalent to the worker in jk1.
     */
    public void setGroup(String route) {
        worker = route;
    }

    // -------------------- Generators --------------------

    /** Callback interface implemented by each output format
     *  ( jk2 urimap, jk1 JkMount, Apache2 conf fragments ).
     */
    public static interface MappingGenerator {
        void setWebXmlReader(WebXml2Jk wxml);

        /** Start section( vhost declarations, etc ) */
        void generateStart() throws IOException;

        void generateEnd() throws IOException;

        void generateServletMapping(String servlet, String url) throws IOException;

        void generateFilterMapping(String servlet, String url) throws IOException;

        void generateLoginConfig(String loginPage, String errPage, String authM) throws IOException;

        void generateErrorPage(int err, String location) throws IOException;

        void generateConstraints(Vector urls, Vector methods, Vector roles, boolean isSSL) throws IOException;
    }

    // -------------------- Implementation --------------------

    Node webN;
    File jkDir;

    /** Return the top level node
     */
    public Node getWebXmlNode() {
        return webN;
    }

    /** Directory where the generated config fragments are written
     *  ( DOCBASE/WEB-INF/jk2 ).
     */
    public File getJkDir() {
        return jkDir;
    }

    /** Extract the wellcome files from the web.xml
     */
    public Vector getWellcomeFiles() {
        Node n0 = getChild(webN, "welcome-file-list");
        Vector wF = new Vector();
        if (n0 != null) {
            // welcome-file elements are children of welcome-file-list, not of
            // web-app - search under n0 ( previously searched under webN and
            // never found anything ).
            for (Node mapN = getChild(n0, "welcome-file");
                 mapN != null; mapN = getNext(mapN)) {
                wF.addElement(getContent(mapN));
            }
        }
        // XXX Add index.html, index.jsp
        return wF;
    }

    /** Walk the parsed web.xml and feed every mapping-relevant element
     *  ( servlet-mapping, filter-mapping, error-page, login-config,
     *  security-constraint ) to the generator.
     */
    void generate(MappingGenerator gen) throws IOException {
        gen.generateStart();

        log.info("Generating mappings for servlets ");
        for (Node mapN = getChild(webN, "servlet-mapping");
             mapN != null; mapN = getNext(mapN)) {
            String serv = getChildContent(mapN, "servlet-name");
            String url = getChildContent(mapN, "url-pattern");
            gen.generateServletMapping(serv, url);
        }

        log.info("Generating mappings for filters ");
        for (Node mapN = getChild(webN, "filter-mapping");
             mapN != null; mapN = getNext(mapN)) {
            String filter = getChildContent(mapN, "filter-name");
            String url = getChildContent(mapN, "url-pattern");
            gen.generateFilterMapping(filter, url);
        }

        for (Node mapN = getChild(webN, "error-page");
             mapN != null; mapN = getNext(mapN)) {
            String errorCode = getChildContent(mapN, "error-code");
            String location = getChildContent(mapN, "location");

            if (errorCode != null && !"".equals(errorCode)) {
                try {
                    // Integer.parseInt avoids the deprecated new Integer(...) boxing
                    int err = Integer.parseInt(errorCode);
                    gen.generateErrorPage(err, location);
                } catch (Exception ex) {
                    log.error("Format error " + location, ex);
                }
            }
        }

        Node lcN = getChild(webN, "login-config");
        if (lcN != null) {
            log.info("Generating mapping for login-config ");

            String authMeth = getContent(getChild(lcN, "auth-method"));
            if (authMeth == null)
                authMeth = "BASIC";

            Node n1 = getChild(lcN, "form-login-config");
            String loginPage = getChildContent(n1, "form-login-page");
            String errPage = getChildContent(n1, "form-error-page");
            if (loginPage != null) {
                // j_security_check lives next to the login page
                int lpos = loginPage.lastIndexOf("/");
                String jscurl = loginPage.substring(0, lpos + 1) + "j_security_check";
                gen.generateLoginConfig(jscurl, errPage, authMeth);
            }
        }

        log.info("Generating mappings for security constraints ");
        for (Node mapN = getChild(webN, "security-constraint");
             mapN != null; mapN = getNext(mapN)) {
            Vector methods = new Vector();
            Vector urls = new Vector();
            Vector roles = new Vector();
            boolean isSSL = false;

            Node wrcN = getChild(mapN, "web-resource-collection");
            for (Node uN = getChild(wrcN, "http-method");
                 uN != null; uN = getNext(uN)) {
                methods.addElement(getContent(uN));
            }
            for (Node uN = getChild(wrcN, "url-pattern");
                 uN != null; uN = getNext(uN)) {
                urls.addElement(getContent(uN));
            }

            // Not used at the moment
            Node acN = getChild(mapN, "auth-constraint");
            for (Node rN = getChild(acN, "role-name");
                 rN != null; rN = getNext(rN)) {
                roles.addElement(getContent(rN));
            }

            Node ucN = getChild(mapN, "user-data-constraint");
            String transp = getContent(getChild(ucN, "transport-guarantee"));
            if (transp != null) {
                if ("INTEGRAL".equalsIgnoreCase(transp) ||
                        "CONFIDENTIAL".equalsIgnoreCase(transp)) {
                    isSSL = true;
                }
            }

            gen.generateConstraints(urls, methods, roles, isSSL);
        }

        gen.generateEnd();
    }

    // -------------------- Main and ant wrapper --------------------

    /** Parse DOCBASE/WEB-INF/web.xml and run all three generators
     *  ( jk2, jk1, apache2 ), writing into DOCBASE/WEB-INF/jk2.
     */
    public void execute() {
        try {
            if (docBase == null) {
                log.error("No docbase - please specify the base directory of you web application ( -docBase PATH )");
                return;
            }
            if (cpath == null) {
                log.error("No context - please specify the mount ( -context PATH )");
                return;
            }

            File docbF = new File(docBase);
            File wXmlF = new File(docBase, "WEB-INF/web.xml");

            Document wXmlN = readXml(wXmlF);
            if (wXmlN == null)
                return;
            webN = wXmlN.getDocumentElement();
            if (webN == null) {
                log.error("Can't find web-app");
                return;
            }

            jkDir = new File(docbF, "WEB-INF/jk2");
            jkDir.mkdirs();

            MappingGenerator generator = new GeneratorJk2();
            generator.setWebXmlReader(this);
            generate(generator);

            generator = new GeneratorJk1();
            generator.setWebXmlReader(this);
            generate(generator);

            generator = new GeneratorApache2();
            generator.setWebXmlReader(this);
            generate(generator);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    public static void main(String args[]) {
        try {
            if (args.length == 1 &&
                    ("-?".equals(args[0]) || "-h".equals(args[0]))) {
                System.out.println("Usage: ");
                System.out.println("  WebXml2Jk [OPTIONS]");
                System.out.println();
                System.out.println("  -docBase DIR The location of the webapp. Required");
                System.out.println("  -group GROUP Group, if you have multiple tomcats with diffrent content. ");
                System.out.println("     The default is 'lb', and should be used in most cases");
                System.out.println("  -host HOSTNAME Canonical hostname - for virtual hosts");
                System.out.println("  -context /CPATH Context path where the app will be mounted");
                return;
            }

            WebXml2Jk w2jk = new WebXml2Jk();
            /* do ant-style property setting */
            IntrospectionUtils.processArgs(w2jk, args, new String[] {},
                    null, new Hashtable());
            w2jk.execute();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    private static org.apache.juli.logging.Log log =
            org.apache.juli.logging.LogFactory.getLog(WebXml2Jk.class);

    // -------------------- DOM utils --------------------

    /** Get the content of a node ( trimmed text of its first child ).
     *  Returns null for a null or childless node.
     */
    public static String getContent(Node n) {
        if (n == null)
            return null;
        Node n1 = n.getFirstChild();
        // XXX Check if it's a text node
        if (n1 == null)
            return null;
        String s1 = n1.getNodeValue();
        return s1.trim();
    }

    /** Get the first direct child with the given element name.
     */
    public static Node getChild(Node parent, String name) {
        if (parent == null)
            return null;
        Node first = parent.getFirstChild();
        if (first == null)
            return null;
        for (Node node = first; node != null; node = node.getNextSibling()) {
            //System.out.println("getNode: " + name + " " + node.getNodeName());
            if (name.equals(node.getNodeName())) {
                return node;
            }
        }
        return null;
    }

    /** Get the first child's content ( i.e. it's included TEXT node ).
     */
    public static String getChildContent(Node parent, String name) {
        Node first = parent.getFirstChild();
        if (first == null)
            return null;
        for (Node node = first; node != null; node = node.getNextSibling()) {
            //System.out.println("getNode: " + name + " " + node.getNodeName());
            if (name.equals(node.getNodeName())) {
                return getContent(node);
            }
        }
        return null;
    }

    /** Get the next sibling with the same element name as {@code current}.
     */
    public static Node getNext(Node current) {
        Node first = current.getNextSibling();
        String name = current.getNodeName();
        if (first == null)
            return null;
        for (Node node = first; node != null; node = node.getNextSibling()) {
            //System.out.println("getNode: " + name + " " + node.getNodeName());
            if (name.equals(node.getNodeName())) {
                return node;
            }
        }
        return null;
    }

    /** Entity resolver that maps every external entity ( e.g. the web-app
     *  DTD ) to an empty stream, so parsing works offline.
     */
    public static class NullResolver implements EntityResolver {
        public InputSource resolveEntity(String publicId, String systemId)
                throws SAXException, IOException {
            if (log.isDebugEnabled())
                log.debug("ResolveEntity: " + publicId + " " + systemId);
            return new InputSource(new StringReader(""));
        }
    }

    /** Parse the given XML file into a DOM document, without validation
     *  and without resolving external entities. Returns null if the file
     *  does not exist.
     */
    public static Document readXml(File xmlF)
            throws SAXException, IOException, ParserConfigurationException {
        if (!xmlF.exists()) {
            log.error("No xml file " + xmlF);
            return null;
        }

        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();

        dbf.setValidating(false);
        dbf.setIgnoringComments(false);
        dbf.setIgnoringElementContentWhitespace(true);
        //dbf.setCoalescing(true);
        //dbf.setExpandEntityReferences(true);

        DocumentBuilder db = null;
        db = dbf.newDocumentBuilder();
        db.setEntityResolver(new NullResolver());

        // db.setErrorHandler( new MyErrorHandler());

        Document doc = db.parse(xmlF);
        return doc;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sling.pipes;

import org.apache.commons.io.IOUtils;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ValueMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.script.Bindings;
import javax.script.Invocable;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import javax.script.SimpleScriptContext;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Execution bindings of a pipe
 */
public class PipeBindings {

    private static final Logger log = LoggerFactory.getLogger(PipeBindings.class);

    public static final String NN_ADDITIONALBINDINGS = "additionalBindings";

    public static final String PN_ADDITIONALSCRIPTS = "additionalScripts";

    ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");

    ScriptContext scriptContext = new SimpleScriptContext();

    public static final String PATH_BINDING = "path";

    // pipe name -> path of that pipe's current resource, exposed as "path.<PipeName>"
    Map<String, String> pathBindings = new HashMap<>();

    // pipe name -> last resource the pipe produced
    Map<String, Resource> outputResources = new HashMap<>();

    // matches ${...} placeholders that get rewritten into ECMA expressions
    private static final Pattern INJECTED_SCRIPT = Pattern.compile("\\$\\{(([^\\{^\\}]*(\\{[0-9,]+\\})?)*)\\}");

    /**
     * public constructor
     * @param resource pipe resource, whose {@code additionalBindings} child and
     *                 {@code additionalScripts} property are read to seed the context
     */
    public PipeBindings(Resource resource) {
        engine.setContext(scriptContext);

        //add path bindings where path.MyPipe will give MyPipe current resource path
        getBindings().put(PATH_BINDING, pathBindings);

        //additional bindings (global variables to use in child pipes expressions)
        Resource additionalBindings = resource.getChild(NN_ADDITIONALBINDINGS);
        if (additionalBindings != null) {
            ValueMap bindings = additionalBindings.adaptTo(ValueMap.class);
            addBindings(bindings);
        }

        Resource scriptsResource = resource.getChild(PN_ADDITIONALSCRIPTS);
        if (scriptsResource != null) {
            String[] scripts = scriptsResource.adaptTo(String[].class);
            if (scripts != null) {
                for (String script : scripts) {
                    addScript(resource.getResourceResolver(), script);
                }
            }
        }
    }

    /**
     * add a script file to the engine
     * @param resolver resolver used to load repository-based scripts
     * @param path either an http(s) URL or an absolute repository path
     */
    public void addScript(ResourceResolver resolver, String path) {
        InputStream is = null;
        try {
            if (path.startsWith("http")) {
                try {
                    URL remoteScript = new URL(path);
                    is = remoteScript.openStream();
                } catch (Exception e) {
                    log.error("unable to retrieve remote script", e);
                }
            } else if (path.startsWith("/")) {
                Resource scriptResource = resolver.getResource(path);
                if (scriptResource != null) {
                    is = scriptResource.adaptTo(InputStream.class);
                }
            }
            if (is != null) {
                try {
                    engine.eval(new InputStreamReader(is), scriptContext);
                } catch (Exception e) {
                    // pass the exception to SLF4J so the cause isn't silently dropped
                    log.error("unable to execute {}", path, e);
                }
            }
        } finally {
            IOUtils.closeQuietly(is);
        }
    }

    /**
     * adds additional bindings (global variables to use in child pipes expressions)
     * @param bindings map of name -> value to merge into the engine scope
     */
    public void addBindings(Map bindings) {
        log.info("Adding bindings {}", bindings);
        getBindings().putAll(bindings);
    }

    /**
     * copies all bindings of another instance into this one
     * @param original bindings to copy from
     */
    public void copyBindings(PipeBindings original) {
        getBindings().putAll(original.getBindings());
    }

    /**
     * Update current resource of a given pipe, and appropriate binding
     * @param pipe pipe whose output changed
     * @param resource new current resource (may be null)
     */
    public void updateBindings(Pipe pipe, Resource resource) {
        outputResources.put(pipe.getName(), resource);
        if (resource != null) {
            pathBindings.put(pipe.getName(), resource.getPath());
        }
        addBinding(pipe.getName(), pipe.getOutputBinding());
    }

    /**
     * registers a single binding under the given name
     * @param name binding name
     * @param value binding value
     */
    public void addBinding(String name, Object value) {
        log.debug("Adding binding {}={}", name, value);
        getBindings().put(name, value);
    }

    /**
     * @param name binding name
     * @return true if a binding with that name exists
     */
    public boolean isBindingDefined(String name) {
        return getBindings().containsKey(name);
    }

    /**
     * @return engine-scope bindings of the script context
     */
    public Bindings getBindings() {
        return scriptContext.getBindings(ScriptContext.ENGINE_SCOPE);
    }

    /**
     * Doesn't look like nashorn likes template strings :-(
     * Rewrites "pre${expr}post" into "'pre' + expr + 'post'".
     * @param expr raw expression possibly containing ${...} placeholders
     * @return rewritten ECMA5 expression, or null if expr contains no placeholder
     */
    protected String computeECMA5Expression(String expr) {
        Matcher matcher = INJECTED_SCRIPT.matcher(expr);
        if (matcher.find()) {
            // reuse the same matcher instead of compiling a throwaway one
            matcher.reset();
            StringBuilder expression = new StringBuilder();
            int start = 0;
            while (matcher.find()) {
                if (matcher.start() > start) {
                    if (expression.length() == 0) {
                        expression.append("'");
                    }
                    expression.append(expr.substring(start, matcher.start()));
                }
                if (expression.length() > 0) {
                    expression.append("' + ");
                }
                expression.append(matcher.group(1));
                start = matcher.end();
                if (start < expr.length()) {
                    expression.append(" + '");
                }
            }
            if (start < expr.length()) {
                expression.append(expr.substring(start)).append("'");
            }
            return expression.toString();
        }
        return null;
    }

    /**
     * Evaluates the expression with the script engine if it contains
     * placeholders, otherwise returns it unchanged.
     * @param expr expression to evaluate
     * @return evaluation result, or expr itself when it is a plain string
     * @throws ScriptException if the engine fails to evaluate the expression
     */
    protected Object evaluate(String expr) throws ScriptException {
        String computed = computeECMA5Expression(expr);
        if (computed != null) {
            //computed is null in case expr is a simple string
            return engine.eval(computed, scriptContext);
        }
        return expr;
    }

    /**
     * Expression is a function of variables from execution context, that
     * we implement here as a String
     * @param expr expression to instantiate
     * @return evaluated string, or expr itself if evaluation fails
     */
    public String instantiateExpression(String expr) {
        try {
            return (String) evaluate(expr);
        } catch (ScriptException e) {
            log.error("Unable to evaluate the script", e);
        }
        return expr;
    }

    /**
     * Instantiate object from expression
     * @param expr expression to instantiate
     * @return evaluated object (JS dates are converted to Calendar), or expr if evaluation fails
     */
    public Object instantiateObject(String expr) {
        try {
            Object result = evaluate(expr);
            if (result != null && !result.getClass().getName().startsWith("java.lang.")) {
                //special case of the date in which case jdk.nashorn.api.scripting.ScriptObjectMirror will
                //be returned
                JsDate jsDate = ((Invocable) engine).getInterface(result, JsDate.class);
                if (jsDate != null) {
                    Date date = new Date(jsDate.getTime() + jsDate.getTimezoneOffset() * 60 * 1000);
                    Calendar cal = Calendar.getInstance();
                    cal.setTime(date);
                    return cal;
                }
            }
            return result;
        } catch (ScriptException e) {
            log.error("Unable to evaluate the script for expr {} ", expr, e);
        }
        return expr;
    }

    /**
     * @param name pipe name
     * @return last resource output by that pipe, or null
     */
    public Resource getExecutedResource(String name) {
        return outputResources.get(name);
    }

    /**
     * interface mapping a javascript date
     */
    public interface JsDate {

        long getTime();

        int getTimezoneOffset();
    }
}
/*
 * Copyright (C) 2018 Satomichi Nishihara
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package burai.app.project.editor.input.band;

import java.net.URL;
import java.util.List;
import java.util.ResourceBundle;

import burai.app.QEFXMainController;
import burai.app.project.editor.input.QEFXInputController;
import burai.app.project.editor.input.items.QEFXComboInteger;
import burai.app.project.editor.input.items.QEFXItem;
import burai.app.project.editor.input.items.QEFXTextFieldInteger;
import burai.app.project.editor.input.items.QEFXToggleBoolean;
import burai.app.project.editor.input.items.WarningCondition;
import burai.input.QEInput;
import burai.input.card.QECard;
import burai.input.card.QECardEvent;
import burai.input.card.QEKPoint;
import burai.input.card.QEKPoints;
import burai.input.correcter.BandCorrector;
import burai.input.correcter.BrillouinPathGenerator;
import burai.input.correcter.SymmetricKPointsGenerator;
import burai.input.namelist.QENamelist;
import burai.input.namelist.QEValueBase;
import burai.input.namelist.QEValueBuffer;
import javafx.fxml.FXML;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.Label;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TableCell;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableRow;
import javafx.scene.control.TableView;
import javafx.scene.control.TableView.TableViewSelectionModel;
import javafx.scene.control.TextField;
import javafx.scene.control.ToggleButton;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.control.cell.TextFieldTableCell;

/**
 * FXML controller for the band-structure input editor: binds the nbnd field,
 * band symmetry toggle, spin component combo, and the k-point path table to
 * the underlying QEInput namelists / K_POINTS card.
 */
public class QEFXBandController extends QEFXInputController {

    // display labels for the k-point unit combo
    private static final String UNIT_2PIA = "2PI / A";
    private static final String UNIT_CRYSTAL = "Crystal";

    public static final double PLUS_GRAPHIC_SIZE = 18.0;
    public static final String PLUS_GRAPHIC_CLASS = "piclight-button";

    // context-menu item captions of the k-point table
    private static final String MENU_TEXT_ADD = "Add";
    private static final String MENU_TEXT_DELETE = "Delete";
    private static final String MENU_TEXT_UP = "Bring up";
    private static final String MENU_TEXT_DOWN = "Bring down";

    // binds the k-point table rows to the QEKPoints card
    private KPointAnsatzBinder kpointBinder;

    // kept so updateNBandStatus() can re-run the nbnd warning triggers
    private QEFXTextFieldInteger nbandItem;

    // re-entrancy guards between the unit combo and card listeners
    private boolean busyUnitCombo;

    private boolean busyKPoints;

    /*
     * nband
     */
    @FXML
    private Label nbandLabel;

    @FXML
    private TextField nbandField;

    @FXML
    private Button nbandButton;

    /*
     * symmetry of band
     */
    @FXML
    private Label symLabel;

    @FXML
    private ToggleButton symToggle;

    @FXML
    private Button symButton;

    /*
     * spin component
     */
    @FXML
    private Label spinLabel;

    @FXML
    private ComboBox<String> spinCombo;

    @FXML
    private Button spinButton;

    /*
     * K-points
     */
    @FXML
    private ComboBox<String> unitCombo;

    @FXML
    private Button defButton;

    @FXML
    private TableView<KPointAnsatz> kpointTable;

    @FXML
    private TableColumn<KPointAnsatz, String> symbolColumn;

    @FXML
    private TableColumn<KPointAnsatz, String> kxColumn;

    @FXML
    private TableColumn<KPointAnsatz, String> kyColumn;

    @FXML
    private TableColumn<KPointAnsatz, String> kzColumn;

    @FXML
    private TableColumn<KPointAnsatz, String> nkColumn;

    public QEFXBandController(QEFXMainController mainController, QEInput input) {
        super(mainController, input);
        this.kpointBinder = null;
        this.nbandItem = null;
        this.busyUnitCombo = false;
        this.busyKPoints = false;
    }

    /** Re-fires the nbnd item's triggers so its warning state is refreshed. */
    public void updateNBandStatus() {
        if (this.nbandItem != null) {
            this.nbandItem.pullAllTriggers();
        }
    }

    /** FXML entry point: wires every control to its namelist value / card. */
    @Override
    public void initialize(URL location, ResourceBundle resources) {
        QECard card = this.input.getCard(QEKPoints.CARD_NAME);
        QENamelist nmlSystem = this.input.getNamelist(QEInput.NAMELIST_SYSTEM);
        QENamelist nmlBands = this.input.getNamelist(QEInput.NAMELIST_BANDS);

        if (nmlSystem != null) {
            this.setupNBandItem(nmlSystem);
        }

        if (nmlBands != null) {
            this.setupSymItem(nmlBands);
            if (nmlSystem != null) {
                this.setupSpinItem(nmlBands, nmlSystem);
            }
        }

        if (card != null && card instanceof QEKPoints) {
            this.initializeKPointBinder((QEKPoints) card);
            this.setupUnitCombo((QEKPoints) card);
            this.setupDefButton((QEKPoints) card);
        }

        this.setupSymbolColumn();
        this.setupKxColumn();
        this.setupKyColumn();
        this.setupKzColumn();
        this.setupNkColumn();
        this.setupKPointTable();
    }

    /**
     * Binds the nbnd text field: default value comes from BandCorrector,
     * a warning is shown when the field differs from the corrector's value.
     */
    private void setupNBandItem(QENamelist nmlSystem) {
        if (this.nbandField == null) {
            return;
        }

        BandCorrector corrector = new BandCorrector(this.input);

        QEFXTextFieldInteger item = new QEFXTextFieldInteger(nmlSystem.getValueBuffer("nbnd"), this.nbandField);

        if (this.nbandLabel != null) {
            item.setLabel(this.nbandLabel);
        }

        if (this.nbandButton != null) {
            item.setDefault(() -> {
                int nband = corrector.isAvailable() ? corrector.getNumBands() : 0;
                if (nband > 0) {
                    return QEValueBase.getInstance("nbnd", nband);
                } else {
                    return null;
                }
            }, this.nbandButton);
        }

        // nbnd must be > 0
        item.setLowerBound(0, QEFXTextFieldInteger.BOUND_TYPE_LESS_THAN);

        item.addWarningCondition((name, value) -> {
            if ("nbnd".equalsIgnoreCase(name)) {
                if (value == null) {
                    return WarningCondition.WARNING;
                }
                int nband = corrector.isAvailable() ? corrector.getNumBands() : 0;
                if (nband > 0) {
                    if (nband != value.getIntegerValue()) {
                        return WarningCondition.WARNING;
                    } else {
                        return WarningCondition.OK;
                    }
                }
            }
            return WarningCondition.OK;
        });

        item.pullAllTriggers();

        this.nbandItem = item;
    }

    /** Binds the lsym toggle (band symmetry analysis), default false. */
    private void setupSymItem(QENamelist nmlBands) {
        if (this.symToggle == null) {
            return;
        }

        QEFXToggleBoolean item = new QEFXToggleBoolean(nmlBands.getValueBuffer("lsym"), this.symToggle, false);

        if (this.symLabel != null) {
            item.setLabel(this.symLabel);
        }

        if (this.symButton != null) {
            item.setDefault(false, this.symButton);
        }
    }

    /**
     * Binds the spin_component combo ("1"/"2"); shows an error when
     * spin_component == 2 while nspin != 2.
     */
    private void setupSpinItem(QENamelist nmlBands, QENamelist nmlSystem) {
        if (this.spinCombo == null) {
            return;
        }

        QEValueBuffer nspinValue = nmlSystem.getValueBuffer("nspin");
        QEValueBuffer spinCompValue = nmlBands.getValueBuffer("spin_component");

        this.spinCombo.getItems().clear();

        QEFXComboInteger item = new QEFXComboInteger(spinCompValue, this.spinCombo);

        if (this.spinLabel != null) {
            item.setLabel(this.spinLabel);
        }

        if (this.spinButton != null) {
            item.setDefault(1, this.spinButton);
        }

        item.addItems("1", "2");

        item.setValueFactory(text -> {
            return Integer.parseInt(text);
        });

        item.addWarningTrigger(nspinValue);
        item.addWarningCondition((name, value) -> {
            if ("nspin".equalsIgnoreCase(name) || "spin_component".equalsIgnoreCase(name)) {
                int nspin = nspinValue.hasValue() ? nspinValue.getIntegerValue() : 1;
                int spinComp = spinCompValue.hasValue() ? spinCompValue.getIntegerValue() : 1;
                if (nspin != 2 && spinComp == 2) {
                    return WarningCondition.ERROR;
                } else {
                    return WarningCondition.OK;
                }
            }
            return WarningCondition.OK;
        });

        item.pullAllTriggers();
    }

    private void initializeKPointBinder(QEKPoints cardKPoints) {
        if (this.kpointTable == null) {
            return;
        }

        this.kpointBinder = new KPointAnsatzBinder(this.kpointTable, cardKPoints);
    }

    /**
     * Fills the unit combo (2PI/A, Crystal) and keeps it in sync with the
     * card in both directions, guarded by the busy flags.
     */
    private void setupUnitCombo(QEKPoints cardKPoints) {
        if (this.unitCombo == null) {
            return;
        }

        this.unitCombo.getItems().clear();
        this.unitCombo.getItems().add(UNIT_2PIA);
        this.unitCombo.getItems().add(UNIT_CRYSTAL);

        this.actionByKPoints(cardKPoints);

        cardKPoints.addListener(event -> {
            int eventType = event.getEventType();
            if (eventType == QECardEvent.EVENT_TYPE_UNIT_CHANGED) {
                this.actionByKPoints(cardKPoints);
            } else if (eventType == QECardEvent.EVENT_TYPE_NULL) {
                this.actionByKPoints(cardKPoints);
            }
        });

        this.unitCombo.setOnAction(event -> this.actionByUnitCombo(cardKPoints));
    }

    /** Card -> combo: reflect the card's unit in the combo selection. */
    private void actionByKPoints(QEKPoints cardKPoints) {
        if (this.busyUnitCombo) {
            return;
        }

        this.busyKPoints = true;

        if (cardKPoints.isTpibaB()) {
            this.unitCombo.setValue(UNIT_2PIA);
        } else if (cardKPoints.isCrystalB()) {
            this.unitCombo.setValue(UNIT_CRYSTAL);
        }

        this.busyKPoints = false;
    }

    /** Combo -> card: push the selected unit into the card. */
    private void actionByUnitCombo(QEKPoints cardKPoints) {
        if (this.busyKPoints) {
            return;
        }

        this.busyUnitCombo = true;

        String value = this.unitCombo.getValue();
        if (UNIT_2PIA.equals(value)) {
            cardKPoints.setTpibaB();
        } else if (UNIT_CRYSTAL.equals(value)) {
            cardKPoints.setCrystalB();
        }

        this.busyUnitCombo = false;
    }

    /**
     * "Default" button: regenerates the Brillouin path from the input and
     * rewrites the card (unit reset to tpiba_b).
     */
    private void setupDefButton(QEKPoints cardKPoints) {
        if (this.defButton == null) {
            return;
        }

        QEFXItem.setupDefaultButton(this.defButton);

        this.defButton.setOnAction(event -> {
            List<QEKPoint> kpoints = null;

            BrillouinPathGenerator generator = new BrillouinPathGenerator(this.input);
            if (generator.isAvailable()) {
                kpoints = generator.getKPoints();
            }

            cardKPoints.clear();
            cardKPoints.setTpibaB();

            if (kpoints != null && (!kpoints.isEmpty())) {
                for (QEKPoint kpoint : kpoints) {
                    if (kpoint != null) {
                        cardKPoints.addKPoint(kpoint);
                    }
                }
            }
        });
    }

    /** Binds the table through kpointBinder and attaches the context menu. */
    private void setupKPointTable() {
        if (this.kpointTable == null) {
            return;
        }

        if (this.kpointBinder != null) {
            this.kpointBinder.bindTable();
        }

        ContextMenu contextMenu = this.createContextMenu();
        if (contextMenu != null) {
            this.kpointTable.setContextMenu(contextMenu);
        }
    }

    private ContextMenu createContextMenu() {
        ContextMenu contextMenu = new ContextMenu();

        MenuItem addItem = this.createAddMenuItem();
        MenuItem deleteItem = this.createDeleteMenuItem();
        MenuItem upItem = this.createUpMenuItem();
        MenuItem downItem = this.createDownMenuItem();

        contextMenu.getItems().addAll(addItem, deleteItem, upItem, downItem);

        return contextMenu;
    }

    /** "Add": inserts a fresh k-point row after the current selection. */
    private MenuItem createAddMenuItem() {
        MenuItem menuItem = new MenuItem(MENU_TEXT_ADD);

        menuItem.setOnAction(event -> {
            if (this.kpointTable == null || this.kpointBinder == null) {
                return;
            }

            TableViewSelectionModel<KPointAnsatz> selectionModel = this.kpointTable.getSelectionModel();
            if (selectionModel == null) {
                return;
            }

            int index = selectionModel.getSelectedIndex();

            KPointAnsatz kpointAnsatz = new KPointAnsatz(Math.max(0, index + 1));
            kpointAnsatz.setSymbol("");
            kpointAnsatz.setKx(0.0);
            kpointAnsatz.setKy(0.0);
            kpointAnsatz.setKz(0.0);
            kpointAnsatz.setNk(20);

            if (index > -1) {
                this.kpointBinder.addKPoint(kpointAnsatz, index);
            } else {
                this.kpointBinder.addKPoint(kpointAnsatz);
            }
        });

        return menuItem;
    }

    /** "Delete": removes the selected k-point row. */
    private MenuItem createDeleteMenuItem() {
        MenuItem menuItem = new MenuItem(MENU_TEXT_DELETE);

        menuItem.setOnAction(event -> {
            if (this.kpointTable == null || this.kpointBinder == null) {
                return;
            }

            TableViewSelectionModel<KPointAnsatz> selectionModel = this.kpointTable.getSelectionModel();
            if (selectionModel == null) {
                return;
            }

            KPointAnsatz kpointAnsatz = selectionModel.getSelectedItem();
            if (kpointAnsatz == null) {
                return;
            }

            this.kpointBinder.removeKPoint(kpointAnsatz);
        });

        return menuItem;
    }

    /** "Bring up": swaps the selected row with the one above it. */
    private MenuItem createUpMenuItem() {
        MenuItem menuItem = new MenuItem(MENU_TEXT_UP);

        menuItem.setOnAction(event -> {
            if (this.kpointTable == null || this.kpointBinder == null) {
                return;
            }

            TableViewSelectionModel<KPointAnsatz> selectionModel = this.kpointTable.getSelectionModel();
            if (selectionModel == null) {
                return;
            }

            List<KPointAnsatz> items = this.kpointTable.getItems();

            int index1 = selectionModel.getSelectedIndex();
            if (index1 < 0 || items.size() <= index1) {
                return;
            }

            int index2 = index1 - 1;
            if (index2 < 0 || items.size() <= index2) {
                return;
            }

            KPointAnsatz kpointAnsatz1 = items.get(index1);
            KPointAnsatz kpointAnsatz2 = items.get(index2);

            this.kpointBinder.swapKPoints(kpointAnsatz1, kpointAnsatz2);

            selectionModel.select(index2);
        });

        return menuItem;
    }

    /** "Bring down": swaps the selected row with the one below it. */
    private MenuItem createDownMenuItem() {
        MenuItem menuItem = new MenuItem(MENU_TEXT_DOWN);

        menuItem.setOnAction(event -> {
            if (this.kpointTable == null || this.kpointBinder == null) {
                return;
            }

            TableViewSelectionModel<KPointAnsatz> selectionModel = this.kpointTable.getSelectionModel();
            if (selectionModel == null) {
                return;
            }

            List<KPointAnsatz> items = this.kpointTable.getItems();

            int index1 = selectionModel.getSelectedIndex();
            if (index1 < 0 || items.size() <= index1) {
                return;
            }

            int index2 = index1 + 1;
            if (index2 < 0 || items.size() <= index2) {
                return;
            }

            KPointAnsatz kpointAnsatz1 = items.get(index1);
            KPointAnsatz kpointAnsatz2 = items.get(index2);

            this.kpointBinder.swapKPoints(kpointAnsatz1, kpointAnsatz2);

            selectionModel.select(index2);
        });

        return menuItem;
    }

    /** Symbol column uses a combo cell fed by the symmetric k-points generator. */
    private void setupSymbolColumn() {
        if (this.symbolColumn == null) {
            return;
        }

        this.symbolColumn.setCellFactory(column -> {
            double width = column.getWidth();
            return new SymbolCell(this, width);
        });

        this.symbolColumn.setCellValueFactory(new PropertyValueFactory<KPointAnsatz, String>("symbol"));
    }

    // kx/ky/kz/nk columns are plain editable text cells bound to the bean properties

    private void setupKxColumn() {
        if (this.kxColumn == null) {
            return;
        }

        this.kxColumn.setCellFactory(TextFieldTableCell.<KPointAnsatz> forTableColumn());
        this.kxColumn.setCellValueFactory(new PropertyValueFactory<KPointAnsatz, String>("kx"));
    }

    private void setupKyColumn() {
        if (this.kyColumn == null) {
            return;
        }

        this.kyColumn.setCellFactory(TextFieldTableCell.<KPointAnsatz> forTableColumn());
        this.kyColumn.setCellValueFactory(new PropertyValueFactory<KPointAnsatz, String>("ky"));
    }

    private void setupKzColumn() {
        if (this.kzColumn == null) {
            return;
        }

        this.kzColumn.setCellFactory(TextFieldTableCell.<KPointAnsatz> forTableColumn());
        this.kzColumn.setCellValueFactory(new PropertyValueFactory<KPointAnsatz, String>("kz"));
    }

    private void setupNkColumn() {
        if (this.nkColumn == null) {
            return;
        }

        this.nkColumn.setCellFactory(TextFieldTableCell.<KPointAnsatz> forTableColumn());
        this.nkColumn.setCellValueFactory(new PropertyValueFactory<KPointAnsatz, String>("nk"));
    }

    /**
     * Table cell showing a combo of high-symmetry point labels; choosing one
     * delegates to kpointBinder.setKPointSymbol for the row's k-point.
     * The combo is created lazily and its items rebuilt each time it opens.
     */
    private static class SymbolCell extends TableCell<KPointAnsatz, String> {

        private QEFXBandController root;

        private double width;

        private ComboBox<String> combo;

        public SymbolCell(QEFXBandController root, double width) {
            super();

            if (root == null) {
                throw new IllegalArgumentException("root is null.");
            }

            if (width <= 0.0) {
                throw new IllegalArgumentException("width is not positive.");
            }

            this.root = root;
            this.width = width;
            this.combo = null;
        }

        // lazy accessor: builds and wires the combo on first use
        private ComboBox<String> getCombo() {
            if (this.combo == null) {
                this.combo = new ComboBox<String>();
                this.combo.setPrefWidth(this.width);
                this.combo.setFocusTraversable(false);
                this.combo.setOnShowing(event -> this.initializeComboItems());
                this.combo.setOnAction(event -> this.setKPointAnsatz());
            }

            return this.combo;
        }

        // repopulates the combo with the current symmetric k-point labels
        private void initializeComboItems() {
            List<String> items = this.getCombo().getItems();
            items.clear();
            items.add("");

            SymmetricKPointsGenerator generator = new SymmetricKPointsGenerator(this.root.input);
            List<QEKPoint> kpoints = generator.getKPoints();

            if (kpoints != null) {
                for (QEKPoint kpoint : kpoints) {
                    String symbol = null;
                    if (kpoint != null && kpoint.hasLetter()) {
                        symbol = kpoint.getLetter();
                    }
                    if (symbol != null && (!symbol.isEmpty())) {
                        items.add(symbol);
                    }
                }
            }
        }

        // pushes the combo selection into the row's k-point symbol
        private void setKPointAnsatz() {
            String value = this.getCombo().getValue();
            if (value != null) {
                @SuppressWarnings("unchecked")
                TableRow<KPointAnsatz> tableRow = this.getTableRow();
                KPointAnsatz kpointAnsatz = tableRow == null ? null : tableRow.getItem();
                if (kpointAnsatz != null) {
                    if (this.root.kpointBinder != null) {
                        this.root.kpointBinder.setKPointSymbol(kpointAnsatz, value);
                    }
                }
            }
        }

        @Override
        protected void updateItem(String item, boolean empty) {
            super.updateItem(item, empty);
            if (!empty) {
                String value = item == null ? "" : item;
                this.getCombo().setValue(value);
                this.setGraphic(this.getCombo());
            } else {
                this.setGraphic(null);
            }
        }
    }
}
/* * Copyright (C) 2009-2012 The Project Lombok Authors. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package lombok.eclipse.agent; import static lombok.patcher.scripts.ScriptBuilder.*; import java.lang.instrument.Instrumentation; import java.util.Collection; import java.util.Collections; import java.util.List; import lombok.core.AgentLauncher; import lombok.patcher.Hook; import lombok.patcher.MethodTarget; import lombok.patcher.ScriptManager; import lombok.patcher.StackRequest; import lombok.patcher.TargetMatcher; import lombok.patcher.scripts.ScriptBuilder; /** * This is a java-agent that patches some of eclipse's classes so AST Nodes are handed off to Lombok * for modification before Eclipse actually uses them to compile, render errors, show code outlines, * create auto-completion dialogs, and anything else eclipse does with java code. 
See the *Transformer * classes in this package for more information about which classes are transformed and how they are * transformed. */
public class EclipsePatcher implements AgentLauncher.AgentLaunchable {
// At some point I'd like the agent to be capable of auto-detecting if its on eclipse or on ecj. This class is a sure sign we're not in ecj but in eclipse. -ReinierZ
@SuppressWarnings("unused") private static final String ECLIPSE_SIGNATURE_CLASS = "org/eclipse/core/runtime/adaptor/EclipseStarter";
// Agent entry point. agentArgs may force "ECJ" or "ECLIPSE" mode; when both flags
// are present they cancel out and the 'injected' flag decides which mode applies.
@Override public void runAgent(String agentArgs, Instrumentation instrumentation, boolean injected, Class<?> launchingContext) throws Exception { String[] args = agentArgs == null ? new String[0] : agentArgs.split(":"); boolean forceEcj = false; boolean forceEclipse = false; for (String arg : args) { if (arg.trim().equalsIgnoreCase("ECJ")) forceEcj = true; if (arg.trim().equalsIgnoreCase("ECLIPSE")) forceEclipse = true; } if (forceEcj && forceEclipse) { forceEcj = false; forceEclipse = false; } boolean ecj; if (forceEcj) ecj = true; else if (forceEclipse) ecj = false; else ecj = injected; registerPatchScripts(instrumentation, injected, ecj, launchingContext); }
// Registers every patch script with the ScriptManager. The two if(!ecjOnly)
// groups run only inside the Eclipse IDE; the tail runs for plain ecj as well.
private static void registerPatchScripts(Instrumentation instrumentation, boolean reloadExistingClasses, boolean ecjOnly, Class<?> launchingContext) { ScriptManager sm = new ScriptManager(); sm.registerTransformer(instrumentation); if (!ecjOnly) { EclipseLoaderPatcher.patchEquinoxLoaders(sm, launchingContext); } if (!ecjOnly) { patchCatchReparse(sm); patchIdentifierEndReparse(sm); patchRetrieveEllipsisStartPosition(sm); patchRetrieveRightBraceOrSemiColonPosition(sm); patchSetGeneratedFlag(sm); patchDomAstReparseIssues(sm); patchHideGeneratedNodes(sm); patchPostCompileHookEclipse(sm); patchFixSourceTypeConverter(sm); patchDisableLombokForCodeFormatterAndCleanup(sm); patchListRewriteHandleGeneratedMethods(sm); patchSyntaxAndOccurrencesHighlighting(sm); patchSortMembersOperation(sm); patchExtractInterface(sm); patchAboutDialog(sm); patchEclipseDebugPatches(sm); } else { patchPostCompileHookEcj(sm); } patchAvoidReparsingGeneratedCode(sm); patchLombokizeAST(sm); patchEcjTransformers(sm, ecjOnly); patchExtensionMethod(sm, ecjOnly); if (reloadExistingClasses) sm.reloadClasses(instrumentation); }
// Patches for the "Extract Interface" refactoring so it tolerates lombok-generated members.
private static void patchExtractInterface(ScriptManager sm) { /* Fix sourceEnding for generated nodes to avoid null pointer */ sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.compiler.SourceElementNotifier", "notifySourceElementRequestor", "void", "org.eclipse.jdt.internal.compiler.ast.AbstractMethodDeclaration", "org.eclipse.jdt.internal.compiler.ast.TypeDeclaration", "org.eclipse.jdt.internal.compiler.ast.ImportReference")) .methodToWrap(new Hook("org.eclipse.jdt.internal.compiler.util.HashtableOfObjectToInt", "get", "int", "java.lang.Object")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "getSourceEndFixed", "int", "int", "org.eclipse.jdt.internal.compiler.ast.ASTNode")) .requestExtra(StackRequest.PARAM1) .transplant().build());
/* Make sure the generated source element is found instead of the annotation */ sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.corext.refactoring.structure.ExtractInterfaceProcessor", "createMethodDeclaration", "void", "org.eclipse.jdt.internal.corext.refactoring.structure.CompilationUnitRewrite", "org.eclipse.jdt.core.dom.rewrite.ASTRewrite", "org.eclipse.jdt.core.dom.AbstractTypeDeclaration", "org.eclipse.jdt.core.dom.MethodDeclaration" )) .methodToWrap(new Hook("org.eclipse.jface.text.IDocument", "get", "java.lang.String", "int", "int")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "getRealMethodDeclarationSource", "java.lang.String", "java.lang.String", "java.lang.Object", "org.eclipse.jdt.core.dom.MethodDeclaration")) .requestExtra(StackRequest.THIS, StackRequest.PARAM4) .transplant().build());
/* get real generated node instead of a random one generated by the annotation */ sm.addScript(ScriptBuilder.replaceMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.corext.refactoring.structure.ExtractInterfaceProcessor", "createMemberDeclarations")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.refactoring.structure.ExtractInterfaceProcessor", "createMethodComments")) .methodToReplace(new Hook("org.eclipse.jdt.internal.corext.refactoring.structure.ASTNodeSearchUtil", "getMethodDeclarationNode", "org.eclipse.jdt.core.dom.MethodDeclaration", "org.eclipse.jdt.core.IMethod", "org.eclipse.jdt.core.dom.CompilationUnit")) .replacementMethod(new Hook("lombok.eclipse.agent.PatchFixes", "getRealMethodDeclarationNode", "org.eclipse.jdt.core.dom.MethodDeclaration", "org.eclipse.jdt.core.IMethod", "org.eclipse.jdt.core.dom.CompilationUnit")) .transplant().build());
/* Do not add @Override's for generated methods */ sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget("org.eclipse.jdt.core.dom.rewrite.ListRewrite", "insertFirst")) .decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "isListRewriteOnGeneratedNode", "boolean", "org.eclipse.jdt.core.dom.rewrite.ListRewrite")) .request(StackRequest.THIS) .transplant().build());
/* Do not add comments for generated methods */ sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget("org.eclipse.jdt.internal.corext.refactoring.structure.ExtractInterfaceProcessor", "createMethodComment")) .decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "isGenerated", "boolean", "org.eclipse.jdt.core.dom.ASTNode")) .request(StackRequest.PARAM2) .transplant().build()); }
private static void patchAboutDialog(ScriptManager sm) { /* * Add a line about lombok (+ version info) to eclipse's about dialog. * This is doable without patching, but we intentionally patch it so that presence of the lombok info * in the about dialog can be used to ascertain that patching in general is doing something.
*/ sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget("org.eclipse.core.internal.runtime.Product", "getProperty", "java.lang.String", "java.lang.String")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "addLombokNotesToEclipseAboutDialog", "java.lang.String", "java.lang.String", "java.lang.String")) .request(StackRequest.RETURN_VALUE, StackRequest.PARAM1) .transplant().build()); }
private static void patchSyntaxAndOccurrencesHighlighting(ScriptManager sm) { /* * Skip generated nodes for "visual effects" (syntax highlighting && highlight occurrences) */ sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget("org.eclipse.jdt.internal.ui.search.OccurrencesFinder", "addUsage")) .target(new MethodTarget("org.eclipse.jdt.internal.ui.search.OccurrencesFinder", "addWrite")) .target(new MethodTarget("org.eclipse.jdt.internal.ui.javaeditor.SemanticHighlightingReconciler$PositionCollector", "visit", "boolean", "org.eclipse.jdt.core.dom.SimpleName")) .decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "isGenerated", "boolean", "org.eclipse.jdt.core.dom.ASTNode")) .valueMethod(new Hook("lombok.eclipse.agent.PatchFixes", "returnFalse", "boolean", "java.lang.Object")) .request(StackRequest.PARAM1) .build()); }
// Sets the "lombok.disable" symbol while the code formatter parses, and makes the
// cleanup/fix visitors below skip lombok-generated nodes entirely.
private static void patchDisableLombokForCodeFormatterAndCleanup(ScriptManager sm) { sm.addScript(ScriptBuilder.setSymbolDuringMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.formatter.DefaultCodeFormatter", "formatCompilationUnit")) .callToWrap(new Hook("org.eclipse.jdt.internal.core.util.CodeSnippetParsingUtil", "parseCompilationUnit", "org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration", "char[]", "java.util.Map", "boolean")) .symbol("lombok.disable") .build());
sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.ControlStatementsFix$ControlStatementFinder", "visit", "boolean", "org.eclipse.jdt.core.dom.DoStatement")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.ControlStatementsFix$ControlStatementFinder", "visit", "boolean", "org.eclipse.jdt.core.dom.EnhancedForStatement")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.ControlStatementsFix$ControlStatementFinder", "visit", "boolean", "org.eclipse.jdt.core.dom.ForStatement")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.ControlStatementsFix$ControlStatementFinder", "visit", "boolean", "org.eclipse.jdt.core.dom.IfStatement")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.ControlStatementsFix$ControlStatementFinder", "visit", "boolean", "org.eclipse.jdt.core.dom.WhileStatement")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.CodeStyleFix$ThisQualifierVisitor", "visit", "boolean", "org.eclipse.jdt.core.dom.MethodInvocation")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.CodeStyleFix$ThisQualifierVisitor", "visit", "boolean", "org.eclipse.jdt.core.dom.FieldAccess")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.CodeStyleFix$CodeStyleVisitor", "visit", "boolean", "org.eclipse.jdt.core.dom.MethodInvocation")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.CodeStyleFix$CodeStyleVisitor", "visit", "boolean", "org.eclipse.jdt.core.dom.TypeDeclaration")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.CodeStyleFix$CodeStyleVisitor", "visit", "boolean", "org.eclipse.jdt.core.dom.QualifiedName")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.fix.CodeStyleFix$CodeStyleVisitor", "visit", "boolean", "org.eclipse.jdt.core.dom.SimpleName"))
// if a generated node has children we can just ignore them as well;
.decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "isGenerated", "boolean", "org.eclipse.jdt.core.dom.ASTNode")) .request(StackRequest.PARAM1) .valueMethod(new Hook("lombok.eclipse.agent.PatchFixes", "returnFalse", "boolean", "java.lang.Object")) .build()); }
private static void patchListRewriteHandleGeneratedMethods(ScriptManager sm) { sm.addScript(ScriptBuilder.replaceMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.dom.rewrite.ASTRewriteAnalyzer$ListRewriter", "rewriteList")) .methodToReplace(new Hook("org.eclipse.jdt.internal.core.dom.rewrite.RewriteEvent", "getChildren", "org.eclipse.jdt.internal.core.dom.rewrite.RewriteEvent[]")) .replacementMethod(new Hook("lombok.eclipse.agent.PatchFixes", "listRewriteHandleGeneratedMethods", "org.eclipse.jdt.internal.core.dom.rewrite.RewriteEvent[]", "org.eclipse.jdt.internal.core.dom.rewrite.RewriteEvent")) .build()); }
private static void patchSortMembersOperation(ScriptManager sm) { /* Fixes "sort members" action with @Data @Log * I would have liked to patch sortMembers, but kept getting a VerifyError: Illegal type in constant pool * So now I just patch all calling methods */ sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.SortElementsOperation$2", "visit", "boolean", "org.eclipse.jdt.core.dom.CompilationUnit")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.CompilationUnit", "types", "java.util.List")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "removeGeneratedNodes", "java.util.List", "java.util.List")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.SortElementsOperation$2", "visit", "boolean", "org.eclipse.jdt.core.dom.AnnotationTypeDeclaration")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.AnnotationTypeDeclaration", "bodyDeclarations", "java.util.List")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "removeGeneratedNodes", "java.util.List", "java.util.List")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.SortElementsOperation$2", "visit", "boolean", "org.eclipse.jdt.core.dom.AnonymousClassDeclaration")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.AnonymousClassDeclaration", "bodyDeclarations", "java.util.List")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "removeGeneratedNodes", "java.util.List", "java.util.List")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.SortElementsOperation$2", "visit", "boolean", "org.eclipse.jdt.core.dom.TypeDeclaration")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.TypeDeclaration", "bodyDeclarations", "java.util.List")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "removeGeneratedNodes", "java.util.List", "java.util.List")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.SortElementsOperation$2", "visit", "boolean", "org.eclipse.jdt.core.dom.EnumDeclaration")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.EnumDeclaration", "bodyDeclarations", "java.util.List")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "removeGeneratedNodes", "java.util.List", "java.util.List")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.SortElementsOperation$2", "visit", "boolean", "org.eclipse.jdt.core.dom.EnumDeclaration")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.EnumDeclaration", "enumConstants", "java.util.List")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "removeGeneratedNodes", "java.util.List", "java.util.List")) .transplant().build()); }
// Replaces TokenScanner.getTokenEndOffset inside ASTRewriteAnalyzer.visit with a
// generated-node-aware version.
private static void patchDomAstReparseIssues(ScriptManager sm) { sm.addScript(ScriptBuilder.replaceMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.dom.rewrite.ASTRewriteAnalyzer", "visit")) .methodToReplace(new Hook("org.eclipse.jdt.internal.core.dom.rewrite.TokenScanner", "getTokenEndOffset", "int", "int", "int")) .replacementMethod(new Hook("lombok.eclipse.agent.PatchFixes", "getTokenEndOffsetFixed", "int", "org.eclipse.jdt.internal.core.dom.rewrite.TokenScanner", "int", "int", "java.lang.Object")) .requestExtra(StackRequest.PARAM1) .transplant() .build()); }
// Feeds class files produced by eclipse's image builders (and by ecj below)
// through lombok's post-compiler before they are written out.
private static void patchPostCompileHookEclipse(ScriptManager sm) { sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.core.builder.IncrementalImageBuilder", "writeClassFileContents")) .target(new MethodTarget("org.eclipse.jdt.internal.core.builder.AbstractImageBuilder", "writeClassFileContents")) .methodToWrap(new Hook("org.eclipse.jdt.internal.compiler.ClassFile", "getBytes", "byte[]")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "runPostCompiler", "byte[]", "byte[]", "java.lang.String")) .requestExtra(StackRequest.PARAM3) .build()); }
private static void patchPostCompileHookEcj(ScriptManager sm) { sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.compiler.tool.EclipseCompilerImpl", "outputClassFiles")) .methodToWrap(new Hook("javax.tools.JavaFileObject", "openOutputStream", "java.io.OutputStream")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "runPostCompiler", "java.io.OutputStream", "java.io.OutputStream")) .transplant() .build()); sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.compiler.util.Util", "writeToDisk")) .methodToWrap(new Hook("java.io.BufferedOutputStream", "<init>", "void", "java.io.OutputStream", "int")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "runPostCompiler", "java.io.BufferedOutputStream", "java.io.BufferedOutputStream", "java.lang.String", "java.lang.String")) .requestExtra(StackRequest.PARAM2, StackRequest.PARAM3) .transplant() .build()); }
// Filters lombok-generated SimpleNames out of linked-node searches.
private static void patchHideGeneratedNodes(ScriptManager sm) { sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget("org.eclipse.jdt.internal.corext.dom.LinkedNodeFinder", "findByNode")) .target(new
MethodTarget("org.eclipse.jdt.internal.corext.dom.LinkedNodeFinder", "findByBinding")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "removeGeneratedSimpleNames", "org.eclipse.jdt.core.dom.SimpleName[]", "org.eclipse.jdt.core.dom.SimpleName[]")) .request(StackRequest.RETURN_VALUE).build()); patchRefactorScripts(sm); patchFormatters(sm); }
private static void patchFormatters(ScriptManager sm) { sm.addScript(ScriptBuilder.setSymbolDuringMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.ui.text.java.JavaFormattingStrategy", "format", "void")) .callToWrap(new Hook("org.eclipse.jdt.internal.corext.util.CodeFormatterUtil", "reformat", "org.eclipse.text.edits.TextEdit", "int", "java.lang.String", "int", "int", "int", "java.lang.String", "java.util.Map")) .symbol("lombok.disable").build()); }
private static void patchRefactorScripts(ScriptManager sm) { sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget("org.eclipse.jdt.core.dom.rewrite.ASTRewrite", "replace")) .target(new MethodTarget("org.eclipse.jdt.core.dom.rewrite.ASTRewrite", "remove")) .decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "skipRewritingGeneratedNodes", "boolean", "org.eclipse.jdt.core.dom.ASTNode")) .transplant().request(StackRequest.PARAM1).build()); sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.internal.corext.refactoring.rename.RenameTypeProcessor", "addConstructorRenames")) .methodToWrap(new Hook("org.eclipse.jdt.core.IType", "getMethods", "org.eclipse.jdt.core.IMethod[]")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "removeGeneratedMethods", "org.eclipse.jdt.core.IMethod[]", "org.eclipse.jdt.core.IMethod[]")) .transplant().build()); sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget("org.eclipse.jdt.internal.corext.refactoring.rename.TempOccurrenceAnalyzer", "visit", "boolean", "org.eclipse.jdt.core.dom.SimpleName")) .target(new MethodTarget("org.eclipse.jdt.internal.corext.refactoring.rename.RenameAnalyzeUtil$ProblemNodeFinder$NameNodeVisitor", "visit", "boolean", "org.eclipse.jdt.core.dom.SimpleName")) .decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "isGenerated", "boolean", "org.eclipse.jdt.core.dom.ASTNode")) .valueMethod(new Hook("lombok.eclipse.agent.PatchFixes", "returnTrue", "boolean", "java.lang.Object")) .request(StackRequest.PARAM1) .transplant().build()); }
// The four retrieve*-position patches below fix source offsets that ASTConverter
// computes while (re)parsing; lombok-generated nodes need corrected positions.
private static void patchCatchReparse(ScriptManager sm) { sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "retrieveStartingCatchPosition")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "fixRetrieveStartingCatchPosition", "int", "int", "int")) .transplant().request(StackRequest.RETURN_VALUE, StackRequest.PARAM1).build()); }
private static void patchIdentifierEndReparse(ScriptManager sm) { sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "retrieveIdentifierEndPosition")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "fixRetrieveIdentifierEndPosition", "int", "int", "int")) .transplant().request(StackRequest.RETURN_VALUE, StackRequest.PARAM2).build()); }
private static void patchRetrieveEllipsisStartPosition(ScriptManager sm) { sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "retrieveEllipsisStartPosition")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "fixRetrieveEllipsisStartPosition", "int", "int", "int")) .transplant().request(StackRequest.RETURN_VALUE, StackRequest.PARAM2).build()); }
private static void patchRetrieveRightBraceOrSemiColonPosition(ScriptManager sm) { sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "retrieveRightBraceOrSemiColonPosition")) .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "retrieveRightBrace")) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "fixRetrieveRightBraceOrSemiColonPosition", "int", "int", "int")) .transplant().request(StackRequest.RETURN_VALUE, StackRequest.PARAM2).build()); }
// Adds a $generatedBy field to the internal compiler AST and a boolean $isGenerated
// field to the dom AST, then marks every dom node that ASTConverter produces from a
// generated internal node (including SimpleName/QualifiedName created during conversion).
private static void patchSetGeneratedFlag(ScriptManager sm) { sm.addScript(ScriptBuilder.addField() .targetClass("org.eclipse.jdt.internal.compiler.ast.ASTNode") .fieldName("$generatedBy") .fieldType("Lorg/eclipse/jdt/internal/compiler/ast/ASTNode;") .setPublic().setTransient().build()); sm.addScript(ScriptBuilder.addField() .targetClass("org.eclipse.jdt.core.dom.ASTNode") .fieldName("$isGenerated").fieldType("Z") .setPublic().setTransient().build());
sm.addScript(ScriptBuilder.wrapReturnValue() .target(new TargetMatcher() { @Override public boolean matches(String classSpec, String methodName, String descriptor) { if (!"convert".equals(methodName)) return false; List<String> fullDesc = MethodTarget.decomposeFullDesc(descriptor); if ("V".equals(fullDesc.get(0))) return false; if (fullDesc.size() < 2) return false; if (!fullDesc.get(1).startsWith("Lorg/eclipse/jdt/internal/compiler/ast/")) return false; return true; } @Override public Collection<String> getAffectedClasses() { return Collections.singleton("org.eclipse.jdt.core.dom.ASTConverter"); } }).request(StackRequest.PARAM1, StackRequest.RETURN_VALUE) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlag", "void", "org.eclipse.jdt.core.dom.ASTNode", "org.eclipse.jdt.internal.compiler.ast.ASTNode")) .transplant().build());
sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convert", "org.eclipse.jdt.core.dom.ASTNode", "boolean", "org.eclipse.jdt.internal.compiler.ast.AbstractMethodDeclaration")) .request(StackRequest.PARAM2, StackRequest.RETURN_VALUE) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlag", "void", "org.eclipse.jdt.core.dom.ASTNode", "org.eclipse.jdt.internal.compiler.ast.ASTNode")) .transplant().build());
sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convertToFieldDeclaration", "org.eclipse.jdt.core.dom.FieldDeclaration", "org.eclipse.jdt.internal.compiler.ast.FieldDeclaration")) /* Targets beneath are only patched because the resulting dom nodes should be marked if generated. * However I couldn't find a use case where these were actually used */ .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convertToType", "org.eclipse.jdt.core.dom.Type", "org.eclipse.jdt.internal.compiler.ast.NameReference")) .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convertType", "org.eclipse.jdt.core.dom.Type", "org.eclipse.jdt.internal.compiler.ast.TypeReference")) .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convertToVariableDeclarationExpression", "org.eclipse.jdt.core.dom.VariableDeclarationExpression", "org.eclipse.jdt.internal.compiler.ast.LocalDeclaration")) .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convertToSingleVariableDeclaration", "org.eclipse.jdt.core.dom.SingleVariableDeclaration", "org.eclipse.jdt.internal.compiler.ast.LocalDeclaration")) .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convertToVariableDeclarationFragment", "org.eclipse.jdt.core.dom.VariableDeclarationFragment", "org.eclipse.jdt.internal.compiler.ast.FieldDeclaration")) .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convertToVariableDeclarationFragment", "org.eclipse.jdt.core.dom.VariableDeclarationFragment", "org.eclipse.jdt.internal.compiler.ast.LocalDeclaration")) .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convertToVariableDeclarationStatement", "org.eclipse.jdt.core.dom.VariableDeclarationStatement", "org.eclipse.jdt.internal.compiler.ast.LocalDeclaration")) /* Targets above are only patched because the resulting dom nodes should be marked if generated. */ .request(StackRequest.PARAM1, StackRequest.RETURN_VALUE) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlag", "void", "org.eclipse.jdt.core.dom.ASTNode", "org.eclipse.jdt.internal.compiler.ast.ASTNode")) .transplant().build());
/* Set generated flag for SimpleNames */ sm.addScript(ScriptBuilder.wrapMethodCall() .target(new TargetMatcher() { @Override public boolean matches(String classSpec, String methodName, String descriptor) { if (!methodName.startsWith("convert")) return false; List<String> fullDesc = MethodTarget.decomposeFullDesc(descriptor); if (fullDesc.size() < 2) return false; if (!fullDesc.get(1).startsWith("Lorg/eclipse/jdt/internal/compiler/ast/")) return false; return true; } @Override public Collection<String> getAffectedClasses() { return Collections.singleton("org.eclipse.jdt.core.dom.ASTConverter"); } }).methodToWrap(new Hook("org.eclipse.jdt.core.dom.SimpleName", "<init>", "void", "org.eclipse.jdt.core.dom.AST")) .requestExtra(StackRequest.PARAM1) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlagForName", "void", "org.eclipse.jdt.core.dom.Name", "java.lang.Object")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "convert", "org.eclipse.jdt.core.dom.ASTNode", "boolean", "org.eclipse.jdt.internal.compiler.ast.AbstractMethodDeclaration")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.SimpleName", "<init>", "void", "org.eclipse.jdt.core.dom.AST")) .requestExtra(StackRequest.PARAM2) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlagForName", "void", "org.eclipse.jdt.core.dom.Name", "java.lang.Object")) .transplant().build());
/* Set generated flag for QualifiedNames */ sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "setQualifiedNameNameAndSourceRanges", "org.eclipse.jdt.core.dom.QualifiedName", "char[][]", "long[]", "int", "org.eclipse.jdt.internal.compiler.ast.ASTNode")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.SimpleName", "<init>", "void", "org.eclipse.jdt.core.dom.AST")) .requestExtra(StackRequest.PARAM4) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlagForName", "void", "org.eclipse.jdt.core.dom.Name", "java.lang.Object")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "setQualifiedNameNameAndSourceRanges", "org.eclipse.jdt.core.dom.QualifiedName", "char[][]", "long[]", "int", "org.eclipse.jdt.internal.compiler.ast.ASTNode")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.QualifiedName", "<init>", "void", "org.eclipse.jdt.core.dom.AST")) .requestExtra(StackRequest.PARAM4) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlagForName", "void", "org.eclipse.jdt.core.dom.Name", "java.lang.Object")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "setQualifiedNameNameAndSourceRanges", "org.eclipse.jdt.core.dom.QualifiedName", "char[][]", "long[]", "org.eclipse.jdt.internal.compiler.ast.ASTNode")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.SimpleName", "<init>", "void", "org.eclipse.jdt.core.dom.AST")) .requestExtra(StackRequest.PARAM3) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlagForName", "void", "org.eclipse.jdt.core.dom.Name", "java.lang.Object")) .transplant().build());
sm.addScript(ScriptBuilder.wrapMethodCall() .target(new MethodTarget("org.eclipse.jdt.core.dom.ASTConverter", "setQualifiedNameNameAndSourceRanges", "org.eclipse.jdt.core.dom.QualifiedName", "char[][]", "long[]", "org.eclipse.jdt.internal.compiler.ast.ASTNode")) .methodToWrap(new Hook("org.eclipse.jdt.core.dom.QualifiedName", "<init>", "void", "org.eclipse.jdt.core.dom.AST")) .requestExtra(StackRequest.PARAM3) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "setIsGeneratedFlagForName", "void", "org.eclipse.jdt.core.dom.Name", "java.lang.Object")) .transplant().build()); }
// Parser.parse exits early (via checkBit24) for method/constructor/initializer
// bodies flagged with bit24, so generated code is never reparsed.
private static void patchAvoidReparsingGeneratedCode(ScriptManager sm) { final String PARSER_SIG = "org.eclipse.jdt.internal.compiler.parser.Parser"; sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget(PARSER_SIG, "parse", "void", "org.eclipse.jdt.internal.compiler.ast.MethodDeclaration", "org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration")) .decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "checkBit24", "boolean", "java.lang.Object")) .transplant() .request(StackRequest.PARAM1).build()); sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget(PARSER_SIG, "parse", "void", "org.eclipse.jdt.internal.compiler.ast.ConstructorDeclaration", "org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration", "boolean")) .decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "checkBit24", "boolean", "java.lang.Object")) .transplant() .request(StackRequest.PARAM1).build()); sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget(PARSER_SIG, "parse", "void", "org.eclipse.jdt.internal.compiler.ast.Initializer", "org.eclipse.jdt.internal.compiler.ast.TypeDeclaration", "org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration")) .decisionMethod(new Hook("lombok.eclipse.agent.PatchFixes", "checkBit24", "boolean", "java.lang.Object")) .transplant() .request(StackRequest.PARAM1).build()); }
// Hooks the parser so TransformEclipseAST runs over each CompilationUnitDeclaration
// after getMethodBodies/endParse; also adds a $lombokAST field for lombok's use.
private static void patchLombokizeAST(ScriptManager sm) { sm.addScript(ScriptBuilder.addField() .targetClass("org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration") .fieldName("$lombokAST").fieldType("Ljava/lang/Object;") .setPublic().setTransient().build()); final String PARSER_SIG = "org.eclipse.jdt.internal.compiler.parser.Parser"; final String CUD_SIG = "org.eclipse.jdt.internal.compiler.ast.CompilationUnitDeclaration"; sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget(PARSER_SIG, "getMethodBodies", "void", CUD_SIG)) .wrapMethod(new Hook("lombok.eclipse.TransformEclipseAST", "transform", "void", PARSER_SIG, CUD_SIG)) .request(StackRequest.THIS, StackRequest.PARAM1).build()); sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget(PARSER_SIG, "endParse", CUD_SIG, "int")) .wrapMethod(new Hook("lombok.eclipse.TransformEclipseAST", "transform_swapped", "void", CUD_SIG, PARSER_SIG)) .request(StackRequest.THIS, StackRequest.RETURN_VALUE).build()); }
// Transformer patches shared with ecj; the val-for-Eclipse extras only apply in the IDE.
private static void patchEcjTransformers(ScriptManager sm, boolean ecj) { addPatchesForDelegate(sm, ecj); addPatchesForVal(sm); if (!ecj) addPatchesForValEclipse(sm); }
private static void addPatchesForDelegate(ScriptManager sm, boolean ecj) { final String CLASSSCOPE_SIG = "org.eclipse.jdt.internal.compiler.lookup.ClassScope"; sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget(CLASSSCOPE_SIG, "buildFieldsAndMethods", "void")) .request(StackRequest.THIS) .decisionMethod(new Hook("lombok.eclipse.agent.PatchDelegatePortal", "handleDelegateForType", "boolean", "java.lang.Object")) .build()); }
private static void addPatchesForValEclipse(ScriptManager sm) { final String LOCALDECLARATION_SIG = "org.eclipse.jdt.internal.compiler.ast.LocalDeclaration"; final String PARSER_SIG = "org.eclipse.jdt.internal.compiler.parser.Parser"; final String VARIABLEDECLARATIONSTATEMENT_SIG = "org.eclipse.jdt.core.dom.VariableDeclarationStatement"; final String SINGLEVARIABLEDECLARATION_SIG = "org.eclipse.jdt.core.dom.SingleVariableDeclaration"; final String ASTCONVERTER_SIG = "org.eclipse.jdt.core.dom.ASTConverter"; sm.addScript(ScriptBuilder.addField() .fieldName("$initCopy") .fieldType("Lorg/eclipse/jdt/internal/compiler/ast/ASTNode;") .setPublic() .setTransient() .targetClass("org.eclipse.jdt.internal.compiler.ast.LocalDeclaration") .build());
sm.addScript(ScriptBuilder.addField() .fieldName("$iterableCopy") .fieldType("Lorg/eclipse/jdt/internal/compiler/ast/ASTNode;") .setPublic() .setTransient() .targetClass("org.eclipse.jdt.internal.compiler.ast.LocalDeclaration") .build()); sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget(PARSER_SIG, "consumeExitVariableWithInitialization", "void")) .request(StackRequest.THIS) .wrapMethod(new Hook("lombok.eclipse.agent.PatchValEclipsePortal", "copyInitializationOfLocalDeclaration", "void", "java.lang.Object")) .build()); sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget(PARSER_SIG, "consumeEnhancedForStatementHeader", "void")) .request(StackRequest.THIS) .wrapMethod(new Hook("lombok.eclipse.agent.PatchValEclipsePortal", "copyInitializationOfForEachIterable", "void", "java.lang.Object")) .build()); sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget(ASTCONVERTER_SIG, "setModifiers", "void", VARIABLEDECLARATIONSTATEMENT_SIG, LOCALDECLARATION_SIG)) .wrapMethod(new Hook("lombok.eclipse.agent.PatchValEclipsePortal", "addFinalAndValAnnotationToVariableDeclarationStatement", "void", "java.lang.Object", "java.lang.Object", "java.lang.Object")) .request(StackRequest.THIS, StackRequest.PARAM1, StackRequest.PARAM2).build()); sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget(ASTCONVERTER_SIG, "setModifiers", "void", SINGLEVARIABLEDECLARATION_SIG, LOCALDECLARATION_SIG)) .wrapMethod(new Hook("lombok.eclipse.agent.PatchValEclipsePortal", "addFinalAndValAnnotationToSingleVariableDeclaration", "void", "java.lang.Object", "java.lang.Object", "java.lang.Object")) .request(StackRequest.THIS, StackRequest.PARAM1, StackRequest.PARAM2).build()); }
// NOTE(review): addPatchesForVal continues beyond this view; left untouched.
private static void addPatchesForVal(ScriptManager sm) { final String LOCALDECLARATION_SIG = "org.eclipse.jdt.internal.compiler.ast.LocalDeclaration"; final String FOREACHSTATEMENT_SIG = "org.eclipse.jdt.internal.compiler.ast.ForeachStatement"; final String
EXPRESSION_SIG = "org.eclipse.jdt.internal.compiler.ast.Expression"; final String BLOCKSCOPE_SIG = "org.eclipse.jdt.internal.compiler.lookup.BlockScope"; final String TYPEBINDING_SIG = "org.eclipse.jdt.internal.compiler.lookup.TypeBinding"; sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget(LOCALDECLARATION_SIG, "resolve", "void", BLOCKSCOPE_SIG)) .request(StackRequest.THIS, StackRequest.PARAM1) .decisionMethod(new Hook("lombok.eclipse.agent.PatchVal", "handleValForLocalDeclaration", "boolean", LOCALDECLARATION_SIG, BLOCKSCOPE_SIG)) .build()); sm.addScript(ScriptBuilder.replaceMethodCall() .target(new MethodTarget(LOCALDECLARATION_SIG, "resolve", "void", BLOCKSCOPE_SIG)) .methodToReplace(new Hook(EXPRESSION_SIG, "resolveType", TYPEBINDING_SIG, BLOCKSCOPE_SIG)) .requestExtra(StackRequest.THIS) .replacementMethod(new Hook("lombok.eclipse.agent.PatchVal", "skipResolveInitializerIfAlreadyCalled2", TYPEBINDING_SIG, EXPRESSION_SIG, BLOCKSCOPE_SIG, LOCALDECLARATION_SIG)) .build()); sm.addScript(ScriptBuilder.replaceMethodCall() .target(new MethodTarget(FOREACHSTATEMENT_SIG, "resolve", "void", BLOCKSCOPE_SIG)) .methodToReplace(new Hook(EXPRESSION_SIG, "resolveType", TYPEBINDING_SIG, BLOCKSCOPE_SIG)) .replacementMethod(new Hook("lombok.eclipse.agent.PatchVal", "skipResolveInitializerIfAlreadyCalled", TYPEBINDING_SIG, EXPRESSION_SIG, BLOCKSCOPE_SIG)) .build()); sm.addScript(ScriptBuilder.exitEarly() .target(new MethodTarget(FOREACHSTATEMENT_SIG, "resolve", "void", BLOCKSCOPE_SIG)) .request(StackRequest.THIS, StackRequest.PARAM1) .decisionMethod(new Hook("lombok.eclipse.agent.PatchVal", "handleValForForEach", "boolean", FOREACHSTATEMENT_SIG, BLOCKSCOPE_SIG)) .build()); } private static void patchFixSourceTypeConverter(ScriptManager sm) { final String SOURCE_TYPE_CONVERTER_SIG = "org.eclipse.jdt.internal.compiler.parser.SourceTypeConverter"; final String I_ANNOTATABLE_SIG = "org.eclipse.jdt.core.IAnnotatable"; final String ANNOTATION_SIG = 
"org.eclipse.jdt.internal.compiler.ast.Annotation"; sm.addScript(ScriptBuilder.wrapReturnValue() .target(new MethodTarget(SOURCE_TYPE_CONVERTER_SIG, "convertAnnotations", ANNOTATION_SIG + "[]", I_ANNOTATABLE_SIG)) .wrapMethod(new Hook("lombok.eclipse.agent.PatchFixes", "convertAnnotations", ANNOTATION_SIG + "[]", ANNOTATION_SIG + "[]", I_ANNOTATABLE_SIG)) .request(StackRequest.PARAM1, StackRequest.RETURN_VALUE).build()); } private static void patchEclipseDebugPatches(ScriptManager sm) { final String ASTNODE_SIG = "org.eclipse.jdt.core.dom.ASTNode"; final String PATCH_DEBUG = "lombok.eclipse.agent.PatchDiagnostics"; sm.addScript(exitEarly() .target(new MethodTarget(ASTNODE_SIG, "setSourceRange", "void", "int", "int")) .request(StackRequest.THIS) .request(StackRequest.PARAM1) .request(StackRequest.PARAM2) .decisionMethod(new Hook(PATCH_DEBUG, "setSourceRangeCheck", "boolean", "java.lang.Object", "int", "int")) .build()); } private static void patchExtensionMethod(ScriptManager sm, boolean ecj) { final String PATCH_EXTENSIONMETHOD = "lombok.eclipse.agent.PatchExtensionMethod"; final String PATCH_EXTENSIONMETHOD_COMPLETIONPROPOSAL_PORTAL = "lombok.eclipse.agent.PatchExtensionMethodCompletionProposalPortal"; final String MESSAGE_SEND_SIG = "org.eclipse.jdt.internal.compiler.ast.MessageSend"; final String TYPE_BINDING_SIG = "org.eclipse.jdt.internal.compiler.lookup.TypeBinding"; final String BLOCK_SCOPE_SIG = "org.eclipse.jdt.internal.compiler.lookup.BlockScope"; final String TYPE_BINDINGS_SIG = "org.eclipse.jdt.internal.compiler.lookup.TypeBinding[]"; final String PROBLEM_REPORTER_SIG = "org.eclipse.jdt.internal.compiler.problem.ProblemReporter"; final String METHOD_BINDING_SIG = "org.eclipse.jdt.internal.compiler.lookup.MethodBinding"; final String COMPLETION_PROPOSAL_COLLECTOR_SIG = "org.eclipse.jdt.ui.text.java.CompletionProposalCollector"; final String I_JAVA_COMPLETION_PROPOSAL_SIG = "org.eclipse.jdt.ui.text.java.IJavaCompletionProposal[]"; 
sm.addScript(wrapReturnValue() .target(new MethodTarget(MESSAGE_SEND_SIG, "resolveType", TYPE_BINDING_SIG, BLOCK_SCOPE_SIG)) .request(StackRequest.RETURN_VALUE) .request(StackRequest.THIS) .request(StackRequest.PARAM1) .wrapMethod(new Hook(PATCH_EXTENSIONMETHOD, "resolveType", TYPE_BINDING_SIG, TYPE_BINDING_SIG, MESSAGE_SEND_SIG, BLOCK_SCOPE_SIG)) .build()); sm.addScript(replaceMethodCall() .target(new MethodTarget(MESSAGE_SEND_SIG, "resolveType", TYPE_BINDING_SIG, BLOCK_SCOPE_SIG)) .methodToReplace(new Hook(PROBLEM_REPORTER_SIG, "errorNoMethodFor", "void", MESSAGE_SEND_SIG, TYPE_BINDING_SIG, TYPE_BINDINGS_SIG)) .replacementMethod(new Hook(PATCH_EXTENSIONMETHOD, "errorNoMethodFor", "void", PROBLEM_REPORTER_SIG, MESSAGE_SEND_SIG, TYPE_BINDING_SIG, TYPE_BINDINGS_SIG)) .build()); sm.addScript(replaceMethodCall() .target(new MethodTarget(MESSAGE_SEND_SIG, "resolveType", TYPE_BINDING_SIG, BLOCK_SCOPE_SIG)) .methodToReplace(new Hook(PROBLEM_REPORTER_SIG, "invalidMethod", "void", MESSAGE_SEND_SIG, METHOD_BINDING_SIG)) .replacementMethod(new Hook(PATCH_EXTENSIONMETHOD, "invalidMethod", "void", PROBLEM_REPORTER_SIG, MESSAGE_SEND_SIG, METHOD_BINDING_SIG)) .build()); if (!ecj) { sm.addScript(wrapReturnValue() .target(new MethodTarget(COMPLETION_PROPOSAL_COLLECTOR_SIG, "getJavaCompletionProposals", I_JAVA_COMPLETION_PROPOSAL_SIG)) .request(StackRequest.RETURN_VALUE) .request(StackRequest.THIS) .wrapMethod(new Hook(PATCH_EXTENSIONMETHOD_COMPLETIONPROPOSAL_PORTAL, "getJavaCompletionProposals", I_JAVA_COMPLETION_PROPOSAL_SIG, "java.lang.Object[]", "java.lang.Object")) .build()); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * GYear.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: SNAPSHOT Built on : Dec 21, 2007 (04:03:30 LKT)
 */

package org.apache.axis2.databinding.types.soapencoding;

import javax.xml.stream.XMLStreamWriter;

/**
 * ADB bean wrapping a single SOAP-encoding {@code gYear} value.
 *
 * <p>Instances hold one {@link org.apache.axis2.databinding.types.Year} and
 * know how to serialize themselves to StAX ({@link #serialize}) and to
 * reconstruct themselves from a StAX stream ({@link Factory#parse}).</p>
 */
public class GYear implements org.apache.axis2.databinding.ADBBean {

    /* This type was generated from the piece of schema that had
       name = gYear
       Namespace URI = http://schemas.xmlsoap.org/soap/encoding/
       Namespace Prefix = ns1 */

    /**
     * Picks a prefix for the given namespace: the conventional
     * {@code SOAP-ENC} prefix for the SOAP encoding namespace, otherwise a
     * freshly generated unique prefix.
     *
     * @param namespace namespace URI to find a prefix for (must not be null)
     * @return a prefix suitable for binding to {@code namespace}
     */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://schemas.xmlsoap.org/soap/encoding/")) {
            return "SOAP-ENC";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for GYear
     */
    protected org.apache.axis2.databinding.types.Year localGYear;

    /**
     * Auto generated getter method
     * @return org.apache.axis2.databinding.types.Year
     */
    public org.apache.axis2.databinding.types.Year getGYear() {
        return localGYear;
    }

    /**
     * Auto generated setter method
     * @param param GYear
     */
    public void setGYear(org.apache.axis2.databinding.types.Year param) {
        this.localGYear = param;
    }

    /**
     * Returns the string form of the wrapped year.
     *
     * <p>FIX: the generated version dereferenced {@code localGYear} directly
     * and threw {@link NullPointerException} when the value had never been
     * set; {@code String.valueOf} yields {@code "null"} instead, matching the
     * general contract that {@code toString()} should not throw.</p>
     */
    public java.lang.String toString() {
        return java.lang.String.valueOf(localGYear);
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(
                    reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // Reader does not know the property at all -> not MTOM aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement.
     *
     * @param parentQName qualified name the element should carry
     * @param factory factory used to create the OM node
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory)
            throws org.apache.axis2.databinding.ADBException {
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this, parentQName);
        return factory.createOMElement(dataSource, parentQName);
    }

    /**
     * Serializes this bean without an {@code xsi:type} attribute.
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException,
                   org.apache.axis2.databinding.ADBException {
        serialize(parentQName, xmlWriter, false);
    }

    /**
     * Serializes this bean to the given StAX writer.
     *
     * @param parentQName element name to emit
     * @param xmlWriter destination writer
     * @param serializeType when true, also emit an {@code xsi:type="...gYear"} attribute
     * @throws org.apache.axis2.databinding.ADBException if the wrapped value is null
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          XMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException,
                   org.apache.axis2.databinding.ADBException {

        java.lang.String prefix = parentQName.getPrefix();
        java.lang.String namespace = parentQName.getNamespaceURI();

        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                // Namespace already bound on the writer; reuse its prefix.
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            // No namespace on the parent QName -> unqualified element.
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        if (serializeType) {
            java.lang.String namespacePrefix =
                    registerPrefix(xmlWriter, "http://schemas.xmlsoap.org/soap/encoding/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        namespacePrefix + ":gYear",
                        xmlWriter);
            } else {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        "gYear",
                        xmlWriter);
            }
        }

        if (localGYear == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("gYear cannot be null!!");
        } else {
            xmlWriter.writeCharacters(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localGYear));
        }

        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace,
                                java.lang.String attName, java.lang.String attValue,
                                javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue,
                                javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute, binding the QName's
     * namespace to a prefix first when necessary.
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname,
                                     javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }
            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" +
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    /**
     * Writes a whitespace-separated list of QNames as character data,
     * binding any unbound namespaces before emitting text.
     */
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }
                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(
                                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(
                                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter,
                                            java.lang.String namespace)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until we find a prefix not already bound.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     * @throws org.apache.axis2.databinding.ADBException if the wrapped value is null
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException {

        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(org.apache.axis2.databinding.utils.reader.ADBXMLStreamReader.ELEMENT_TEXT);
        if (localGYear != null) {
            elementList.add(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localGYear));
        } else {
            throw new org.apache.axis2.databinding.ADBException("gYear cannot be null!!");
        }

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(
                qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {

        /**
         * Builds a GYear from its lexical string form.
         *
         * @param value lexical gYear value
         * @param namespaceURI unused for this simple type; kept for generated-API parity
         */
        public static GYear fromString(java.lang.String value, java.lang.String namespaceURI) {
            GYear returnValue = new GYear();
            returnValue.setGYear(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToGYear(value));
            return returnValue;
        }

        /**
         * Builds a GYear from possibly prefix-qualified content, resolving the
         * prefix against the reader's namespace context.
         */
        public static GYear fromString(javax.xml.stream.XMLStreamReader xmlStreamReader,
                                       java.lang.String content) {
            if (content.indexOf(":") > -1) {
                java.lang.String prefix = content.substring(0, content.indexOf(":"));
                java.lang.String namespaceUri =
                        xmlStreamReader.getNamespaceContext().getNamespaceURI(prefix);
                return GYear.Factory.fromString(content, namespaceUri);
            } else {
                return GYear.Factory.fromString(content, "");
            }
        }

        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         *                If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         *                If this object is a complex type, the reader is positioned at the end element of its outer element
         *
         * <p>FIX vs. the generated version: removed five never-read locals
         * ({@code event}, {@code nillableValue}, {@code prefix},
         * {@code namespaceuri}, {@code handledAttributes}) and collapsed the
         * duplicated nested condition
         * {@code if (reader.isStartElement() || reader.hasText())} — the inner
         * test repeated the outer one verbatim, so its else branch (the
         * "Unexpected subelement" throw) was unreachable dead code. Behavior
         * is unchanged.</p>
         */
        public static GYear parse(javax.xml.stream.XMLStreamReader reader)
                throws java.lang.Exception {
            GYear object = new GYear();
            try {
                // Skip ignorable events until we sit on a start or end element.
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

                if (reader.getAttributeValue(
                        "http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                    java.lang.String fullTypeName = reader.getAttributeValue(
                            "http://www.w3.org/2001/XMLSchema-instance", "type");
                    if (fullTypeName != null) {
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1) {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;

                        java.lang.String type =
                                fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                        if (!"gYear".equals(type)) {
                            // xsi:type names a derived type -> delegate to the mapper.
                            //find namespace for the prefix
                            java.lang.String nsUri =
                                    reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (GYear) org.apache.axis2.databinding.types.soapencoding
                                    .ExtensionMapper.getTypeObject(nsUri, type, reader);
                        }
                    }
                }

                while (!reader.isEndElement()) {
                    if (reader.isStartElement() || reader.hasText()) {
                        java.lang.String content = reader.getElementText();
                        object.setGYear(
                                org.apache.axis2.databinding.utils.ConverterUtil
                                        .convertToGYear(content));
                    } else {
                        reader.next();
                    }
                }  // end of while loop
            } catch (javax.xml.stream.XMLStreamException e) {
                // Preserve the cause for callers of this generated API.
                throw new java.lang.Exception(e);
            }

            return object;
        }
    }//end of factory class
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.history.parser.datamodel; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.LinkedHashMultimap; import com.google.common.collect.LinkedListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.google.common.collect.Ordering; import org.apache.commons.collections.BidiMap; import org.apache.commons.collections.bidimap.DualHashBidiMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.StringInterner; import org.apache.tez.client.CallerContext; import org.apache.tez.dag.api.event.VertexState; import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import static org.apache.hadoop.classification.InterfaceAudience.Public; import static 
org.apache.hadoop.classification.InterfaceStability.Evolving; @Public @Evolving public class DagInfo extends BaseInfo { private static final Log LOG = LogFactory.getLog(DagInfo.class); //Fields populated via JSON private final String name; private final long startTime; private final long endTime; private final long submitTime; private final int failedTasks; private final String dagId; private final int numVertices; private final String status; private final String diagnostics; private VersionInfo versionInfo; private CallerContext callerContext; //VertexID --> VertexName & vice versa private final BidiMap vertexNameIDMapping; //edgeId to EdgeInfo mapping private final Map<Integer, EdgeInfo> edgeInfoMap; //Only for internal parsing (vertexname mapping) private Map<String, BasicVertexInfo> basicVertexInfoMap; //VertexName --> VertexInfo private Map<String, VertexInfo> vertexNameMap; private Multimap<Container, TaskAttemptInfo> containerMapping; DagInfo(JSONObject jsonObject) throws JSONException { super(jsonObject); vertexNameMap = Maps.newHashMap(); vertexNameIDMapping = new DualHashBidiMap(); edgeInfoMap = Maps.newHashMap(); basicVertexInfoMap = Maps.newHashMap(); containerMapping = LinkedHashMultimap.create(); Preconditions.checkArgument(jsonObject.getString(Constants.ENTITY_TYPE).equalsIgnoreCase (Constants.TEZ_DAG_ID)); dagId = StringInterner.weakIntern(jsonObject.getString(Constants.ENTITY)); //Parse additional Info JSONObject otherInfoNode = jsonObject.getJSONObject(Constants.OTHER_INFO); startTime = otherInfoNode.optLong(Constants.START_TIME); endTime = otherInfoNode.optLong(Constants.FINISH_TIME); //TODO: Not getting populated correctly for lots of jobs. 
Verify submitTime = otherInfoNode.optLong(Constants.START_REQUESTED_TIME); diagnostics = otherInfoNode.optString(Constants.DIAGNOSTICS); failedTasks = otherInfoNode.optInt(Constants.NUM_FAILED_TASKS); JSONObject dagPlan = otherInfoNode.optJSONObject(Constants.DAG_PLAN); name = StringInterner.weakIntern((dagPlan != null) ? (dagPlan.optString(Constants.DAG_NAME)) : null); if (dagPlan != null) { JSONArray vertices = dagPlan.optJSONArray(Constants.VERTICES); if (vertices != null) { numVertices = vertices.length(); } else { numVertices = 0; } parseDAGPlan(dagPlan); } else { numVertices = 0; } status = StringInterner.weakIntern(otherInfoNode.optString(Constants.STATUS)); //parse name id mapping JSONObject vertexIDMappingJson = otherInfoNode.optJSONObject(Constants.VERTEX_NAME_ID_MAPPING); if (vertexIDMappingJson != null) { //get vertex name for (Map.Entry<String, BasicVertexInfo> entry : basicVertexInfoMap.entrySet()) { String vertexId = vertexIDMappingJson.optString(entry.getKey()); //vertexName --> vertexId vertexNameIDMapping.put(entry.getKey(), vertexId); } } } public static DagInfo create(JSONObject jsonObject) throws JSONException { DagInfo dagInfo = new DagInfo(jsonObject); return dagInfo; } private void parseDAGPlan(JSONObject dagPlan) throws JSONException { int version = dagPlan.optInt(Constants.VERSION, 1); parseEdges(dagPlan.optJSONArray(Constants.EDGES)); JSONArray verticesInfo = dagPlan.optJSONArray(Constants.VERTICES); parseBasicVertexInfo(verticesInfo); if (version > 1) { parseDAGContext(dagPlan.optJSONObject(Constants.DAG_CONTEXT)); } } private void parseDAGContext(JSONObject callerContextInfo) { if (callerContextInfo == null) { LOG.info("No DAG Caller Context available"); return; } String context = callerContextInfo.optString(Constants.CONTEXT); String callerId = callerContextInfo.optString(Constants.CALLER_ID); String callerType = callerContextInfo.optString(Constants.CALLER_TYPE); String description = callerContextInfo.optString(Constants.DESCRIPTION); 
this.callerContext = CallerContext.create(context, description); if (callerId != null && !callerId.isEmpty() && callerType != null && !callerType.isEmpty()) { this.callerContext.setCallerIdAndType(callerId, callerType); } else { LOG.info("No DAG Caller Context Id and Type available"); } } private void parseBasicVertexInfo(JSONArray verticesInfo) throws JSONException { if (verticesInfo == null) { LOG.info("No vertices available."); return; } //Parse basic information available in DAG for vertex and edges for (int i = 0; i < verticesInfo.length(); i++) { BasicVertexInfo basicVertexInfo = new BasicVertexInfo(); JSONObject vJson = verticesInfo.getJSONObject(i); basicVertexInfo.vertexName = vJson.optString(Constants.VERTEX_NAME); JSONArray inEdges = vJson.optJSONArray(Constants.IN_EDGE_IDS); if (inEdges != null) { String[] inEdgeIds = new String[inEdges.length()]; for (int j = 0; j < inEdges.length(); j++) { inEdgeIds[j] = inEdges.get(j).toString(); } basicVertexInfo.inEdgeIds = inEdgeIds; } JSONArray outEdges = vJson.optJSONArray(Constants.OUT_EDGE_IDS); if (outEdges != null) { String[] outEdgeIds = new String[outEdges.length()]; for (int j = 0; j < outEdges.length(); j++) { outEdgeIds[j] = outEdges.get(j).toString(); } basicVertexInfo.outEdgeIds = outEdgeIds; } JSONArray addInputsJson = vJson.optJSONArray(Constants.ADDITIONAL_INPUTS); basicVertexInfo.additionalInputs = parseAdditionalDetailsForVertex(addInputsJson); JSONArray addOutputsJson = vJson.optJSONArray(Constants.ADDITIONAL_OUTPUTS); basicVertexInfo.additionalOutputs = parseAdditionalDetailsForVertex(addOutputsJson); basicVertexInfoMap.put(basicVertexInfo.vertexName, basicVertexInfo); } } /** * get additional details available for every vertex in the dag * * @param jsonArray * @return AdditionalInputOutputDetails[] * @throws JSONException */ private AdditionalInputOutputDetails[] parseAdditionalDetailsForVertex(JSONArray jsonArray) throws JSONException { if (jsonArray != null) { AdditionalInputOutputDetails[] 
additionalInputOutputDetails = new AdditionalInputOutputDetails[jsonArray.length()]; for (int j = 0; j < jsonArray.length(); j++) { String name = jsonArray.getJSONObject(j).optString( Constants.NAME); String clazz = jsonArray.getJSONObject(j).optString( Constants.CLASS); String initializer = jsonArray.getJSONObject(j).optString(Constants.INITIALIZER); String userPayloadText = jsonArray.getJSONObject(j).optString( Constants.USER_PAYLOAD_TEXT); additionalInputOutputDetails[j] = new AdditionalInputOutputDetails(name, clazz, initializer, userPayloadText); } return additionalInputOutputDetails; } return null; } /** * Parse edge details in the DAG * * @param edgesArray * * @throws JSONException */ private void parseEdges(JSONArray edgesArray) throws JSONException { if (edgesArray == null) { return; } for (int i = 0; i < edgesArray.length(); i++) { JSONObject edge = edgesArray.getJSONObject(i); Integer edgeId = edge.optInt(Constants.EDGE_ID); String inputVertexName = edge.optString(Constants.INPUT_VERTEX_NAME); String outputVertexName = edge.optString(Constants.OUTPUT_VERTEX_NAME); String dataMovementType = edge.optString(Constants.DATA_MOVEMENT_TYPE); String edgeSourceClass = edge.optString(Constants.EDGE_SOURCE_CLASS); String edgeDestinationClass = edge.optString(Constants.EDGE_DESTINATION_CLASS); String inputUserPayloadAsText = edge.optString(Constants.INPUT_PAYLOAD_TEXT); String outputUserPayloadAsText = edge.optString(Constants.OUTPUT_PAYLOAD_TEXT); EdgeInfo edgeInfo = new EdgeInfo(inputVertexName, outputVertexName, dataMovementType, edgeSourceClass, edgeDestinationClass, inputUserPayloadAsText, outputUserPayloadAsText); edgeInfoMap.put(edgeId, edgeInfo); } } static class BasicVertexInfo { String vertexName; String[] inEdgeIds; String[] outEdgeIds; AdditionalInputOutputDetails[] additionalInputs; AdditionalInputOutputDetails[] additionalOutputs; } void addVertexInfo(VertexInfo vertexInfo) { BasicVertexInfo basicVertexInfo = 
basicVertexInfoMap.get(vertexInfo.getVertexName()); Preconditions.checkArgument(basicVertexInfo != null, "VerteName " + vertexInfo.getVertexName() + " not present in DAG's vertices " + basicVertexInfoMap.entrySet()); //populate additional information in VertexInfo if (basicVertexInfo.additionalInputs != null) { vertexInfo.setAdditionalInputInfoList(Arrays.asList(basicVertexInfo.additionalInputs)); } if (basicVertexInfo.additionalOutputs != null) { vertexInfo.setAdditionalOutputInfoList(Arrays.asList(basicVertexInfo.additionalOutputs)); } //Populate edge information in vertex if (basicVertexInfo.inEdgeIds != null) { for (String edge : basicVertexInfo.inEdgeIds) { EdgeInfo edgeInfo = edgeInfoMap.get(Integer.parseInt(edge)); Preconditions.checkState(edgeInfo != null, "EdgeId " + edge + " not present in DAG"); vertexInfo.addInEdge(edgeInfo); } } if (basicVertexInfo.outEdgeIds != null) { for (String edge : basicVertexInfo.outEdgeIds) { EdgeInfo edgeInfo = edgeInfoMap.get(Integer.parseInt(edge)); Preconditions.checkState(edgeInfo != null, "EdgeId " + edge + " not present in DAG"); vertexInfo.addOutEdge(edgeInfo); } } vertexNameMap.put(vertexInfo.getVertexName(), vertexInfo); } void setVersionInfo(VersionInfo versionInfo) { this.versionInfo = versionInfo; } void addContainerMapping(Container container, TaskAttemptInfo taskAttemptInfo) { this.containerMapping.put(container, taskAttemptInfo); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("["); sb.append("dagID=").append(getDagId()).append(", "); sb.append("dagName=").append(getName()).append(", "); sb.append("status=").append(getStatus()).append(", "); sb.append("startTime=").append(getStartTimeInterval()).append(", "); sb.append("submitTime=").append(getSubmitTime()).append(", "); sb.append("endTime=").append(getFinishTimeInterval()).append(", "); sb.append("timeTaken=").append(getTimeTaken()).append(", "); sb.append("diagnostics=").append(getDiagnostics()).append(", "); 
sb.append("vertexNameIDMapping=").append(getVertexNameIDMapping()).append(", "); sb.append("failedTasks=").append(getFailedTaskCount()).append(", "); sb.append("events=").append(getEvents()).append(", "); sb.append("status=").append(getStatus()); sb.append("]"); return sb.toString(); } public Multimap<Container, TaskAttemptInfo> getContainerMapping() { return Multimaps.unmodifiableMultimap(containerMapping); } public final VersionInfo getVersionInfo() { return versionInfo; } public final CallerContext getCallerContext() { return callerContext; } public final String getName() { return name; } public final Collection<EdgeInfo> getEdges() { return Collections.unmodifiableCollection(edgeInfoMap.values()); } public final long getSubmitTime() { return submitTime; } public final long getStartTime() { return startTime; } public final long getFinishTime() { return endTime; } /** * Reference start time for the DAG. Vertex, Task, TaskAttempt would map on to this. * If absolute start time is needed, call getAbsStartTime(). * * @return starting time w.r.t to dag */ public final long getStartTimeInterval() { return 0; } @Override public final long getFinishTimeInterval() { long dagEndTime = (endTime - startTime); if (dagEndTime < 0) { //probably dag is not complete or failed in middle. get the last task attempt time for (VertexInfo vertexInfo : getVertices()) { dagEndTime = (vertexInfo.getFinishTimeInterval() > dagEndTime) ? 
vertexInfo.getFinishTimeInterval() : dagEndTime; } } return dagEndTime; } public final long getTimeTaken() { return getFinishTimeInterval(); } public final String getStatus() { return status; } /** * Get vertexInfo for a given vertexid * * @param vertexId * @return VertexInfo */ public VertexInfo getVertexFromId(String vertexId) { return vertexNameMap.get(vertexNameIDMapping.getKey(vertexId)); } /** * Get vertexInfo for a given vertex name * * @param vertexName * @return VertexInfo */ public final VertexInfo getVertex(String vertexName) { return vertexNameMap.get(vertexName); } public final String getDiagnostics() { return diagnostics; } /** * Get all vertices * * @return List<VertexInfo> */ public final List<VertexInfo> getVertices() { List<VertexInfo> vertices = Lists.newLinkedList(vertexNameMap.values()); Collections.sort(vertices, new Comparator<VertexInfo>() { @Override public int compare(VertexInfo o1, VertexInfo o2) { return (o1.getStartTimeInterval() < o2.getStartTimeInterval()) ? -1 : ((o1.getStartTimeInterval() == o2.getStartTimeInterval()) ? 
0 : 1); } }); return Collections.unmodifiableList(vertices); } /** * Get list of failed vertices * * @return List<VertexInfo> */ public final List<VertexInfo> getFailedVertices() { return getVertices(VertexState.FAILED); } /** * Get list of killed vertices * * @return List<VertexInfo> */ public final List<VertexInfo> getKilledVertices() { return getVertices(VertexState.KILLED); } /** * Get list of failed vertices * * @return List<VertexInfo> */ public final List<VertexInfo> getSuccessfullVertices() { return getVertices(VertexState.SUCCEEDED); } /** * Get list of vertices belonging to a specific state * * @param state * @return Collection<VertexInfo> */ public final List<VertexInfo> getVertices(final VertexState state) { return Collections.unmodifiableList(Lists.newLinkedList(Iterables.filter(Lists.newLinkedList (vertexNameMap.values()), new Predicate<VertexInfo>() { @Override public boolean apply(VertexInfo input) { return input.getStatus() != null && input.getStatus().equals(state.toString()); } } ) ) ); } public final Map<String, VertexInfo> getVertexMapping() { return Collections.unmodifiableMap(vertexNameMap); } private Ordering<VertexInfo> getVertexOrdering() { return Ordering.from(new Comparator<VertexInfo>() { @Override public int compare(VertexInfo o1, VertexInfo o2) { return (o1.getTimeTaken() < o2.getTimeTaken()) ? -1 : ((o1.getTimeTaken() == o2.getTimeTaken()) ? 0 : 1); } }); } /** * Get the slowest vertex in the DAG * * @return VertexInfo */ public final VertexInfo getSlowestVertex() { List<VertexInfo> vertexInfoList = getVertices(); if (vertexInfoList.size() == 0) { return null; } return getVertexOrdering().max(vertexInfoList); } /** * Get the slowest vertex in the DAG * * @return VertexInfo */ public final VertexInfo getFastestVertex() { List<VertexInfo> vertexInfoList = getVertices(); if (vertexInfoList.size() == 0) { return null; } return getVertexOrdering().min(vertexInfoList); } /** * Get node details for this DAG. 
Would be useful for analyzing node to tasks. * * @return Multimap<String, TaskAttemptInfo> taskAttempt details at every node */ public final Multimap<String, TaskAttemptInfo> getNodeDetails() { Multimap<String, TaskAttemptInfo> nodeDetails = LinkedListMultimap.create(); for (VertexInfo vertexInfo : getVertices()) { Multimap<Container, TaskAttemptInfo> containerMapping = vertexInfo.getContainersMapping(); for (Map.Entry<Container, TaskAttemptInfo> entry : containerMapping.entries()) { nodeDetails.put(entry.getKey().getHost(), entry.getValue()); } } return nodeDetails; } /** * Get containers used for this DAG * * @return Multimap<Container, TaskAttemptInfo> task attempt details at every container */ public final Multimap<Container, TaskAttemptInfo> getContainersToTaskAttemptMapping() { List<VertexInfo> VertexInfoList = getVertices(); Multimap<Container, TaskAttemptInfo> containerMapping = LinkedHashMultimap.create(); for (VertexInfo vertexInfo : VertexInfoList) { containerMapping.putAll(vertexInfo.getContainersMapping()); } return Multimaps.unmodifiableMultimap(containerMapping); } public final Map getVertexNameIDMapping() { return vertexNameIDMapping; } public final int getNumVertices() { return numVertices; } public final String getDagId() { return dagId; } public final int getFailedTaskCount() { return failedTasks; } }
package org.ovirt.engine.api.restapi.resource;

import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;

import java.util.ArrayList;
import java.util.List;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

import org.junit.Ignore;
import org.junit.Test;
import org.ovirt.engine.api.model.Cluster;
import org.ovirt.engine.api.model.CpuProfile;
import org.ovirt.engine.api.model.Fault;
import org.ovirt.engine.core.common.action.CpuProfileParameters;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.queries.IdQueryParameters;
import org.ovirt.engine.core.common.queries.VdcQueryParametersBase;
import org.ovirt.engine.core.common.queries.VdcQueryReturnValue;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Guid;

/**
 * Shared EasyMock-based tests for CPU-profile REST collection resources.
 * Subclasses supply the concrete resource plus the backend query used to list
 * profiles; this class covers add (success, can-do failure, backend failure,
 * incomplete parameters) and list (success, failure, crash) scenarios.
 * Constants such as GUIDS/NAMES/DESCRIPTIONS and helpers such as
 * setUpCreationExpectations come from AbstractBackendCollectionResourceTest.
 */
public abstract class AbstractBackendCpuProfilesResourceTest<C extends AbstractBackendCpuProfilesResource>
        extends AbstractBackendCollectionResourceTest<CpuProfile, org.ovirt.engine.core.common.businessentities.profiles.CpuProfile, C> {

    protected static final Guid CLUSTER_ID = GUIDS[1];
    // Query type and parameter class used by the concrete resource to list profiles.
    private final VdcQueryType listQueryType;
    private final Class<? extends VdcQueryParametersBase> listQueryParamsClass;

    public AbstractBackendCpuProfilesResourceTest(C collection,
            VdcQueryType listQueryType,
            Class<? extends VdcQueryParametersBase> queryParamsClass) {
        super(collection, null, "");
        this.listQueryType = listQueryType;
        this.listQueryParamsClass = queryParamsClass;
    }

    /** Happy path: adding a profile returns 201 with the created entity in the body. */
    @Test
    public void testAddCpuProfile() throws Exception {
        setUriInfo(setUpBasicUriExpectations());
        setUpClusterQueryExpectations();
        // Expect the Add action followed by the lookup of the created profile by id.
        setUpCreationExpectations(VdcActionType.AddCpuProfile,
                CpuProfileParameters.class,
                new String[] {},
                new Object[] {},
                true,
                true,
                GUIDS[0],
                VdcQueryType.GetCpuProfileById,
                IdQueryParameters.class,
                new String[] { "Id" },
                new Object[] { Guid.Empty },
                getEntity(0));
        CpuProfile model = getModel(0);
        model.setCluster(new Cluster());
        model.getCluster().setId(CLUSTER_ID.toString());

        Response response = collection.add(model);
        assertEquals(201, response.getStatus());
        assertTrue(response.getEntity() instanceof CpuProfile);
        verifyModel((CpuProfile) response.getEntity(), 0);
    }

    /** Backend reports the action cannot be performed (canDoAction == false). */
    @Test
    public void testAddCpuProfileCantDo() throws Exception {
        setUpClusterQueryExpectations();
        doTestBadAddCpuProfile(false, true, CANT_DO);
    }

    /** Backend accepts the action but execution fails. */
    @Test
    public void testAddCpuProfileFailure() throws Exception {
        setUpClusterQueryExpectations();
        doTestBadAddCpuProfile(true, false, FAILURE);
    }

    // Common body for the two failing add-scenarios above; asserts the fault detail.
    private void doTestBadAddCpuProfile(boolean valid, boolean success, String detail) throws Exception {
        setUriInfo(setUpActionExpectations(VdcActionType.AddCpuProfile,
                CpuProfileParameters.class,
                new String[] {},
                new Object[] {},
                valid,
                success));
        CpuProfile model = getModel(0);
        model.setCluster(new Cluster());
        model.getCluster().setId(CLUSTER_ID.toString());

        try {
            collection.add(model);
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            verifyFault(wae, detail);
        }
    }

    /** A profile missing mandatory fields must be rejected before hitting the backend. */
    @Test
    public void testAddIncompleteParameters() throws Exception {
        CpuProfile model = createIncompleteCpuProfile();
        setUriInfo(setUpBasicUriExpectations());
        control.replay();
        try {
            collection.add(model);
            fail("expected WebApplicationException on incomplete parameters");
        } catch (WebApplicationException wae) {
            verifyIncompleteException(wae, "CpuProfile", "validateParameters", getIncompleteFields());
        }
    }

    // Fields reported as missing for the incomplete-parameters test; overridable.
    protected String[] getIncompleteFields() {
        return new String[] { "name" };
    }

    // Model deliberately missing the mandatory fields above; overridable.
    protected CpuProfile createIncompleteCpuProfile() {
        return new CpuProfile();
    }

    /** Disabled: the generic query test does not apply to this collection. */
    @Test
    @Ignore
    @Override
    public void testQuery() throws Exception {
    }

    @Override
    @Test
    public void testList() throws Exception {
        UriInfo uriInfo = setUpUriExpectations(null);
        setUpCpuProfilesQueryExpectations(null);
        control.replay();
        collection.setUriInfo(uriInfo);
        verifyCollection(getCollection());
    }

    @Override
    @Test
    public void testListFailure() throws Exception {
        setUpCpuProfilesQueryExpectations(FAILURE);
        UriInfo uriInfo = setUpUriExpectations(null);
        collection.setUriInfo(uriInfo);
        control.replay();

        try {
            getCollection();
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            assertTrue(wae.getResponse().getEntity() instanceof Fault);
            assertEquals(mockl10n(FAILURE), ((Fault) wae.getResponse().getEntity()).getDetail());
        }
    }

    @Override
    @Test
    public void testListCrash() throws Exception {
        Throwable t = new RuntimeException(FAILURE);
        setUpCpuProfilesQueryExpectations(t);
        UriInfo uriInfo = setUpUriExpectations(null);
        collection.setUriInfo(uriInfo);
        control.replay();

        try {
            getCollection();
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            verifyFault(wae, BACKEND_FAILED_SERVER_LOCALE, t);
        }
    }

    @Override
    @Test
    public void testListCrashClientLocale() throws Exception {
        UriInfo uriInfo = setUpUriExpectations(null);
        locales.add(CLIENT_LOCALE);

        Throwable t = new RuntimeException(FAILURE);
        setUpCpuProfilesQueryExpectations(t);
        collection.setUriInfo(uriInfo);
        control.replay();

        try {
            getCollection();
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            verifyFault(wae, BACKEND_FAILED_CLIENT_LOCALE, t);
        } finally {
            // restore shared state so later tests use the default locale
            locales.clear();
        }
    }

    /**
     * Records the backend list-query expectations.
     *
     * @param failure null for success; a String for a reported query failure;
     *                a Throwable to make the query itself throw.
     */
    private void setUpCpuProfilesQueryExpectations(Object failure) {
        VdcQueryReturnValue queryResult = control.createMock(VdcQueryReturnValue.class);
        expect(queryResult.getSucceeded()).andReturn(failure == null).anyTimes();
        List<org.ovirt.engine.core.common.businessentities.profiles.CpuProfile> entities = new ArrayList<>();

        if (failure == null) {
            for (int i = 0; i < NAMES.length; i++) {
                entities.add(getEntity(i));
            }
            expect(queryResult.getReturnValue()).andReturn(entities).anyTimes();
        } else {
            if (failure instanceof String) {
                expect(queryResult.getExceptionString()).andReturn((String) failure).anyTimes();
                setUpL10nExpectations((String) failure);
            } else if (failure instanceof Exception) {
                // the query throws instead of returning a result — no runQuery/andReturn below
                expect(backend.runQuery(eq(listQueryType), anyObject(listQueryParamsClass))).andThrow((Exception) failure).anyTimes();
                return;
            }
        }
        expect(backend.runQuery(eq(listQueryType), anyObject(listQueryParamsClass))).andReturn(
                queryResult);
    }

    /**
     * Records {@code times} GetCpuProfileById expectations for GUIDS[index];
     * returns null when {@code notFound} is set.
     */
    protected void setUpEntityQueryExpectations(int times, int index, boolean notFound) throws Exception {
        while (times-- > 0) {
            setUpEntityQueryExpectations(VdcQueryType.GetCpuProfileById,
                    IdQueryParameters.class,
                    new String[] { "Id" },
                    new Object[] { GUIDS[index] },
                    notFound ? null : getEntity(index));
        }
    }

    // Builds the REST model for fixture index {@code index}.
    static CpuProfile getModel(int index) {
        CpuProfile model = new CpuProfile();
        model.setId(GUIDS[index].toString());
        model.setName(NAMES[index]);
        model.setDescription(DESCRIPTIONS[index]);
        return model;
    }

    // Builds the full list of backend entities matching the NAMES fixture.
    protected List<org.ovirt.engine.core.common.businessentities.profiles.CpuProfile> getEntityList() {
        List<org.ovirt.engine.core.common.businessentities.profiles.CpuProfile> entities = new ArrayList<>();
        for (int i = 0; i < NAMES.length; i++) {
            entities.add(getEntity(i));
        }
        return entities;
    }

    @Override
    protected org.ovirt.engine.core.common.businessentities.profiles.CpuProfile getEntity(int index) {
        return setUpEntityExpectations(control.createMock(org.ovirt.engine.core.common.businessentities.profiles.CpuProfile.class),
                index);
    }

    // Hook for subclasses whose add-flow first queries the cluster; no-op by default.
    protected void setUpClusterQueryExpectations() {
    }

    // Stubs the getters of a mocked backend entity with the fixture values.
    static org.ovirt.engine.core.common.businessentities.profiles.CpuProfile setUpEntityExpectations(org.ovirt.engine.core.common.businessentities.profiles.CpuProfile entity,
            int index) {
        expect(entity.getId()).andReturn(GUIDS[index]).anyTimes();
        expect(entity.getName()).andReturn(NAMES[index]).anyTimes();
        expect(entity.getDescription()).andReturn(DESCRIPTIONS[index]).anyTimes();
        expect(entity.getClusterId()).andReturn(GUIDS[index]).anyTimes();
        return entity;
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */
// NOTE(review): generated file — prefer regenerating over hand-editing; the review
// comments below were added for context only and do not change any code.

package com.google.api.services.reports.model;

/**
 * Model definition for UsageReports.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Admin Reports API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class UsageReports extends com.google.api.client.json.GenericJson {

  /**
   * ETag of the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String etag;

  /**
   * The type of API resource. For a usage report, the value is admin#reports#usageReports.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * Token to specify next page. A report with multiple pages has a nextPageToken property in the
   * response. For your follow-on requests getting all of the report's pages, enter the
   * nextPageToken value in the pageToken query string.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;

  /**
   * Various application parameter records.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<UsageReport> usageReports;

  static {
    // hack to force ProGuard to consider UsageReport used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(UsageReport.class);
  }

  /**
   * Warnings, if any.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Warnings> warnings;

  static {
    // hack to force ProGuard to consider Warnings used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(Warnings.class);
  }

  /**
   * ETag of the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getEtag() {
    return etag;
  }

  /**
   * ETag of the resource.
   * @param etag etag or {@code null} for none
   */
  public UsageReports setEtag(java.lang.String etag) {
    this.etag = etag;
    return this;
  }

  /**
   * The type of API resource. For a usage report, the value is admin#reports#usageReports.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * The type of API resource. For a usage report, the value is admin#reports#usageReports.
   * @param kind kind or {@code null} for none
   */
  public UsageReports setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * Token to specify next page. A report with multiple pages has a nextPageToken property in the
   * response. For your follow-on requests getting all of the report's pages, enter the
   * nextPageToken value in the pageToken query string.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }

  /**
   * Token to specify next page. A report with multiple pages has a nextPageToken property in the
   * response. For your follow-on requests getting all of the report's pages, enter the
   * nextPageToken value in the pageToken query string.
   * @param nextPageToken nextPageToken or {@code null} for none
   */
  public UsageReports setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  /**
   * Various application parameter records.
   * @return value or {@code null} for none
   */
  public java.util.List<UsageReport> getUsageReports() {
    return usageReports;
  }

  /**
   * Various application parameter records.
   * @param usageReports usageReports or {@code null} for none
   */
  public UsageReports setUsageReports(java.util.List<UsageReport> usageReports) {
    this.usageReports = usageReports;
    return this;
  }

  /**
   * Warnings, if any.
   * @return value or {@code null} for none
   */
  public java.util.List<Warnings> getWarnings() {
    return warnings;
  }

  /**
   * Warnings, if any.
   * @param warnings warnings or {@code null} for none
   */
  public UsageReports setWarnings(java.util.List<Warnings> warnings) {
    this.warnings = warnings;
    return this;
  }

  // Typed overrides so chained set()/clone() calls keep the UsageReports type.
  @Override
  public UsageReports set(String fieldName, Object value) {
    return (UsageReports) super.set(fieldName, value);
  }

  @Override
  public UsageReports clone() {
    return (UsageReports) super.clone();
  }

  /**
   * Model definition for UsageReportsWarnings.
   */
  public static final class Warnings extends com.google.api.client.json.GenericJson {

    /**
     * Machine readable code or warning type. The warning code value is 200.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String code;

    /**
     * Key-value pairs to give detailed information on the warning.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.util.List<Data> data;

    static {
      // hack to force ProGuard to consider Data used, since otherwise it would be stripped out
      // see https://github.com/google/google-api-java-client/issues/543
      com.google.api.client.util.Data.nullOf(Data.class);
    }

    /**
     * The human readable messages for a warning are: - Data is not available warning - Sorry, data
     * for date yyyy-mm-dd for application "application name" is not available. - Partial data is
     * available warning - Data for date yyyy-mm-dd for application "application name" is not
     * available right now, please try again after a few hours.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String message;

    /**
     * Machine readable code or warning type. The warning code value is 200.
     * @return value or {@code null} for none
     */
    public java.lang.String getCode() {
      return code;
    }

    /**
     * Machine readable code or warning type. The warning code value is 200.
     * @param code code or {@code null} for none
     */
    public Warnings setCode(java.lang.String code) {
      this.code = code;
      return this;
    }

    /**
     * Key-value pairs to give detailed information on the warning.
     * @return value or {@code null} for none
     */
    public java.util.List<Data> getData() {
      return data;
    }

    /**
     * Key-value pairs to give detailed information on the warning.
     * @param data data or {@code null} for none
     */
    public Warnings setData(java.util.List<Data> data) {
      this.data = data;
      return this;
    }

    /**
     * The human readable messages for a warning are: - Data is not available warning - Sorry, data
     * for date yyyy-mm-dd for application "application name" is not available. - Partial data is
     * available warning - Data for date yyyy-mm-dd for application "application name" is not
     * available right now, please try again after a few hours.
     * @return value or {@code null} for none
     */
    public java.lang.String getMessage() {
      return message;
    }

    /**
     * The human readable messages for a warning are: - Data is not available warning - Sorry, data
     * for date yyyy-mm-dd for application "application name" is not available. - Partial data is
     * available warning - Data for date yyyy-mm-dd for application "application name" is not
     * available right now, please try again after a few hours.
     * @param message message or {@code null} for none
     */
    public Warnings setMessage(java.lang.String message) {
      this.message = message;
      return this;
    }

    // Typed overrides so chained set()/clone() calls keep the Warnings type.
    @Override
    public Warnings set(String fieldName, Object value) {
      return (Warnings) super.set(fieldName, value);
    }

    @Override
    public Warnings clone() {
      return (Warnings) super.clone();
    }

    /**
     * Model definition for UsageReportsWarningsData.
     */
    public static final class Data extends com.google.api.client.json.GenericJson {

      /**
       * Key associated with a key-value pair to give detailed information on the warning.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String key;

      /**
       * Value associated with a key-value pair to give detailed information on the warning.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String value;

      /**
       * Key associated with a key-value pair to give detailed information on the warning.
       * @return value or {@code null} for none
       */
      public java.lang.String getKey() {
        return key;
      }

      /**
       * Key associated with a key-value pair to give detailed information on the warning.
       * @param key key or {@code null} for none
       */
      public Data setKey(java.lang.String key) {
        this.key = key;
        return this;
      }

      /**
       * Value associated with a key-value pair to give detailed information on the warning.
       * @return value or {@code null} for none
       */
      public java.lang.String getValue() {
        return value;
      }

      /**
       * Value associated with a key-value pair to give detailed information on the warning.
       * @param value value or {@code null} for none
       */
      public Data setValue(java.lang.String value) {
        this.value = value;
        return this;
      }

      // Typed overrides so chained set()/clone() calls keep the Data type.
      @Override
      public Data set(String fieldName, Object value) {
        return (Data) super.set(fieldName, value);
      }

      @Override
      public Data clone() {
        return (Data) super.clone();
      }
    }
  }
}
/* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.io; import com.google.api.client.json.JsonFactory; import com.google.api.services.bigquery.Bigquery; import com.google.api.services.bigquery.model.TableReference; import com.google.api.services.bigquery.model.TableRow; import com.google.api.services.bigquery.model.TableSchema; import com.google.cloud.dataflow.sdk.coders.Coder; import com.google.cloud.dataflow.sdk.coders.TableRowJsonCoder; import com.google.cloud.dataflow.sdk.coders.VoidCoder; import com.google.cloud.dataflow.sdk.options.BigQueryOptions; import com.google.cloud.dataflow.sdk.options.GcpOptions; import com.google.cloud.dataflow.sdk.runners.DirectPipelineRunner; import com.google.cloud.dataflow.sdk.runners.worker.BigQueryReader; import com.google.cloud.dataflow.sdk.transforms.DoFn; import com.google.cloud.dataflow.sdk.transforms.PTransform; import com.google.cloud.dataflow.sdk.transforms.ParDo; import com.google.cloud.dataflow.sdk.transforms.windowing.GlobalWindows; import com.google.cloud.dataflow.sdk.util.BigQueryTableInserter; import com.google.cloud.dataflow.sdk.util.ReaderUtils; import com.google.cloud.dataflow.sdk.util.Transport; import com.google.cloud.dataflow.sdk.util.WindowedValue; import com.google.cloud.dataflow.sdk.values.KV; import com.google.cloud.dataflow.sdk.values.PCollection; import com.google.cloud.dataflow.sdk.values.PDone; import com.google.cloud.dataflow.sdk.values.PInput; 
import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Transformations for reading and writing * <a href="https://developers.google.com/bigquery/">BigQuery</a> tables. * <p><h3>Table References</h3> * A fully-qualified BigQuery table name consists of three components: * <ul> * <li>{@code projectId}: the Cloud project id (defaults to * {@link GcpOptions#getProject()}). * <li>{@code datasetId}: the BigQuery dataset id, unique within a project. * <li>{@code tableId}: a table id, unique within a dataset. * </ul> * <p> * BigQuery table references are stored as a {@link TableReference}, which comes * from the <a href="https://cloud.google.com/bigquery/client-libraries"> * BigQuery Java Client API</a>. * Tables can be referred to as Strings, with or without the {@code projectId}. * A helper function is provided ({@link BigQueryIO#parseTableSpec(String)}), * which parses the following string forms into a {@link TableReference}: * <ul> * <li>[{@code project_id}]:[{@code dataset_id}].[{@code table_id}] * <li>[{@code dataset_id}].[{@code table_id}] * </ul> * <p><h3>Reading</h3> * To read from a BigQuery table, apply a {@link BigQueryIO.Read} transformation. * This produces a {@code PCollection<TableRow>} as output: * <pre>{@code * PCollection<TableRow> shakespeare = pipeline.apply( * BigQueryIO.Read * .named("Read") * .from("clouddataflow-readonly:samples.weather_stations"); * }</pre> * <p><h3>Writing</h3> * To write to a BigQuery table, apply a {@link BigQueryIO.Write} transformation. * This consumes a {@code PCollection<TableRow>} as input. * <p> * <pre>{@code * PCollection<TableRow> quotes = ... 
 *
 * List<TableFieldSchema> fields = new ArrayList<>();
 * fields.add(new TableFieldSchema().setName("source").setType("STRING"));
 * fields.add(new TableFieldSchema().setName("quote").setType("STRING"));
 * TableSchema schema = new TableSchema().setFields(fields);
 *
 * quotes.apply(BigQueryIO.Write
 *     .named("Write")
 *     .to("my-project:output.output_table")
 *     .withSchema(schema)
 *     .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE));
 * }</pre>
 * <p>
 * See {@link BigQueryIO.Write} for details on how to specify if a write should
 * append to an existing table, replace the table, or verify that the table is
 * empty. Note that the dataset being written to must already exist.
 *
 * @see <a href="https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/java/latest/com/google/api/services/bigquery/model/TableRow.html">TableRow</a>
 */
public class BigQueryIO {
  private static final Logger LOG = LoggerFactory.getLogger(BigQueryIO.class);

  /**
   * Singleton instance of the JSON factory used to read and write JSON
   * formatted rows.
   */
  private static final JsonFactory JSON_FACTORY = Transport.getJsonFactory();

  /**
   * Project IDs must contain 6-63 lowercase letters, digits, or dashes.
   * IDs must start with a letter and may not end with a dash.
   * This regex isn't exact - this allows for patterns that would be rejected by
   * the service, but this is sufficient for basic parsing of table references.
   */
  private static final String PROJECT_ID_REGEXP = "[a-z][-a-z0-9:.]{4,61}[a-z0-9]";

  /** Regular expression which matches Dataset IDs. */
  private static final String DATASET_REGEXP = "[-\\w.]{1,1024}";

  /** Regular expression which matches Table IDs. */
  private static final String TABLE_REGEXP = "[-\\w$@]{1,1024}";

  /**
   * Matches table specifications in the form
   * "[project_id]:[dataset_id].[table_id]" or "[dataset_id].[table_id]".
*/ private static final String DATASET_TABLE_REGEXP = String.format("((?<PROJECT>%s):)?(?<DATASET>%s)\\.(?<TABLE>%s)", PROJECT_ID_REGEXP, DATASET_REGEXP, TABLE_REGEXP); private static final Pattern TABLE_SPEC = Pattern.compile(DATASET_TABLE_REGEXP); /** * Parse a table specification in the form * "[project_id]:[dataset_id].[table_id]" or "[dataset_id].[table_id]". * <p> * If the project id is omitted, the default project id is used. */ public static TableReference parseTableSpec(String tableSpec) { Matcher match = TABLE_SPEC.matcher(tableSpec); if (!match.matches()) { throw new IllegalArgumentException( "Table reference is not in [project_id]:[dataset_id].[table_id] " + "format: " + tableSpec); } TableReference ref = new TableReference(); ref.setProjectId(match.group("PROJECT")); return ref.setDatasetId(match.group("DATASET")).setTableId(match.group("TABLE")); } /** * Returns a canonical string representation of the TableReference. */ public static String toTableSpec(TableReference ref) { StringBuilder sb = new StringBuilder(); if (ref.getProjectId() != null) { sb.append(ref.getProjectId()); sb.append(":"); } sb.append(ref.getDatasetId()).append('.').append(ref.getTableId()); return sb.toString(); } /** * A PTransform that reads from a BigQuery table and returns a * {@code PCollection<TableRow>} containing each of the rows of the table. * <p> * Each TableRow record contains values indexed by column name. Here is a * sample processing function which processes a "line" column from rows: * <pre><code> * static class ExtractWordsFn extends DoFn{@literal <TableRow, String>} { * {@literal @}Override * public void processElement(ProcessContext c) { * // Get the "line" field of the TableRow object, split it into words, and emit them. 
   * TableRow row = c.element();
   * String[] words = row.get("line").toString().split("[^a-zA-Z']+");
   * for (String word : words) {
   *   if (!word.isEmpty()) {
   *     c.output(word);
   *   }
   * }
   *   }
   * }
   * </code></pre>
   */
  public static class Read {
    /** Sets the name associated with this transformation. */
    public static Bound named(String name) {
      return new Bound().named(name);
    }

    /**
     * Reads a BigQuery table specified as
     * "[project_id]:[dataset_id].[table_id]" or "[dataset_id].[table_id]" for
     * tables within the current project.
     */
    public static Bound from(String tableSpec) {
      return new Bound().from(tableSpec);
    }

    /** Reads a BigQuery table specified as a TableReference object. */
    public static Bound from(TableReference table) {
      return new Bound().from(table);
    }

    /** Disables BigQuery table validation which is enabled by default. */
    public static Bound withoutValidation() {
      return new Bound().withoutValidation();
    }

    /**
     * A PTransform that reads from a BigQuery table and returns a bounded
     * {@code PCollection<TableRow>}.
     */
    public static class Bound extends PTransform<PInput, PCollection<TableRow>> {
      private static final long serialVersionUID = 0;

      // Target table; null until from(...) is called.
      TableReference table;
      // Whether table validation is desired. Default is true.
      final boolean validate;

      Bound() {
        this.validate = true;
      }

      Bound(String name, TableReference reference, boolean validate) {
        super(name);
        this.table = reference;
        this.validate = validate;
      }

      /** Sets the name associated with this transformation. */
      public Bound named(String name) {
        return new Bound(name, table, validate);
      }

      /**
       * Sets the table specification.
       * <p>
       * Refer to {@link #parseTableSpec(String)} for the specification format.
       */
      public Bound from(String tableSpec) {
        return from(parseTableSpec(tableSpec));
      }

      /** Sets the table specification. */
      public Bound from(TableReference table) {
        return new Bound(name, table, validate);
      }

      /** Disable table validation. */
      public Bound withoutValidation() {
        return new Bound(name, table, false);
      }

      @Override
      public PCollection<TableRow> apply(PInput input) {
        if (table == null) {
          throw new IllegalStateException(
              "must set the table reference of a BigQueryIO.Read transform");
        }
        return PCollection.<TableRow>createPrimitiveOutputInternal(new GlobalWindows())
            // Force the output's Coder to be what the read is using, and
            // unchangeable later, to ensure that we read the input in the
            // format specified by the Read transform.
            .setCoder(TableRowJsonCoder.of());
      }

      @Override
      protected Coder<TableRow> getDefaultOutputCoder() {
        return TableRowJsonCoder.of();
      }

      @Override
      protected String getKindString() {
        return "BigQueryIO.Read";
      }

      static {
        // Register the direct-runner evaluator for this primitive transform.
        DirectPipelineRunner.registerDefaultTransformEvaluator(
            Bound.class, new DirectPipelineRunner.TransformEvaluator<Bound>() {
              @Override
              public void evaluate(
                  Bound transform, DirectPipelineRunner.EvaluationContext context) {
                evaluateReadHelper(transform, context);
              }
            });
      }

      /** Returns the table to read. */
      public TableReference getTable() {
        return table;
      }

      /** Returns true if table validation is enabled. */
      public boolean getValidate() {
        return validate;
      }
    }
  }

  /////////////////////////////////////////////////////////////////////////////

  /**
   * A PTransform that writes a {@code PCollection<TableRow>} containing rows
   * to a BigQuery table.
   * <p>
   * By default, tables will be created if they do not exist, which
   * corresponds to a {@code CreateDisposition.CREATE_IF_NEEDED} disposition
   * which matches the default of BigQuery's Jobs API. A schema must be
   * provided (via {@link Write#withSchema}), or else the transform may fail
   * at runtime with an {@link java.lang.IllegalArgumentException}.
   * <p>
   * The dataset being written must already exist.
   * <p>
   * By default, writes require an empty table, which corresponds to
   * a {@code WriteDisposition.WRITE_EMPTY} disposition which matches the
   * default of BigQuery's Jobs API.
   * <p>
   * Here is a sample transform which produces TableRow values containing
   * "word" and "count" columns:
   * <pre><code>
   * static class FormatCountsFn extends DoFn{@literal <KV<String, Long>, TableRow>} {
   *   {@literal @}Override
   *   public void processElement(ProcessContext c) {
   *     TableRow row = new TableRow()
   *         .set("word", c.element().getKey())
   *         .set("count", c.element().getValue().intValue());
   *     c.output(row);
   *   }
   * }
   * </code></pre>
   */
  public static class Write {
    /**
     * An enumeration type for the BigQuery create disposition strings publicly
     * documented as {@code CREATE_NEVER}, and {@code CREATE_IF_NEEDED}.
     */
    public enum CreateDisposition {
      /**
       * Specifies that tables should not be created.
       * <p>
       * If the output table does not exist, the write fails.
       */
      CREATE_NEVER,

      /**
       * Specifies that tables should be created if needed. This is the default
       * behavior.
       * <p>
       * Requires that a table schema is provided via {@link Write#withSchema}.
       * This precondition is checked before starting a job. The schema is
       * not required to match an existing table's schema.
       * <p>
       * When this transformation is executed, if the output table does not
       * exist, the table is created from the provided schema. Note that even if
       * the table exists, it may be recreated if necessary when paired with a
       * {@link WriteDisposition#WRITE_TRUNCATE}.
       */
      CREATE_IF_NEEDED
    }

    /**
     * An enumeration type for the BigQuery write disposition strings publicly
     * documented as {@code WRITE_TRUNCATE}, {@code WRITE_APPEND}, and
     * {@code WRITE_EMPTY}.
     */
    public enum WriteDisposition {
      /**
       * Specifies that write should replace a table.
       * <p>
       * The replacement may occur in multiple steps - for instance by first
       * removing the existing table, then creating a replacement, then filling
       * it in. This is not an atomic operation, and external programs may
       * see the table in any of these intermediate steps.
       */
      WRITE_TRUNCATE,

      /**
       * Specifies that rows may be appended to an existing table.
       */
      WRITE_APPEND,

      /**
       * Specifies that the output table must be empty. This is the default
       * behavior.
       * <p>
       * If the output table is not empty, the write fails at runtime.
       * <p>
       * This check may occur long before data is written, and does not
       * guarantee exclusive access to the table. If two programs are run
       * concurrently, each specifying the same output table and
       * a {@link WriteDisposition} of {@code WRITE_EMPTY}, it is possible
       * for both to succeed.
       */
      WRITE_EMPTY
    }

    /** Sets the name associated with this transformation. */
    public static Bound named(String name) {
      return new Bound().named(name);
    }

    /**
     * Creates a write transformation for the given table specification.
     * <p>
     * Refer to {@link #parseTableSpec(String)} for the specification format.
     */
    public static Bound to(String tableSpec) {
      return new Bound().to(tableSpec);
    }

    /** Creates a write transformation for the given table. */
    public static Bound to(TableReference table) {
      return new Bound().to(table);
    }

    /**
     * Specifies a table schema to use in table creation.
     * <p>
     * The schema is required only if writing to a table which does not already
     * exist, and {@link BigQueryIO.Write.CreateDisposition} is set to
     * {@code CREATE_IF_NEEDED}.
     */
    public static Bound withSchema(TableSchema schema) {
      return new Bound().withSchema(schema);
    }

    /** Specifies options for creating the table. */
    public static Bound withCreateDisposition(CreateDisposition disposition) {
      return new Bound().withCreateDisposition(disposition);
    }

    /** Specifies options for writing to the table. */
    public static Bound withWriteDisposition(WriteDisposition disposition) {
      return new Bound().withWriteDisposition(disposition);
    }

    /** Disables BigQuery table validation which is enabled by default. */
    public static Bound withoutValidation() {
      return new Bound().withoutValidation();
    }

    /**
     * A PTransform that can write either a bounded or unbounded
     * {@code PCollection<TableRow>}s to a BigQuery table.
     */
    public static class Bound extends PTransform<PCollection<TableRow>, PDone> {
      private static final long serialVersionUID = 0;

      // The table to write to; null until to(...) is called.
      final TableReference table;

      // Table schema. The schema is required only if the table does not exist.
      final TableSchema schema;

      // Options for creating the table. Valid values are CREATE_IF_NEEDED and
      // CREATE_NEVER.
      final CreateDisposition createDisposition;

      // Options for writing to the table. Valid values are WRITE_TRUNCATE,
      // WRITE_APPEND and WRITE_EMPTY.
      final WriteDisposition writeDisposition;

      // An option to indicate if table validation is desired. Default is true.
      final boolean validate;

      public Bound() {
        this.table = null;
        this.schema = null;
        this.createDisposition = CreateDisposition.CREATE_IF_NEEDED;
        this.writeDisposition = WriteDisposition.WRITE_EMPTY;
        this.validate = true;
      }

      Bound(String name, TableReference ref, TableSchema schema,
          CreateDisposition createDisposition, WriteDisposition writeDisposition,
          boolean validate) {
        super(name);
        this.table = ref;
        this.schema = schema;
        this.createDisposition = createDisposition;
        this.writeDisposition = writeDisposition;
        this.validate = validate;
      }

      /** Sets the name associated with this transformation. */
      public Bound named(String name) {
        return new Bound(name, table, schema, createDisposition, writeDisposition, validate);
      }

      /**
       * Specifies the table specification.
       * <p>
       * Refer to {@link #parseTableSpec(String)} for the specification format.
       */
      public Bound to(String tableSpec) {
        return to(parseTableSpec(tableSpec));
      }

      /** Specifies the table to be written to. */
      public Bound to(TableReference table) {
        return new Bound(name, table, schema, createDisposition, writeDisposition, validate);
      }

      /** Specifies the table schema, used if the table is created. */
      public Bound withSchema(TableSchema schema) {
        return new Bound(name, table, schema, createDisposition, writeDisposition, validate);
      }

      /** Specifies options for creating the table. */
      public Bound withCreateDisposition(CreateDisposition createDisposition) {
        return new Bound(name, table, schema, createDisposition, writeDisposition, validate);
      }

      /** Specifies options for writing the table. */
      public Bound withWriteDisposition(WriteDisposition writeDisposition) {
        return new Bound(name, table, schema, createDisposition, writeDisposition, validate);
      }

      /** Disable table validation. */
      public Bound withoutValidation() {
        return new Bound(name, table, schema, createDisposition, writeDisposition, false);
      }

      @Override
      public PDone apply(PCollection<TableRow> input) {
        if (table == null) {
          throw new IllegalStateException(
              "must set the table reference of a BigQueryIO.Write transform");
        }
        if (createDisposition == CreateDisposition.CREATE_IF_NEEDED && schema == null) {
          throw new IllegalArgumentException("CreateDisposition is CREATE_IF_NEEDED, "
              + "however no schema was provided.");
        }
        // In streaming, BigQuery write is taken care of by StreamWithDeDup transform.
        BigQueryOptions options = getPipeline().getOptions().as(BigQueryOptions.class);
        if (options.isStreaming()) {
          return input.apply(new StreamWithDeDup(table, schema));
        }
        return new PDone();
      }

      @Override
      protected Coder<Void> getDefaultOutputCoder() {
        return VoidCoder.of();
      }

      @Override
      protected String getKindString() {
        return "BigQueryIO.Write";
      }

      static {
        // Register the direct-runner evaluator for this primitive transform.
        DirectPipelineRunner.registerDefaultTransformEvaluator(
            Bound.class, new DirectPipelineRunner.TransformEvaluator<Bound>() {
              @Override
              public void evaluate(
                  Bound transform, DirectPipelineRunner.EvaluationContext context) {
                evaluateWriteHelper(transform, context);
              }
            });
      }

      /** Returns the create disposition. */
      public CreateDisposition getCreateDisposition() {
        return createDisposition;
      }

      /** Returns the write disposition. */
      public WriteDisposition getWriteDisposition() {
        return writeDisposition;
      }

      /** Returns the table schema. */
      public TableSchema getSchema() {
        return schema;
      }

      /** Returns the table reference.
*/ public TableReference getTable() { return table; } /** Returns true if table validation is enabled. */ public boolean getValidate() { return validate; } } } ///////////////////////////////////////////////////////////////////////////// /** * Implementation of DoFn to perform streaming BigQuery write. */ private static class StreamingWriteFn extends DoFn<KV<Integer, KV<String, TableRow>>, Void> implements DoFn.RequiresKeyedState { private static final long serialVersionUID = 0; /** TableReference in JSON. Use String to make the class Serializable. */ private final String jsonTableReference; /** TableSchema in JSON. Use String to make the class Serializable. */ private final String jsonTableSchema; private transient TableReference tableReference; /** JsonTableRows to accumulate BigQuery rows. */ private transient List<TableRow> tableRows; /** The list of unique ids for each BigQuery table row. */ private transient List<String> uniqueIdsForTableRows; /** The list of tables created so far, so we don't try the creation each time. */ private static Set<String> createdTables = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>()); /** Constructor. */ StreamingWriteFn(TableReference table, TableSchema schema) { try { jsonTableReference = JSON_FACTORY.toString(table); jsonTableSchema = JSON_FACTORY.toString(schema); } catch (IOException e) { throw new RuntimeException("Cannot initialize BigQuery streaming writer.", e); } } /** Prepares a target BigQuery table. */ @Override public void startBundle(Context context) { tableRows = new ArrayList<>(); uniqueIdsForTableRows = new ArrayList<>(); BigQueryOptions options = context.getPipelineOptions().as(BigQueryOptions.class); // TODO: Support table sharding and the better place to initialize // BigQuery table. 
try { tableReference = JSON_FACTORY.fromString(jsonTableReference, TableReference.class); if (!createdTables.contains(jsonTableSchema)) { synchronized (createdTables) { // Another thread may have succeeded in creating the table in the meanwhile, so // check again. This check isn't needed for correctness, but we add it to prevent // every thread from attempting a create and overwhelming our BigQuery quota. if (!createdTables.contains(jsonTableSchema)) { TableSchema tableSchema = JSON_FACTORY.fromString(jsonTableSchema, TableSchema.class); Bigquery client = Transport.newBigQueryClient(options).build(); BigQueryTableInserter inserter = new BigQueryTableInserter(client, tableReference); inserter.tryCreateTable(tableSchema); createdTables.add(jsonTableSchema); } } } } catch (IOException e) { throw new RuntimeException(e); } } /** Accumulates the input into JsonTableRows and uniqueIdsForTableRows. */ @Override public void processElement(ProcessContext context) { KV<Integer, KV<String, TableRow>> kv = context.element(); addRow(kv.getValue().getValue(), kv.getValue().getKey()); } /** Writes the accumulated rows into BigQuery with streaming API. */ @Override public void finishBundle(Context context) { flushRows(context.getPipelineOptions().as(BigQueryOptions.class)); } /** Accumulate a row to be written to BigQuery. */ private void addRow(TableRow tableRow, String uniqueId) { uniqueIdsForTableRows.add(uniqueId); tableRows.add(tableRow); } /** Writes the accumulated rows into BigQuery with streaming API. 
     */
    private void flushRows(BigQueryOptions options) {
      if (!tableRows.isEmpty()) {
        Bigquery client = Transport.newBigQueryClient(options).build();
        try {
          BigQueryTableInserter inserter = new BigQueryTableInserter(client, tableReference);
          inserter.insertAll(tableRows, uniqueIdsForTableRows);
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
        tableRows.clear();
        uniqueIdsForTableRows.clear();
      }
    }
  }

  /////////////////////////////////////////////////////////////////////////////

  /**
   * Fn that tags each table row with a unique id.
   * To avoid calling UUID.randomUUID() for each element, which can be costly,
   * a randomUUID is generated only once per bucket of data. The actual unique
   * id is created by concatenating this randomUUID with a sequential number.
   */
  private static class TagWithUniqueIds
      extends DoFn<TableRow, KV<Integer, KV<String, TableRow>>> {
    private static final long serialVersionUID = 0;

    // Per-bundle UUID prefix; regenerated in startBundle.
    private transient String randomUUID;
    // Per-bundle sequence counter appended to the UUID prefix.
    private transient AtomicLong sequenceNo;

    @Override
    public void startBundle(Context context) {
      randomUUID = UUID.randomUUID().toString();
      sequenceNo = new AtomicLong();
    }

    /** Tag the input with a unique id. */
    @Override
    public void processElement(ProcessContext context) {
      String uniqueId = randomUUID + Long.toString(sequenceNo.getAndIncrement());
      ThreadLocalRandom randomGenerator = ThreadLocalRandom.current();
      // We output on keys 0-50 to ensure that there's enough batching for
      // BigQuery.
      context.output(KV.of(randomGenerator.nextInt(0, 50),
          KV.of(uniqueId, context.element())));
    }
  }

  /////////////////////////////////////////////////////////////////////////////

  /**
   * PTransform that performs streaming BigQuery write. To increase consistency,
   * it leverages BigQuery best effort de-dup mechanism.
   */
  private static class StreamWithDeDup extends PTransform<PCollection<TableRow>, PDone> {
    private static final long serialVersionUID = 0;

    private final TableReference tableReference;
    private final TableSchema tableSchema;

    /** Constructor.
     */
    StreamWithDeDup(TableReference tableReference, TableSchema tableSchema) {
      this.tableReference = tableReference;
      this.tableSchema = tableSchema;
    }

    @Override
    protected Coder<Void> getDefaultOutputCoder() {
      return VoidCoder.of();
    }

    @Override
    public PDone apply(PCollection<TableRow> in) {
      // A naive implementation would be to simply stream data directly to BigQuery.
      // However, this could occasionally lead to duplicated data, e.g., when
      // a VM that runs this code is restarted and the code is re-run.

      // The above risk is mitigated in this implementation by relying on
      // BigQuery built-in best effort de-dup mechanism.

      // To use this mechanism, each input TableRow is tagged with a generated
      // unique id, which is then passed to BigQuery and used to ignore duplicates.
      PCollection<KV<Integer, KV<String, TableRow>>> tagged =
          in.apply(ParDo.of(new TagWithUniqueIds()));

      // To prevent having the same TableRow processed more than once with regenerated
      // different unique ids, this implementation relies on "checkpointing" which is
      // achieved as a side effect of having StreamingWriteFn implement RequiresKeyedState.
      tagged.apply(ParDo.of(new StreamingWriteFn(tableReference, tableSchema)));

      // Note that the implementation to return PDone here breaks the
      // implicit assumption about the job execution order. If a user
      // implements a PTransform that takes PDone returned here as its
      // input, the transform may not necessarily be executed after
      // the BigQueryIO.Write.
      return new PDone();
    }
  }

  /////////////////////////////////////////////////////////////////////////////

  /**
   * Direct mode read evaluator.
   * <p>
   * This loads the entire table into an in-memory PCollection.
   */
  private static void evaluateReadHelper(
      Read.Bound transform, DirectPipelineRunner.EvaluationContext context) {
    BigQueryOptions options = context.getPipelineOptions();
    Bigquery client = Transport.newBigQueryClient(options).build();
    TableReference ref = transform.table;
    // Fill in the default project id when the table spec omitted it.
    if (ref.getProjectId() == null) {
      ref.setProjectId(options.getProject());
    }

    LOG.info("Reading from BigQuery table {}", toTableSpec(ref));
    List<WindowedValue<TableRow>> elems =
        ReaderUtils.readElemsFromReader(new BigQueryReader(client, ref));
    LOG.info("Number of records read from BigQuery: {}", elems.size());
    context.setPCollectionWindowedValue(transform.getOutput(), elems);
  }

  /**
   * Direct mode write evaluator.
   * <p>
   * This writes the entire table in a single BigQuery request.
   * The table will be created if necessary.
   */
  private static void evaluateWriteHelper(
      Write.Bound transform, DirectPipelineRunner.EvaluationContext context) {
    BigQueryOptions options = context.getPipelineOptions();
    Bigquery client = Transport.newBigQueryClient(options).build();
    TableReference ref = transform.table;
    // Fill in the default project id when the table spec omitted it.
    if (ref.getProjectId() == null) {
      ref.setProjectId(options.getProject());
    }

    LOG.info("Writing to BigQuery table {}", toTableSpec(ref));
    try {
      BigQueryTableInserter inserter = new BigQueryTableInserter(client, ref);
      inserter.getOrCreateTable(
          transform.writeDisposition, transform.createDisposition, transform.schema);
      List<TableRow> tableRows = context.getPCollection(transform.getInput());
      inserter.insertAll(tableRows);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
package com.laytonsmith.core.events.drivers; import com.laytonsmith.PureUtilities.Version; import com.laytonsmith.abstraction.MCCommandSender; import com.laytonsmith.abstraction.MCBlockCommandSender; import com.laytonsmith.abstraction.MCConsoleCommandSender; import com.laytonsmith.abstraction.MCLocation; import com.laytonsmith.abstraction.MCPlayer; import com.laytonsmith.abstraction.entities.MCCommandMinecart; import com.laytonsmith.abstraction.events.MCBroadcastMessageEvent; import com.laytonsmith.abstraction.events.MCCommandTabCompleteEvent; import com.laytonsmith.abstraction.events.MCServerCommandEvent; import com.laytonsmith.abstraction.events.MCRedstoneChangedEvent; import com.laytonsmith.abstraction.events.MCServerPingEvent; import com.laytonsmith.annotations.api; import com.laytonsmith.core.ArgumentValidation; import com.laytonsmith.core.MSVersion; import com.laytonsmith.core.ObjectGenerator; import com.laytonsmith.core.Static; import com.laytonsmith.core.constructs.CArray; import com.laytonsmith.core.constructs.CBoolean; import com.laytonsmith.core.constructs.CInt; import com.laytonsmith.core.constructs.CNull; import com.laytonsmith.core.constructs.CString; import com.laytonsmith.core.constructs.Construct; import com.laytonsmith.core.constructs.Target; import com.laytonsmith.core.environments.CommandHelperEnvironment; import com.laytonsmith.core.environments.Environment; import com.laytonsmith.core.events.AbstractEvent; import com.laytonsmith.core.events.BindableEvent; import com.laytonsmith.core.events.BoundEvent.ActiveEvent; import com.laytonsmith.core.events.Driver; import com.laytonsmith.core.events.EventBuilder; import com.laytonsmith.core.events.Prefilters; import com.laytonsmith.core.events.Prefilters.PrefilterType; import com.laytonsmith.core.exceptions.ConfigRuntimeException; import com.laytonsmith.core.exceptions.EventException; import com.laytonsmith.core.exceptions.PrefilterNonMatchException; import 
com.laytonsmith.core.exceptions.CRE.CRECastException;
import com.laytonsmith.core.natives.interfaces.Mixed;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Event drivers for non-specific, server-wide events.
 */
public class ServerEvents {

	public static String docs() {
		return "Contains non-specific server-wide events.";
	}

	@api
	public static class server_command extends AbstractEvent {

		@Override
		public String getName() {
			return "server_command";
		}

		@Override
		public String docs() {
			return "{prefix: <string match> The first part of the command, i.e. 'cmd' in '/cmd blah blah'"
					+ " | type: <string match> The command sender type}"
					+ "This event is fired off when any command is run from the console or commandblock. This fires"
					+ " before CommandHelper aliases, allowing you to insert control beforehand. Be careful with this"
					+ " event, because it can override ALL server commands, potentially creating all sorts of havoc."
					+ "{command: The entire command | prefix: The prefix of the command"
					+ " | sendertype: The command sender type. This is one of console, command_block,"
					+ " command_minecart or null if the sender is unknown to CommandHelper.}"
					+ "{command}"
					+ "{}";
		}

		@Override
		public Driver driver() {
			return Driver.SERVER_COMMAND;
		}

		@Override
		public MSVersion since() {
			return MSVersion.V3_3_2;
		}

		@Override
		public boolean matches(Map<String, Mixed> prefilter, BindableEvent e) throws PrefilterNonMatchException {
			if(!(e instanceof MCServerCommandEvent)) {
				return false;
			}
			MCServerCommandEvent event = (MCServerCommandEvent) e;
			// The prefix is the command name, i.e. "cmd" in "/cmd arg1 arg2".
			String prefix = event.getCommand().split(" ", 2)[0];
			Prefilters.match(prefilter, "prefix", prefix, PrefilterType.STRING_MATCH);
			Prefilters.match(prefilter, "sendertype", getCommandsenderString(event.getCommandSender()),
					PrefilterType.STRING_MATCH);
			return true;
		}

		@Override
		public BindableEvent convert(CArray manualObject, Target t) {
			throw new UnsupportedOperationException("Not supported yet.");
		}

		@Override
		public Map<String, Mixed> evaluate(BindableEvent e) throws EventException {
			if(!(e instanceof MCServerCommandEvent)) {
				throw new EventException("Cannot convert e to MCServerCommandEvent");
			}
			MCServerCommandEvent event = (MCServerCommandEvent) e;
			Map<String, Mixed> map = new HashMap<>();
			map.put("command", new CString(event.getCommand(), Target.UNKNOWN));
			String prefix = event.getCommand().split(" ", 2)[0];
			map.put("prefix", new CString(prefix, Target.UNKNOWN));

			// Set the command sender type.
			String type = getCommandsenderString(event.getCommandSender());
			map.put("sendertype", (type == null ? CNull.NULL : new CString(type, Target.UNKNOWN)));

			return map;
		}

		/**
		 * Returns a string describing the type of the given command sender, or null
		 * when the sender implementation is unknown to CommandHelper.
		 */
		private static String getCommandsenderString(MCCommandSender sender) {
			if(sender instanceof MCConsoleCommandSender) {
				return "console";
			} else if(sender instanceof MCBlockCommandSender) {
				return "command_block";
			} else if(sender instanceof MCCommandMinecart) {
				return "command_minecart";
			} else {
				return null; // Unknown sender implementation.
			}
		}

		@Override
		public boolean modifyEvent(String key, Mixed value, BindableEvent event) {
			if(event instanceof MCServerCommandEvent) {
				MCServerCommandEvent e = (MCServerCommandEvent) event;
				if(key.equals("command")) {
					e.setCommand(value.val());
					return true;
				}
			}
			return false;
		}

		@Override
		public void preExecution(Environment env, ActiveEvent activeEvent) {
			if(activeEvent.getUnderlyingEvent() instanceof MCServerCommandEvent) {
				// Expose the command sender to the event handler's environment.
				MCServerCommandEvent event = (MCServerCommandEvent) activeEvent.getUnderlyingEvent();
				env.getEnv(CommandHelperEnvironment.class).SetCommandSender(event.getCommandSender());
			}
		}
	}

	@api
	public static class server_ping extends AbstractEvent {

		@Override
		public String getName() {
			return "server_ping";
		}

		@Override
		public String docs() {
			return "{players: <math match> | maxplayers: <math match>}"
					+ " Fired when a user who has saved this server looks at their serverlist."
					+ " {ip: The address the ping is coming from | players: The number of players online"
					+ " | maxplayers: The number of slots on the server | motd: The message a player is shown on the serverlist"
					+ " | list: The list of connected players}"
					+ " {motd | maxplayers | list: It is only possible to remove players, the added players"
					+ " will be ignored. This will also change the player count.}"
					+ " {}";
		}

		@Override
		public boolean matches(Map<String, Mixed> prefilter, BindableEvent e) throws PrefilterNonMatchException {
			if(e instanceof MCServerPingEvent) {
				MCServerPingEvent event = (MCServerPingEvent) e;
				Prefilters.match(prefilter, "players", event.getNumPlayers(), PrefilterType.MATH_MATCH);
				Prefilters.match(prefilter, "maxplayers", event.getMaxPlayers(), PrefilterType.MATH_MATCH);
				return true;
			}
			return false;
		}

		@Override
		public BindableEvent convert(CArray manualObject, Target t) {
			throw ConfigRuntimeException.CreateUncatchableException("Unsupported Operation", Target.UNKNOWN);
		}

		@Override
		public Map<String, Mixed> evaluate(BindableEvent e) throws EventException {
			if(e instanceof MCServerPingEvent) {
				MCServerPingEvent event = (MCServerPingEvent) e;
				Target t = Target.UNKNOWN;
				Map<String, Mixed> ret = evaluate_helper(event);
				String ip;
				try {
					ip = event.getAddress().getHostAddress();
				} catch (NullPointerException npe) {
					// The ping address can be unavailable; fall back to an empty string.
					ip = "";
				}
				ret.put("ip", new CString(ip, t));
				ret.put("motd", new CString(event.getMOTD(), t));
				ret.put("players", new CInt(event.getNumPlayers(), t));
				ret.put("maxplayers", new CInt(event.getMaxPlayers(), t));
				CArray players = new CArray(t);
				for(MCPlayer player : event.getPlayers()) {
					players.push(new CString(player.getName(), t), t);
				}
				ret.put("list", players);
				return ret;
			} else {
				throw new EventException("Could not convert to MCPingEvent");
			}
		}

		@Override
		public boolean modifyEvent(String key, Mixed value, BindableEvent event) {
			if(event instanceof MCServerPingEvent) {
				MCServerPingEvent e = (MCServerPingEvent) event;
				switch(key.toLowerCase()) {
					case "motd":
						e.setMOTD(value.val());
						return true;
					case "maxplayers":
						e.setMaxPlayers(ArgumentValidation.getInt32(value, value.getTarget()));
						return true;
					case "list":
						// Modifies the player list. The new list will be the intersection of the original
						// and the given list. Names and UUID's outside this intersection will simply be ignored.
						Set<MCPlayer> modifiedPlayers = new HashSet<>();
						List<Mixed> passedList = ArgumentValidation.getArray(value, value.getTarget()).asList();
						for(MCPlayer player : e.getPlayers()) {
							for(Mixed construct : passedList) {
								String playerStr = construct.val();
								if(playerStr.length() > 0 && playerStr.length() <= 16) {
									// "player" is a name.
									if(playerStr.equalsIgnoreCase(player.getName())) {
										modifiedPlayers.add(player);
										break;
									}
								} else {
									// "player" is the UUID of the player.
									if(playerStr.equalsIgnoreCase(player.getUniqueID().toString())) {
										modifiedPlayers.add(player);
										break;
									}
								}
							}
						}
						e.setPlayers(modifiedPlayers);
						return true;
				}
			}
			return false;
		}

		@Override
		public Driver driver() {
			return Driver.SERVER_PING;
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_1;
		}
	}

	@api
	public static class tab_complete_command extends AbstractEvent {

		@Override
		public String getName() {
			return "tab_complete_command";
		}

		@Override
		public String docs() {
			return "{}"
					+ " This will fire if a tab completer has not been set for a command registered with"
					+ " register_command(), or if the set tab completer doesn't return an array. If completions are "
					+ " not modified, registered commands will tab complete online player names."
					+ " {command: The command name that was registered. | alias: The alias the player entered to run"
					+ " the command. | args: The given arguments after the alias. | completions: The available"
					+ " completions for the last argument. | sender: The player that ran the command. }"
					+ " {completions}"
					+ " {}";
		}

		@Override
		public boolean matches(Map<String, Mixed> prefilter, BindableEvent event) throws PrefilterNonMatchException {
			return event instanceof MCCommandTabCompleteEvent;
		}

		@Override
		public BindableEvent convert(CArray manualObject, Target t) {
			throw ConfigRuntimeException.CreateUncatchableException("Unsupported Operation", Target.UNKNOWN);
		}

		@Override
		public Map<String, Mixed> evaluate(BindableEvent event) throws EventException {
			if(event instanceof MCCommandTabCompleteEvent) {
				MCCommandTabCompleteEvent e = (MCCommandTabCompleteEvent) event;
				Target t = Target.UNKNOWN;
				Map<String, Mixed> ret = evaluate_helper(event);
				ret.put("sender", new CString(e.getCommandSender().getName(), t));
				CArray comp = new CArray(t);
				if(e.getCompletions() != null) {
					for(String c : e.getCompletions()) {
						comp.push(new CString(c, t), t);
					}
				}
				ret.put("completions", comp);
				ret.put("command", new CString(e.getCommand().getName(), t));
				CArray args = new CArray(t);
				for(String a : e.getArguments()) {
					args.push(new CString(a, t), t);
				}
				ret.put("args", args);
				ret.put("alias", new CString(e.getAlias(), t));
				return ret;
			} else {
				throw new EventException("Could not convert to MCCommandTabCompleteEvent");
			}
		}

		@Override
		public Driver driver() {
			return Driver.TAB_COMPLETE;
		}

		@Override
		public boolean modifyEvent(String key, Mixed value, BindableEvent event) {
			if(event instanceof MCCommandTabCompleteEvent) {
				MCCommandTabCompleteEvent e = (MCCommandTabCompleteEvent) event;
				if("completions".equals(key)) {
					if(value.isInstanceOf(CArray.TYPE)) {
						// Accept both associative and normal arrays; only the values are used.
						List<String> comp = new ArrayList<>();
						if(((CArray) value).inAssociativeMode()) {
							for(Mixed k : ((CArray) value).keySet()) {
								comp.add(((CArray) value).get(k, value.getTarget()).val());
							}
						} else {
							for(Mixed v : ((CArray) value).asList()) {
								comp.add(v.val());
							}
						}
						e.setCompletions(comp);
						return true;
					}
				}
			}
			return false;
		}

		@Override
		public Version since() {
			return MSVersion.V3_3_1;
		}
	}

	private static final Map<MCLocation, Boolean>
REDSTONE_MONITORS = Collections.synchronizedMap(new HashMap<MCLocation, Boolean>()); /** * Returns a synchronized set of redstone monitors. When iterating on the list, be sure to synchronize manually. * * @return */ public static Map<MCLocation, Boolean> getRedstoneMonitors() { return REDSTONE_MONITORS; } @api public static class redstone_changed extends AbstractEvent { @Override public void hook() { REDSTONE_MONITORS.clear(); } @Override public String getName() { return "redstone_changed"; } @Override public String docs() { return "{location: <location match>}" + " Fired when a redstone activatable block is toggled, either on or off, AND the block has been set to be monitored" + " with the monitor_redstone function." + " {location: The location of the block | active: Whether or not the block is now active, or disabled.}" + " {}" + " {}"; } @Override public boolean matches(Map<String, com.laytonsmith.core.natives.interfaces.Mixed> prefilter, BindableEvent e) throws PrefilterNonMatchException { if(e instanceof MCRedstoneChangedEvent) { MCRedstoneChangedEvent event = (MCRedstoneChangedEvent) e; Prefilters.match(prefilter, "location", event.getLocation(), PrefilterType.LOCATION_MATCH); return true; } return false; } @Override public BindableEvent convert(CArray manualObject, Target t) { throw new UnsupportedOperationException("Not supported yet."); } @Override public Map<String, Mixed> evaluate(BindableEvent e) throws EventException { MCRedstoneChangedEvent event = (MCRedstoneChangedEvent) e; Map<String, Mixed> map = evaluate_helper(e); map.put("location", ObjectGenerator.GetGenerator().location(event.getLocation())); map.put("active", CBoolean.get(event.isActive())); return map; } @Override public Driver driver() { return Driver.REDSTONE_CHANGED; } @Override public boolean modifyEvent(String key, Mixed value, BindableEvent event) { return false; } @Override public Version since() { return MSVersion.V3_3_1; } } @api public static class broadcast_message extends 
AbstractEvent { @Override public String getName() { return "broadcast_message"; } @Override public String docs() { return "{message: <string match>}" + " Fired when a message is broadcasted on the server." + " {message: The message that will be broadcasted" + " | player_recipients: An array of players who will receive the message.}" + " {message}" + " {}"; } @Override public boolean matches(Map<String, Mixed> prefilter, BindableEvent e) throws PrefilterNonMatchException { if(e instanceof MCBroadcastMessageEvent) { MCBroadcastMessageEvent event = (MCBroadcastMessageEvent) e; Prefilters.match(prefilter, "message", event.getMessage(), PrefilterType.STRING_MATCH); return true; } return false; } @Override public BindableEvent convert(CArray manualObject, Target t) { // Get the player recipients. Mixed cRecipients = manualObject.get("player_recipients", t); if(!(cRecipients instanceof CArray) && !(cRecipients instanceof CNull)) { throw new CRECastException("Expected player_recipients to be an array, but received: " + cRecipients.typeof().toString(), t); } Set<MCCommandSender> recipients = new HashSet<>(); CArray recipientsArray = (CArray) cRecipients; for(int i = 0; i < recipientsArray.size(); i++) { MCPlayer player = Static.GetPlayer(recipientsArray.get(i, t), t); recipients.add(player); } // Get the message. Mixed cMessage = manualObject.get("message", t); if(!(cMessage instanceof CString)) { throw new CRECastException("Expected message to be a string, but received: " + cMessage.typeof().toString(), t); } // Instantiate and return the event. 
return EventBuilder.instantiate(MCBroadcastMessageEvent.class, Construct.nval((CString) cMessage), recipients); } @Override public Map<String, Mixed> evaluate(BindableEvent e) throws EventException { MCBroadcastMessageEvent event = (MCBroadcastMessageEvent) e; Map<String, Mixed> map = evaluate_helper(e); map.put("message", new CString(event.getMessage(), Target.UNKNOWN)); CArray cRecipients = new CArray(Target.UNKNOWN); for(MCPlayer player : event.getPlayerRecipients()) { cRecipients.push(new CString(player.getName(), Target.UNKNOWN), Target.UNKNOWN); } map.put("player_recipients", cRecipients); return map; } @Override public Driver driver() { return Driver.BROADCAST_MESSAGE; } @Override public boolean modifyEvent(String key, Mixed value, BindableEvent e) { if(key.equals("message")) { MCBroadcastMessageEvent event = (MCBroadcastMessageEvent) e; event.setMessage(Construct.nval(value)); return true; } return false; } @Override public Version since() { return MSVersion.V3_3_2; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db;

import java.nio.ByteBuffer;
import java.util.*;

import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.cql3.Operator;
import org.apache.cassandra.db.filter.*;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.CollectionType;
import org.apache.cassandra.dht.*;
import org.apache.cassandra.utils.FBUtilities;

/**
 * Fluent builder for {@code ReadCommand}s, apparently intended for test code
 * (obtained via {@code Util.cmd()} per the constructor note below).
 * Accumulates clustering bounds, column selection, row filters and limits,
 * then materializes them into a concrete command via {@link #build()}.
 *
 * <p>NOTE(review): clustering bounds ({@code fromIncl}/{@code toIncl}/...)
 * and explicit row names ({@code includeRow}) are mutually exclusive — the
 * asserts enforce that only one addressing mode is used per builder.</p>
 */
public abstract class AbstractReadCommandBuilder
{
    protected final ColumnFamilyStore cfs;
    // Query timestamp in seconds; defaults to "now" but is overridable for tests.
    protected int nowInSeconds;

    // -1 means "no limit" for both CQL and paging limits; see makeLimits().
    private int cqlLimit = -1;
    private int pagingLimit = -1;
    protected boolean reversed = false;

    // null means "all columns"; see makeColumnFilter().
    protected Set<ColumnIdentifier> columns;
    protected final RowFilter filter = RowFilter.create();

    // Slice-addressing state (used when clusterings == null).
    private Slice.Bound lowerClusteringBound;
    private Slice.Bound upperClusteringBound;

    // Name-addressing state (used instead of slices when non-null).
    private NavigableSet<Clustering> clusterings;

    // Use Util.cmd() instead of this ctor directly
    AbstractReadCommandBuilder(ColumnFamilyStore cfs)
    {
        this.cfs = cfs;
        this.nowInSeconds = FBUtilities.nowInSeconds();
    }

    /** Overrides the query's "now" timestamp (seconds). */
    public AbstractReadCommandBuilder withNowInSeconds(int nowInSec)
    {
        this.nowInSeconds = nowInSec;
        return this;
    }

    /** Sets an inclusive lower clustering bound. Incompatible with includeRow(). */
    public AbstractReadCommandBuilder fromIncl(Object... values)
    {
        assert lowerClusteringBound == null && clusterings == null;
        this.lowerClusteringBound = Slice.Bound.create(cfs.metadata.comparator, true, true, values);
        return this;
    }

    /** Sets an exclusive lower clustering bound. Incompatible with includeRow(). */
    public AbstractReadCommandBuilder fromExcl(Object... values)
    {
        assert lowerClusteringBound == null && clusterings == null;
        this.lowerClusteringBound = Slice.Bound.create(cfs.metadata.comparator, true, false, values);
        return this;
    }

    /** Sets an inclusive upper clustering bound. Incompatible with includeRow(). */
    public AbstractReadCommandBuilder toIncl(Object... values)
    {
        assert upperClusteringBound == null && clusterings == null;
        this.upperClusteringBound = Slice.Bound.create(cfs.metadata.comparator, false, true, values);
        return this;
    }

    /** Sets an exclusive upper clustering bound. Incompatible with includeRow(). */
    public AbstractReadCommandBuilder toExcl(Object... values)
    {
        assert upperClusteringBound == null && clusterings == null;
        this.upperClusteringBound = Slice.Bound.create(cfs.metadata.comparator, false, false, values);
        return this;
    }

    /** Adds one exact clustering to fetch by name. Incompatible with slice bounds. */
    public AbstractReadCommandBuilder includeRow(Object... values)
    {
        assert lowerClusteringBound == null && upperClusteringBound == null;
        if (this.clusterings == null)
            this.clusterings = new TreeSet<>(cfs.metadata.comparator);

        this.clusterings.add(cfs.metadata.comparator.make(values));
        return this;
    }

    /** Reverses the clustering order of the result. */
    public AbstractReadCommandBuilder reverse()
    {
        this.reversed = true;
        return this;
    }

    /** Sets the CQL row limit. */
    public AbstractReadCommandBuilder withLimit(int newLimit)
    {
        this.cqlLimit = newLimit;
        return this;
    }

    /** Sets the per-page limit applied on top of the CQL limit. */
    public AbstractReadCommandBuilder withPagingLimit(int newLimit)
    {
        this.pagingLimit = newLimit;
        return this;
    }

    /** Restricts the selection to the named columns (default: all columns). */
    public AbstractReadCommandBuilder columns(String... columns)
    {
        if (this.columns == null)
            this.columns = new HashSet<>();

        for (String column : columns)
            this.columns.add(ColumnIdentifier.getInterned(column, true));
        return this;
    }

    // Serializes a filter value: pass ByteBuffers through, otherwise decompose
    // with the column's type. The raw cast lets decompose accept any Object.
    private ByteBuffer bb(Object value, AbstractType<?> type)
    {
        return value instanceof ByteBuffer ? (ByteBuffer)value : ((AbstractType)type).decompose(value);
    }

    // For a CONTAINS filter: the element type of a list/map value or set element.
    private AbstractType<?> forValues(AbstractType<?> collectionType)
    {
        assert collectionType instanceof CollectionType;
        CollectionType ct = (CollectionType)collectionType;
        switch (ct.kind)
        {
            case LIST:
            case MAP:
                return ct.valueComparator();
            case SET:
                return ct.nameComparator();
        }
        throw new AssertionError();
    }

    // For a CONTAINS KEY filter: the key type (only meaningful for lists/maps).
    private AbstractType<?> forKeys(AbstractType<?> collectionType)
    {
        assert collectionType instanceof CollectionType;
        CollectionType ct = (CollectionType)collectionType;
        switch (ct.kind)
        {
            case LIST:
            case MAP:
                return ct.nameComparator();
        }
        throw new AssertionError();
    }

    /**
     * Adds a row-filter expression {@code column op value}. For collection
     * operators the value is serialized with the appropriate element type.
     */
    public AbstractReadCommandBuilder filterOn(String column, Operator op, Object value)
    {
        ColumnDefinition def = cfs.metadata.getColumnDefinition(ColumnIdentifier.getInterned(column, true));
        assert def != null;

        AbstractType<?> type = def.type;
        if (op == Operator.CONTAINS)
            type = forValues(type);
        else if (op == Operator.CONTAINS_KEY)
            type = forKeys(type);

        this.filter.add(def, op, bb(value, type));
        return this;
    }

    // Builds the column selection: everything when no columns were named.
    // (The local "filter" intentionally shadows the row-filter field here.)
    protected ColumnFilter makeColumnFilter()
    {
        if (columns == null || columns.isEmpty())
            return ColumnFilter.all(cfs.metadata);

        ColumnFilter.Builder filter = ColumnFilter.selectionBuilder();
        for (ColumnIdentifier column : columns)
            filter.add(cfs.metadata.getColumnDefinition(column));
        return filter.build();
    }

    // Builds the clustering filter: by-name if rows were listed, otherwise a
    // single slice whose missing bounds default to BOTTOM/TOP (unbounded).
    protected ClusteringIndexFilter makeFilter()
    {
        if (clusterings != null)
        {
            return new ClusteringIndexNamesFilter(clusterings, reversed);
        }
        else
        {
            Slice slice = Slice.make(lowerClusteringBound == null ? Slice.Bound.BOTTOM : lowerClusteringBound,
                                     upperClusteringBound == null ? Slice.Bound.TOP : upperClusteringBound);
            return new ClusteringIndexSliceFilter(Slices.with(cfs.metadata.comparator, slice), reversed);
        }
    }

    // Builds the limits: NONE when unset, with an optional paging overlay.
    protected DataLimits makeLimits()
    {
        DataLimits limits = cqlLimit < 0 ? DataLimits.NONE : DataLimits.cqlLimits(cqlLimit);
        if (pagingLimit >= 0)
            limits = limits.forPaging(pagingLimit);
        return limits;
    }

    /** Materializes the accumulated state into a concrete ReadCommand. */
    public abstract ReadCommand build();

    /** Builds a single-partition read addressed by decorated key. */
    public static class SinglePartitionBuilder extends AbstractReadCommandBuilder
    {
        private final DecoratedKey partitionKey;

        public SinglePartitionBuilder(ColumnFamilyStore cfs, DecoratedKey key)
        {
            super(cfs);
            this.partitionKey = key;
        }

        @Override
        public ReadCommand build()
        {
            return SinglePartitionReadCommand.create(cfs.metadata, nowInSeconds, makeColumnFilter(), filter, makeLimits(), partitionKey, makeFilter());
        }
    }

    /** Builds a single-partition read over explicitly added slices. */
    public static class SinglePartitionSliceBuilder extends AbstractReadCommandBuilder
    {
        private final DecoratedKey partitionKey;
        private Slices.Builder sliceBuilder;

        public SinglePartitionSliceBuilder(ColumnFamilyStore cfs, DecoratedKey key)
        {
            super(cfs);
            this.partitionKey = key;
            sliceBuilder = new Slices.Builder(cfs.getComparator());
        }

        public SinglePartitionSliceBuilder addSlice(Slice slice)
        {
            sliceBuilder.add(slice);
            return this;
        }

        // Overrides the base filter: uses the explicit slices rather than the
        // from/to bounds accumulated on the parent.
        @Override
        protected ClusteringIndexFilter makeFilter()
        {
            return new ClusteringIndexSliceFilter(sliceBuilder.build(), reversed);
        }

        @Override
        public ReadCommand build()
        {
            return SinglePartitionSliceCommand.create(cfs.metadata, nowInSeconds, makeColumnFilter(), filter, makeLimits(), partitionKey, makeFilter());
        }
    }

    /** Builds a partition-range read between optional start/end keys. */
    public static class PartitionRangeBuilder extends AbstractReadCommandBuilder
    {
        private DecoratedKey startKey;
        private boolean startInclusive;
        private DecoratedKey endKey;
        private boolean endInclusive;

        public PartitionRangeBuilder(ColumnFamilyStore cfs)
        {
            super(cfs);
        }

        public PartitionRangeBuilder fromKeyIncl(Object... values)
        {
            assert startKey == null;
            this.startInclusive = true;
            this.startKey = makeKey(cfs.metadata, values);
            return this;
        }

        public PartitionRangeBuilder fromKeyExcl(Object... values)
        {
            assert startKey == null;
            this.startInclusive = false;
            this.startKey = makeKey(cfs.metadata, values);
            return this;
        }

        public PartitionRangeBuilder toKeyIncl(Object... values)
        {
            assert endKey == null;
            this.endInclusive = true;
            this.endKey = makeKey(cfs.metadata, values);
            return this;
        }

        public PartitionRangeBuilder toKeyExcl(Object... values)
        {
            assert endKey == null;
            this.endInclusive = false;
            this.endKey = makeKey(cfs.metadata, values);
            return this;
        }

        @Override
        public ReadCommand build()
        {
            // Missing bounds default to the partitioner's minimum-token key
            // bound, i.e. an effectively unbounded range on that side.
            PartitionPosition start = startKey;
            if (start == null)
            {
                start = cfs.getPartitioner().getMinimumToken().maxKeyBound();
                startInclusive = false;
            }
            PartitionPosition end = endKey;
            if (end == null)
            {
                end = cfs.getPartitioner().getMinimumToken().maxKeyBound();
                endInclusive = true;
            }

            // Pick the AbstractBounds subclass matching the inclusivity combination.
            AbstractBounds<PartitionPosition> bounds;
            if (startInclusive && endInclusive)
                bounds = new Bounds<>(start, end);
            else if (startInclusive && !endInclusive)
                bounds = new IncludingExcludingBounds<>(start, end);
            else if (!startInclusive && endInclusive)
                bounds = new Range<>(start, end);
            else
                bounds = new ExcludingBounds<>(start, end);

            return new PartitionRangeReadCommand(cfs.metadata, nowInSeconds, makeColumnFilter(), filter, makeLimits(), new DataRange(bounds, makeFilter()), Optional.empty());
        }

        // Decorates raw partition-key components (or passes through an
        // already-decorated key) using the table's key validator.
        static DecoratedKey makeKey(CFMetaData metadata, Object... partitionKey)
        {
            if (partitionKey.length == 1 && partitionKey[0] instanceof DecoratedKey)
                return (DecoratedKey)partitionKey[0];

            ByteBuffer key = CFMetaData.serializePartitionKey(metadata.getKeyValidatorAsClusteringComparator().make(partitionKey));
            return metadata.decorateKey(key);
        }
    }
}
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package org.spongepowered.common.data.util;

import com.flowpowered.math.vector.Vector3d;
import com.flowpowered.math.vector.Vector3i;
import net.minecraft.entity.item.EntityArmorStand;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.BlockTypes;
import org.spongepowered.api.data.type.*;
import org.spongepowered.api.entity.EntityArchetype;
import org.spongepowered.api.entity.EntityType;
import org.spongepowered.api.entity.EntityTypes;
import org.spongepowered.api.entity.living.player.gamemode.GameMode;
import org.spongepowered.api.entity.living.player.gamemode.GameModes;
import org.spongepowered.api.util.Axis;
import org.spongepowered.api.util.Direction;
import org.spongepowered.common.entity.EntityUtil;
import org.spongepowered.common.util.VecHelper;
import org.spongepowered.api.util.weighted.WeightedSerializableObject;

/**
 * A standard class where all various "constants" for various data are stored.
 * This is for a singular unique point of reference that can be changed
 * for implementation requirements.
 *
 * <p><em>WARNING</em>: USAGE OF THESE CONSTANTS, DUE TO STATIC INITIALIZATION,
 * IS ABSOLUTELY FORBIDDEN UNTIL THE GAME IS DURING THE POST-INIT PHASE DUE
 * TO REGISTRATION OF CATALOG TYPES. UNTIL THE REGISTRATION IS HANDLED WHERE
 * THE PROVIDED CATALOG TYPES ARE PROPERLY REGISTERED AND NOT <code>null</code>,
 * ANY USE OF THIS CLASS WILL RESULT IN A GLORIOUS FAIL INDESCRIBABLE MAGNITUDES.
 * </p>
 */
public final class DataConstants {

    // NOTE(review): duplicates DEFAULT_FIRE_TICKS below (both 10); kept for
    // source compatibility — confirm whether one can be deprecated.
    public static final int DEFAULT_FIRE_TICKSVALUE = 10;
    public static final int DEFAULT_FIRE_DAMAGE_DELAY = 20;

    // Constant holder; never instantiated.
    private DataConstants() {}

    public static final Axis DEFAULT_AXIS = Axis.X;
    public static final boolean DEFAULT_DECAYABLE_VALUE = false;
    public static final Direction DEFAULT_DIRECTION = Direction.NONE;
    public static final boolean DEFAULT_DISARMED = true;
    public static final boolean DEFAULT_SHOULD_DROP = true;
    public static final boolean DEFAULT_PISTON_EXTENDED = false;

    // A bunch of entity defaults (for use in constructing "default" values)
    public static final boolean CAN_FLY_DEFAULT = false;
    public static final boolean ELDER_GUARDIAN_DEFAULT = false;
    public static final boolean IS_WET_DEFAULT = false;
    public static final boolean DEFAULT_ATTACHED = false;
    public static final boolean DEFAULT_GLOWING = false;
    public static final boolean DEFAULT_HAS_GRAVITY = true;

    public static final int DEFAULT_FIRE_TICKS = 10;
    public static final int MINIMUM_FIRE_TICKS = 1;

    // Item "hide" bit flags (combined into a single bitmask on item stacks).
    public static final int HIDE_MISCELLANEOUS_FLAG = 32;
    public static final int HIDE_CAN_PLACE_FLAG = 16;
    public static final int HIDE_CAN_DESTROY_FLAG = 8;
    public static final int HIDE_UNBREAKABLE_FLAG = 4;
    public static final int HIDE_ATTRIBUTES_FLAG = 2;
    public static final int HIDE_ENCHANTMENTS_FLAG = 1;

    public static final double DEFAULT_FLYING_SPEED = 0.05D;

    public static final double DEFAULT_EXHAUSTION = 0;
    public static final double MINIMUM_EXHAUSTION = 0;
    public static final double DEFAULT_SATURATION = 0;
    public static final int DEFAULT_FOOD_LEVEL = 20;

    // Falling-block defaults.
    public static final double DEFAULT_FALLING_BLOCK_FALL_DAMAGE_PER_BLOCK = 2D;
    public static final double DEFAULT_FALLING_BLOCK_MAX_FALL_DAMAGE = 40;
    public static final boolean DEFAULT_FALLING_BLOCK_CAN_PLACE_AS_BLOCK = false;
    public static final boolean DEFAULT_FALLING_BLOCK_CAN_DROP_AS_ITEM = true;
    public static final int DEFAULT_FALLING_BLOCK_FALL_TIME = 1;
    public static final boolean DEFAULT_FALLING_BLOCK_CAN_HURT_ENTITIES = false;

    public static final BlockState DEFAULT_BLOCK_STATE = BlockTypes.STONE.getDefaultState();

    public static final boolean ANGRY_DEFAULT = false;

    // Original (0) / Copy of original (1) / Copy of a copy (2) / Tattered (3)
    public static final int MAXIMUM_GENERATION = 3;

    // Armor-stand pose defaults, mirrored from the vanilla armor stand entity.
    public static final Vector3d DEFAULT_HEAD_ROTATION = VecHelper.toVector3d(EntityArmorStand.DEFAULT_HEAD_ROTATION);
    public static final Vector3d DEFAULT_CHEST_ROTATION = VecHelper.toVector3d(EntityArmorStand.DEFAULT_BODY_ROTATION);
    public static final Vector3d DEFAULT_LEFT_ARM_ROTATION = VecHelper.toVector3d(EntityArmorStand.DEFAULT_LEFTARM_ROTATION);
    public static final Vector3d DEFAULT_RIGHT_ARM_ROTATION = VecHelper.toVector3d(EntityArmorStand.DEFAULT_RIGHTARM_ROTATION);
    public static final Vector3d DEFAULT_LEFT_LEG_ROTATION = VecHelper.toVector3d(EntityArmorStand.DEFAULT_LEFTLEG_ROTATION);
    public static final Vector3d DEFAULT_RIGHT_LEG_ROTATION = VecHelper.toVector3d(EntityArmorStand.DEFAULT_RIGHTLEG_ROTATION);

    // Structure block entity
    public static final String DEFAULT_STRUCTURE_AUTHOR = ""; // intentionally empty, as in vanilla
    public static final boolean DEFAULT_STRUCTURE_IGNORE_ENTITIES = true;
    public static final float DEFAULT_STRUCTURE_INTEGRITY = 1.0F;
    public static final StructureMode DEFAULT_STRUCTURE_MODE = StructureModes.DATA;
    public static final Vector3i DEFAULT_STRUCTURE_POSITION = Vector3i.ONE;
    public static final boolean DEFAULT_STRUCTURE_POWERED = false;
    public static final boolean DEFAULT_STRUCTURE_SHOW_AIR = false;
    public static final boolean DEFAULT_STRUCTURE_SHOW_BOUNDING_BOX = true;
    public static final long DEFAULT_STRUCTURE_SEED = 0L;
    public static final Vector3i DEFAULT_STRUCTURE_SIZE = Vector3i.ONE;

    // Mob-spawner defaults (ticks / block ranges, as shorts like the NBT they mirror).
    public static final short MINIMUM_SPAWNER_MAXIMUM_SPAWN_DELAY = 1;
    public static final short DEFAULT_SPAWNER_REMAINING_DELAY = 20;
    public static final short DEFAULT_SPAWNER_MINIMUM_SPAWN_DELAY = 200;
    public static final short DEFAULT_SPAWNER_MAXIMUM_SPAWN_DELAY = 800;
    public static final short DEFAULT_SPAWNER_SPAWN_COUNT = 4;
    public static final short DEFAULT_SPAWNER_MAXMIMUM_NEARBY_ENTITIES = 6; // NOTE(review): "MAXMIMUM" typo kept — public API name
    public static final short DEFAULT_SPAWNER_REQUIRED_PLAYER_RANGE = 16;
    public static final short DEFAULT_SPAWNER_SPAWN_RANGE = 4;
    // Forward reference to Catalog is safe: the nested class initializes on first access.
    public static final WeightedSerializableObject<EntityArchetype> DEFAULT_SPAWNER_NEXT_ENTITY_TO_SPAWN =
            new WeightedSerializableObject<>(EntityUtil.archetype(Catalog.DEFAULT_SPAWNER_ENTITY), 1);

    /** Default catalog-type values (require catalog registration; see class warning). */
    public static final class Catalog {

        public static final DyeColor DEFAULT_SHEEP_COLOR = DyeColors.WHITE;
        public static final EntityType DEFAULT_SPAWNER_ENTITY = EntityTypes.PIG;

        private Catalog() {}

        public static final BigMushroomType DEFAULT_BIG_MUSHROOM_TYPE = BigMushroomTypes.ALL_OUTSIDE;
        public static final BrickType DEFAULT_BRICK_TYPE = BrickTypes.DEFAULT;
        public static final ComparatorType DEFAULT_COMPARATOR_TYPE = ComparatorTypes.COMPARE;
        public static final DirtType DEFAULT_DIRT_TYPE = DirtTypes.DIRT;
        public static final DisguisedBlockType DEFAULT_DISGUISED_BLOCK = DisguisedBlockTypes.STONE;
        public static final DoublePlantType DEFAULT_DOUBLE_PLANT = DoublePlantTypes.GRASS;
        public static final DyeColor DEFAULT_BANNER_BASE = DyeColors.BLACK;
        public static final OcelotType DEFAULT_OCELOT = OcelotTypes.WILD_OCELOT;
        public static final Career CAREER_DEFAULT = Careers.FARMER;
        public static final GameMode DEFAULT_GAMEMODE = GameModes.NOT_SET;
        public static final BlockState DEFAULT_FALLING_BLOCK_BLOCKSTATE = BlockTypes.SAND.getDefaultState();
        public static final BlockState DEFAULT_BLOCK_STATE = BlockTypes.STONE.getDefaultState();
        public static final Art DEFAULT_ART = Arts.KEBAB;
        public static final PickupRule DEFAULT_PICKUP_RULE = PickupRules.ALLOWED;
    }

    /** Generic entity defaults. */
    public static final class Entity {

        public static final double DEFAULT_ABSORPTION = 0.0f;

        // FIX: constant holder was instantiable; private ctor added for
        // consistency with Catalog/Horse/Rabbit/Item.
        private Entity() {}

        /** Dropped-item entity pickup/despawn delays (vanilla stores them as shorts). */
        public static final class Item {
            public static final int MIN_PICKUP_DELAY = Short.MIN_VALUE;
            public static final int MAX_PICKUP_DELAY = Short.MAX_VALUE;
            public static final int DEFAULT_PICKUP_DELAY = 0;
            public static final int MIN_DESPAWN_DELAY = Short.MIN_VALUE;
            public static final int MAX_DESPAWN_DELAY = Short.MAX_VALUE;
            public static final int DEFAULT_DESPAWN_DELAY = 0;
            // Sentinel values meaning "never picked up" / "never despawns".
            public static final int MAGIC_NO_PICKUP = MAX_PICKUP_DELAY;
            public static final int MAGIC_NO_DESPAWN = MIN_DESPAWN_DELAY;

            private Item() {
            }
        }
    }

    /** Horse defaults. */
    public static final class Horse {

        public static final HorseStyle DEFAULT_STYLE = HorseStyles.NONE;
        public static final HorseColor DEFAULT_COLOR = HorseColors.WHITE;

        private Horse() {
        }
    }

    /** Rabbit defaults. */
    public static final class Rabbit {

        public static final RabbitType DEFAULT_TYPE = RabbitTypes.WHITE;

        private Rabbit() {
        }
    }

    /** Ocelot defaults. */
    public static final class Ocelot {

        public static final OcelotType DEFAULT_TYPE = OcelotTypes.WILD_OCELOT;

        // FIX: missing private ctor added for consistency with the other holders.
        private Ocelot() {
        }
    }

    /** Llama defaults. */
    public static final class Llama {

        public static final LlamaVariant DEFAULT_VARIANT = LlamaVariants.WHITE;
        public static final int DEFAULT_STRENGTH = 1;
        public static final int MINIMUM_STRENGTH = 1;
        public static final int MAXIMUM_STRENGTH = 5;

        // FIX: missing private ctor added for consistency with the other holders.
        private Llama() {
        }
    }
}
/* * The MIT License * * Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package hudson.logging;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.FeedAdapter;
import hudson.Functions;
import hudson.init.Initializer;
import static hudson.init.InitMilestone.PLUGINS_PREPARED;
import hudson.model.AbstractModelObject;
import jenkins.model.Jenkins;
import hudson.model.RSS;
import hudson.util.CopyOnWriteMap;
import jenkins.model.JenkinsLocationConfiguration;
import jenkins.model.ModelObjectWithChildren;
import jenkins.model.ModelObjectWithContextMenu.ContextMenu;
import jenkins.util.SystemProperties;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerProxy;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.HttpRedirect;
import org.kohsuke.stapler.interceptor.RequirePOST;

import javax.servlet.ServletException;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;

/**
 * Owner of {@link LogRecorder}s, bound to "/log".
 *
 * @author Kohsuke Kawaguchi
 */
public class LogRecorderManager extends AbstractModelObject implements ModelObjectWithChildren, StaplerProxy {
    /**
     * {@link LogRecorder}s keyed by their {@linkplain LogRecorder#name name}.
     */
    public final transient Map<String,LogRecorder> logRecorders = new CopyOnWriteMap.Tree<>();

    @Override
    public String getDisplayName() {
        return Messages.LogRecorderManager_DisplayName();
    }

    @Override
    public String getSearchUrl() {
        return "/log";
    }

    // Stapler URL binding: /log/<token> resolves to the recorder of that name.
    public LogRecorder getDynamic(String token) {
        return getLogRecorder(token);
    }

    /** Looks up a recorder by name, or null if none exists. */
    public LogRecorder getLogRecorder(String token) {
        return logRecorders.get(token);
    }

    // Directory under $JENKINS_HOME where recorder XML configs live.
    static File configDir() {
        return new File(Jenkins.get().getRootDir(), "log");
    }

    /**
     * Loads the configuration from disk.
     * Replaces the in-memory set of recorders with whatever *.xml files exist
     * in the config directory; a missing directory leaves the set empty.
     *
     * @throws IOException if a recorder's XML file cannot be read
     */
    public void load() throws IOException {
        logRecorders.clear();
        File dir = configDir();
        File[] files = dir.listFiles((FileFilter)new WildcardFileFilter("*.xml"));
        if(files==null)     return;
        for (File child : files) {
            String name = child.getName();
            name = name.substring(0,name.length()-4); // cut off ".xml"
            LogRecorder lr = new LogRecorder(name);
            lr.load();
            logRecorders.put(name,lr);
        }
    }

    /**
     * Creates a new log recorder.
     * Requires ADMINISTER; the name must pass Jenkins' good-name check.
     *
     * @param name name of the new recorder; becomes its URL token
     * @return redirect to the new recorder's configuration page
     */
    @RequirePOST
    public HttpResponse doNewLogRecorder(@QueryParameter String name) {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        Jenkins.checkGoodName(name);
        logRecorders.put(name,new LogRecorder(name));

        // redirect to the config screen
        return new HttpRedirect(name+"/configure");
    }

    // Builds the sidebar context menu: the "all logs" entry plus one per recorder.
    @Override
    public ContextMenu doChildrenContextMenu(StaplerRequest request, StaplerResponse response) throws Exception {
        ContextMenu menu = new ContextMenu();
        menu.add("all","All Jenkins Logs");
        for (LogRecorder lr : logRecorders.values()) {
            menu.add(lr.getSearchUrl(), lr.getDisplayName());
        }
        return menu;
    }

    /**
     * Configure the logging level.
     * Requires ADMINISTER. "inherit" clears the level so the logger inherits
     * from its parent; any other value is parsed as a {@link Level} name.
     *
     * @param name  fully qualified logger name
     * @param level level name, or "inherit"
     */
    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings("LG_LOST_LOGGER_DUE_TO_WEAK_REFERENCE")
    @RequirePOST
    public HttpResponse doConfigLogger(@QueryParameter String name, @QueryParameter String level) {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        Level lv;
        if(level.equals("inherit"))
            lv = null;
        else
            lv = Level.parse(level.toUpperCase(Locale.ENGLISH));
        Logger.getLogger(name).setLevel(lv);
        return new HttpRedirect("levels");
    }

    /**
     * RSS feed for log entries.
     */
    public void doRss( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
        doRss(req, rsp, Jenkins.logRecords);
    }

    /**
     * Renders the given log recorders as RSS.
     * An optional "level" request parameter filters the records to those at or
     * above the given {@link Level}.
     */
    /*package*/ static void doRss(StaplerRequest req, StaplerResponse rsp, List<LogRecord> logs) throws IOException, ServletException {
        // filter log records based on the log level
        String entryType = "all";
        String level = req.getParameter("level");
        if(level!=null) {
            Level threshold = Level.parse(level);
            List<LogRecord> filtered = new ArrayList<>();
            for (LogRecord r : logs) {
                if(r.getLevel().intValue() >= threshold.intValue())
                    filtered.add(r);
            }
            logs = filtered;
            entryType = level;
        }

        // Adapts LogRecords into feed entries for the generic RSS renderer.
        RSS.forwardToRss("Jenkins:log (" + entryType + " entries)","", logs, new FeedAdapter<LogRecord>() {
            @Override
            public String getEntryTitle(LogRecord entry) {
                return entry.getMessage();
            }

            @Override
            public String getEntryUrl(LogRecord entry) {
                return "log";   // TODO: one URL for one log entry?
            }

            @Override
            public String getEntryID(LogRecord entry) {
                return String.valueOf(entry.getSequenceNumber());
            }

            @Override
            public String getEntryDescription(LogRecord entry) {
                return Functions.printLogRecord(entry);
            }

            @Override
            public Calendar getEntryTimestamp(LogRecord entry) {
                GregorianCalendar cal = new GregorianCalendar();
                cal.setTimeInMillis(entry.getMillis());
                return cal;
            }

            @Override
            public String getEntryAuthor(LogRecord entry) {
                return JenkinsLocationConfiguration.get().getAdminAddress();
            }
        },req,rsp);
    }

    // Startup hook: load saved recorders before plugins finish preparing.
    @Initializer(before=PLUGINS_PREPARED)
    public static void init(Jenkins h) throws IOException {
        h.getLog().load();
    }

    // StaplerProxy gate: requires SYSTEM_READ for any URL under /log unless
    // the escape-hatch property below is set.
    @Override
    @Restricted(NoExternalUse.class)
    public Object getTarget() {
        if (!SKIP_PERMISSION_CHECK) {
            Jenkins.get().checkPermission(Jenkins.SYSTEM_READ);
        }
        return this;
    }

    /**
     * Escape hatch for StaplerProxy-based access control
     */
    @Restricted(NoExternalUse.class)
    @SuppressFBWarnings("MS_SHOULD_BE_FINAL")
    public static /* Script Console modifiable */ boolean SKIP_PERMISSION_CHECK =
            SystemProperties.getBoolean(LogRecorderManager.class.getName() + ".skipPermissionCheck");
}
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.sync;

import android.app.Activity;
import android.content.Context;
import android.util.Log;

import org.chromium.base.ActivityState;
import org.chromium.base.ApplicationStatus;
import org.chromium.base.ApplicationStatus.ActivityStateListener;
import org.chromium.base.CalledByNative;
import org.chromium.base.ThreadUtils;
import org.chromium.base.VisibleForTesting;
import org.chromium.base.annotations.SuppressFBWarnings;
import org.chromium.chrome.browser.identity.UniqueIdentificationGenerator;
import org.chromium.sync.internal_api.pub.PassphraseType;
import org.chromium.sync.internal_api.pub.base.ModelType;

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Android wrapper of the ProfileSyncService which provides access from the Java layer.
 * <p/>
 * This class mostly wraps native classes, but it make a few business logic decisions, both in Java
 * and in native.
 * <p/>
 * Only usable from the UI thread as the native ProfileSyncService requires its access to be in the
 * UI thread.
 * <p/>
 * See chrome/browser/sync/profile_sync_service.h for more details.
 */
public class ProfileSyncService {

    /**
     * Listener for the underlying sync status.
     */
    public interface SyncStateChangedListener {
        // Invoked when the status has changed.
        public void syncStateChanged();
    }

    private static final String TAG = "ProfileSyncService";

    // Prefix for the machine tag used by session sync; see setSessionsId().
    @VisibleForTesting
    public static final String SESSION_TAG_PREFIX = "session_sync";

    // Lazily created singleton; see get().
    private static ProfileSyncService sProfileSyncService;

    @VisibleForTesting
    protected final Context mContext;

    // Sync state changes more often than listeners are added/removed, so using CopyOnWrite.
    private final List<SyncStateChangedListener> mListeners =
            new CopyOnWriteArrayList<SyncStateChangedListener>();

    // Native ProfileSyncServiceAndroid object. Can not be final since we set it to 0 in destroy().
    // NOTE(review): the field IS declared final here, which contradicts the sentence above —
    // confirm whether a destroy() path exists elsewhere.
    private final long mNativeProfileSyncServiceAndroid;

    /**
     * A helper method for retrieving the application-wide SyncSetupManager.
     * <p/>
     * Can only be accessed on the main thread.
     *
     * @param context the ApplicationContext is retrieved from the context used as an argument.
     * @return a singleton instance of the SyncSetupManager
     */
    @SuppressFBWarnings("LI_LAZY_INIT")
    public static ProfileSyncService get(Context context) {
        ThreadUtils.assertOnUiThread();
        if (sProfileSyncService == null) {
            sProfileSyncService = new ProfileSyncService(context);
        }
        return sProfileSyncService;
    }

    /**
     * Replaces the singleton instance; intended for tests only.
     */
    @VisibleForTesting
    public static void overrideForTests(ProfileSyncService profileSyncService) {
        sProfileSyncService = profileSyncService;
    }

    /**
     * This is called pretty early in our application. Avoid any blocking operations here.
     */
    protected ProfileSyncService(Context context) {
        ThreadUtils.assertOnUiThread();
        // We should store the application context, as we outlive any activity which may create us.
        mContext = context.getApplicationContext();
        // This may cause us to create ProfileSyncService even if sync has not
        // been set up, but ProfileSyncService::Startup() won't be called until
        // credentials are available.
        mNativeProfileSyncServiceAndroid = nativeInit();
        // When the application gets paused, tell sync to flush the directory to disk.
        ApplicationStatus.registerStateListenerForAllActivities(new ActivityStateListener() {
            @Override
            public void onActivityStateChange(Activity activity, int newState) {
                if (newState == ActivityState.PAUSED) {
                    flushDirectory();
                }
            }
        });
    }

    // Exposes the native counterpart's pointer to native callers.
    @CalledByNative
    private static long getProfileSyncServiceAndroid(Context context) {
        return get(context).mNativeProfileSyncServiceAndroid;
    }

    /**
     * If we are currently in the process of setting up sync, this method clears the
     * sync setup in progress flag.
     */
    @VisibleForTesting
    public void finishSyncFirstSetupIfNeeded() {
        if (isFirstSetupInProgress()) {
            setSyncSetupCompleted();
            setSetupInProgress(false);
        }
    }

    /**
     * Signs the current account out of sync.
     */
    public void signOut() {
        nativeSignOutSync(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Signs in to sync, using the currently signed-in account.
     */
    public void syncSignIn() {
        nativeSignInSync(mNativeProfileSyncServiceAndroid);
        // Notify listeners right away that the sync state has changed (native side does not do
        // this)
        syncStateChanged();
    }

    /**
     * Returns a human-readable summary of the current sync status from native.
     */
    public String querySyncStatus() {
        ThreadUtils.assertOnUiThread();
        return nativeQuerySyncStatusSummary(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Sets the the machine tag used by session sync to a unique value.
     */
    public void setSessionsId(UniqueIdentificationGenerator generator) {
        ThreadUtils.assertOnUiThread();
        String uniqueTag = generator.getUniqueId(null);
        if (uniqueTag.isEmpty()) {
            Log.e(TAG, "Unable to get unique tag for sync. "
                    + "This may lead to unexpected tab sync behavior.");
            return;
        }
        String sessionTag = SESSION_TAG_PREFIX + uniqueTag;
        if (!nativeSetSyncSessionsId(mNativeProfileSyncServiceAndroid, sessionTag)) {
            Log.e(TAG, "Unable to write session sync tag. "
                    + "This may lead to unexpected tab sync behavior.");
        }
    }

    /**
     * Returns the actual passphrase type being used for encryption.
     * The sync backend must be running (isSyncInitialized() returns true) before
     * calling this function.
     * <p/>
     * This method should only be used if you want to know the raw value. For checking whether
     * we should ask the user for a passphrase, use isPassphraseRequiredForDecryption().
     */
    public PassphraseType getPassphraseType() {
        assert isSyncInitialized();
        int passphraseType = nativeGetPassphraseType(mNativeProfileSyncServiceAndroid);
        return PassphraseType.fromInternalValue(passphraseType);
    }

    // True once the keystore encryption migration has completed on the native side.
    public boolean isSyncKeystoreMigrationDone() {
        assert isSyncInitialized();
        return nativeIsSyncKeystoreMigrationDone(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Returns true if the current explicit passphrase time is defined.
     */
    public boolean hasExplicitPassphraseTime() {
        assert isSyncInitialized();
        return nativeHasExplicitPassphraseTime(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Returns the current explicit passphrase time in milliseconds since epoch.
     */
    public long getExplicitPassphraseTime() {
        assert isSyncInitialized();
        return nativeGetExplicitPassphraseTime(mNativeProfileSyncServiceAndroid);
    }

    public String getSyncEnterGooglePassphraseBodyWithDateText() {
        assert isSyncInitialized();
        return nativeGetSyncEnterGooglePassphraseBodyWithDateText(mNativeProfileSyncServiceAndroid);
    }

    public String getSyncEnterCustomPassphraseBodyWithDateText() {
        assert isSyncInitialized();
        return nativeGetSyncEnterCustomPassphraseBodyWithDateText(mNativeProfileSyncServiceAndroid);
    }

    public String getCurrentSignedInAccountText() {
        assert isSyncInitialized();
        return nativeGetCurrentSignedInAccountText(mNativeProfileSyncServiceAndroid);
    }

    public String getSyncEnterCustomPassphraseBodyText() {
        return nativeGetSyncEnterCustomPassphraseBodyText(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Checks if sync is currently set to use a custom passphrase. The sync backend must be running
     * (isSyncInitialized() returns true) before calling this function.
     *
     * @return true if sync is using a custom passphrase.
     */
    public boolean isUsingSecondaryPassphrase() {
        assert isSyncInitialized();
        return nativeIsUsingSecondaryPassphrase(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Checks if we need a passphrase to decrypt a currently-enabled data type. This returns false
     * if a passphrase is needed for a type that is not currently enabled.
     *
     * @return true if we need a passphrase.
     */
    public boolean isPassphraseRequiredForDecryption() {
        assert isSyncInitialized();
        return nativeIsPassphraseRequiredForDecryption(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Checks if we need a passphrase to decrypt any data type (including types that aren't
     * currently enabled or supported, such as passwords). This API is used to determine if we
     * need to provide a decryption passphrase before we can re-encrypt with a custom passphrase.
     *
     * @return true if we need a passphrase for some type.
     */
    public boolean isPassphraseRequiredForExternalType() {
        assert isSyncInitialized();
        return nativeIsPassphraseRequiredForExternalType(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Checks if the sync backend is running.
     *
     * @return true if sync is initialized/running.
     */
    public boolean isSyncInitialized() {
        return nativeIsSyncInitialized(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Checks if the first sync setup is currently in progress.
     *
     * @return true if first sync setup is in progress
     */
    public boolean isFirstSetupInProgress() {
        return nativeIsFirstSetupInProgress(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Checks if encrypting all the data types is allowed.
     *
     * @return true if encrypting all data types is allowed, false if only passwords are allowed to
     *         be encrypted.
     */
    public boolean isEncryptEverythingAllowed() {
        assert isSyncInitialized();
        return nativeIsEncryptEverythingAllowed(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Checks if the all the data types are encrypted.
     *
     * @return true if all data types are encrypted, false if only passwords are encrypted.
     */
    public boolean isEncryptEverythingEnabled() {
        assert isSyncInitialized();
        return nativeIsEncryptEverythingEnabled(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Turns on encryption of all data types. This only takes effect after sync configuration is
     * completed and setPreferredDataTypes() is invoked.
     */
    public void enableEncryptEverything() {
        assert isSyncInitialized();
        nativeEnableEncryptEverything(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Sets the passphrase used to encrypt sync data.
     *
     * @param passphrase the passphrase to use.
     * @param isGaia     true if this is the user's GAIA password rather than a custom passphrase.
     */
    public void setEncryptionPassphrase(String passphrase, boolean isGaia) {
        assert isSyncInitialized();
        nativeSetEncryptionPassphrase(mNativeProfileSyncServiceAndroid, passphrase, isGaia);
    }

    public boolean isCryptographerReady() {
        assert isSyncInitialized();
        return nativeIsCryptographerReady(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Attempts to decrypt pending sync data with the given passphrase.
     *
     * @return true if the passphrase was accepted.
     */
    public boolean setDecryptionPassphrase(String passphrase) {
        assert isSyncInitialized();
        return nativeSetDecryptionPassphrase(mNativeProfileSyncServiceAndroid, passphrase);
    }

    // Translates the native auth error code into the Java-side enum.
    public GoogleServiceAuthError.State getAuthError() {
        int authErrorCode = nativeGetAuthError(mNativeProfileSyncServiceAndroid);
        return GoogleServiceAuthError.State.fromCode(authErrorCode);
    }

    /**
     * Gets the set of data types that are currently syncing.
     *
     * This is affected by whether sync is on.
     *
     * @return Set of active data types.
     */
    public Set<ModelType> getActiveDataTypes() {
        long modelTypeSelection = nativeGetActiveDataTypes(mNativeProfileSyncServiceAndroid);
        return modelTypeSelectionToSet(modelTypeSelection);
    }

    /**
     * Gets the set of data types that are enabled in sync.
     *
     * This is unaffected by whether sync is on.
     *
     * @return Set of preferred types.
     */
    public Set<ModelType> getPreferredDataTypes() {
        long modelTypeSelection = nativeGetPreferredDataTypes(mNativeProfileSyncServiceAndroid);
        return modelTypeSelectionToSet(modelTypeSelection);
    }

    /**
     * Converts a native ModelTypeSelection bitmask into the equivalent set of ModelType values.
     */
    @VisibleForTesting
    public static Set<ModelType> modelTypeSelectionToSet(long modelTypeSelection) {
        Set<ModelType> syncTypes = new HashSet<ModelType>();
        if ((modelTypeSelection & ModelTypeSelection.AUTOFILL) != 0) {
            syncTypes.add(ModelType.AUTOFILL);
        }
        if ((modelTypeSelection & ModelTypeSelection.AUTOFILL_PROFILE) != 0) {
            syncTypes.add(ModelType.AUTOFILL_PROFILE);
        }
        if ((modelTypeSelection & ModelTypeSelection.AUTOFILL_WALLET) != 0) {
            syncTypes.add(ModelType.AUTOFILL_WALLET);
        }
        if ((modelTypeSelection & ModelTypeSelection.BOOKMARK) != 0) {
            syncTypes.add(ModelType.BOOKMARK);
        }
        if ((modelTypeSelection & ModelTypeSelection.EXPERIMENTS) != 0) {
            syncTypes.add(ModelType.EXPERIMENTS);
        }
        if ((modelTypeSelection & ModelTypeSelection.NIGORI) != 0) {
            syncTypes.add(ModelType.NIGORI);
        }
        if ((modelTypeSelection & ModelTypeSelection.PASSWORD) != 0) {
            syncTypes.add(ModelType.PASSWORD);
        }
        if ((modelTypeSelection & ModelTypeSelection.SESSION) != 0) {
            syncTypes.add(ModelType.SESSION);
        }
        if ((modelTypeSelection & ModelTypeSelection.TYPED_URL) != 0) {
            syncTypes.add(ModelType.TYPED_URL);
        }
        if ((modelTypeSelection & ModelTypeSelection.HISTORY_DELETE_DIRECTIVE) != 0) {
            syncTypes.add(ModelType.HISTORY_DELETE_DIRECTIVE);
        }
        if ((modelTypeSelection & ModelTypeSelection.DEVICE_INFO) != 0) {
            syncTypes.add(ModelType.DEVICE_INFO);
        }
        if ((modelTypeSelection & ModelTypeSelection.PROXY_TABS) != 0) {
            syncTypes.add(ModelType.PROXY_TABS);
        }
        if ((modelTypeSelection & ModelTypeSelection.FAVICON_IMAGE) != 0) {
            syncTypes.add(ModelType.FAVICON_IMAGE);
        }
        if ((modelTypeSelection & ModelTypeSelection.FAVICON_TRACKING) != 0) {
            syncTypes.add(ModelType.FAVICON_TRACKING);
        }
        // Note the SUPERVISED_* selection bits map onto MANAGED_* model types.
        if ((modelTypeSelection & ModelTypeSelection.SUPERVISED_USER_SETTING) != 0) {
            syncTypes.add(ModelType.MANAGED_USER_SETTING);
        }
        if ((modelTypeSelection & ModelTypeSelection.SUPERVISED_USER_WHITELIST) != 0) {
            syncTypes.add(ModelType.MANAGED_USER_WHITELIST);
        }
        return syncTypes;
    }

    public boolean hasKeepEverythingSynced() {
        return nativeHasKeepEverythingSynced(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Enables syncing for the passed data types.
     *
     * @param syncEverything Set to true if the user wants to sync all data types
     *                       (including new data types we add in the future).
     * @param enabledTypes   The set of types to enable. Ignored (can be null) if
     *                       syncEverything is true.
     */
    public void setPreferredDataTypes(boolean syncEverything, Set<ModelType> enabledTypes) {
        long modelTypeSelection = 0;
        if (syncEverything || enabledTypes.contains(ModelType.AUTOFILL)) {
            modelTypeSelection |= ModelTypeSelection.AUTOFILL;
        }
        if (syncEverything || enabledTypes.contains(ModelType.BOOKMARK)) {
            modelTypeSelection |= ModelTypeSelection.BOOKMARK;
        }
        if (syncEverything || enabledTypes.contains(ModelType.PASSWORD)) {
            modelTypeSelection |= ModelTypeSelection.PASSWORD;
        }
        if (syncEverything || enabledTypes.contains(ModelType.PROXY_TABS)) {
            modelTypeSelection |= ModelTypeSelection.PROXY_TABS;
        }
        if (syncEverything || enabledTypes.contains(ModelType.TYPED_URL)) {
            modelTypeSelection |= ModelTypeSelection.TYPED_URL;
        }
        nativeSetPreferredDataTypes(
                mNativeProfileSyncServiceAndroid, syncEverything, modelTypeSelection);
    }

    public void setSyncSetupCompleted() {
        nativeSetSyncSetupCompleted(mNativeProfileSyncServiceAndroid);
    }

    public boolean hasSyncSetupCompleted() {
        return nativeHasSyncSetupCompleted(mNativeProfileSyncServiceAndroid);
    }

    public boolean isStartSuppressed() {
        return nativeIsStartSuppressed(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Notifies sync whether sync setup is in progress - this tells sync whether it should start
     * syncing data types when it starts up, or if it should just stay in "configuration mode".
     *
     * @param inProgress True to put sync in configuration mode, false to turn off configuration
     *                   and allow syncing.
     */
    public void setSetupInProgress(boolean inProgress) {
        nativeSetSetupInProgress(mNativeProfileSyncServiceAndroid, inProgress);
    }

    public void addSyncStateChangedListener(SyncStateChangedListener listener) {
        ThreadUtils.assertOnUiThread();
        mListeners.add(listener);
    }

    public void removeSyncStateChangedListener(SyncStateChangedListener listener) {
        ThreadUtils.assertOnUiThread();
        mListeners.remove(listener);
    }

    public boolean hasUnrecoverableError() {
        return nativeHasUnrecoverableError(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Called when the state of the native sync engine has changed, so various
     * UI elements can update themselves.
     */
    @CalledByNative
    public void syncStateChanged() {
        if (!mListeners.isEmpty()) {
            for (SyncStateChangedListener listener : mListeners) {
                listener.syncStateChanged();
            }
        }
    }

    @VisibleForTesting
    public String getSyncInternalsInfoForTest() {
        ThreadUtils.assertOnUiThread();
        return nativeGetAboutInfoForTest(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Starts the sync engine.
     */
    public void enableSync() {
        nativeEnableSync(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Stops the sync engine.
     */
    public void disableSync() {
        nativeDisableSync(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Flushes the sync directory.
     */
    public void flushDirectory() {
        nativeFlushDirectory(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Returns the time when the last sync cycle was completed.
     *
     * @return The difference measured in microseconds, between last sync cycle completion time
     *         and 1 January 1970 00:00:00 UTC.
     */
    @VisibleForTesting
    public long getLastSyncedTimeForTest() {
        return nativeGetLastSyncedTimeForTest(mNativeProfileSyncServiceAndroid);
    }

    /**
     * Overrides the Sync engine's NetworkResources. This is used to set up the Sync FakeServer for
     * testing.
     *
     * @param networkResources the pointer to the NetworkResources created by the native code. It
     *                         is assumed that the Java caller has ownership of this pointer;
     *                         ownership is transferred as part of this call.
     */
    public void overrideNetworkResourcesForTest(long networkResources) {
        nativeOverrideNetworkResourcesForTest(mNativeProfileSyncServiceAndroid, networkResources);
    }

    /**
     * Renders the invalidation-relevant subset of a ModelTypeSelection bitmask as a sorted,
     * comma-separated string; used by native tests.
     */
    @CalledByNative
    private static String modelTypeSelectionToStringForTest(long modelTypeSelection) {
        SortedSet<String> set = new TreeSet<String>();
        Set<ModelType> filteredTypes = ModelType.filterOutNonInvalidationTypes(
                modelTypeSelectionToSet(modelTypeSelection));
        for (ModelType type : filteredTypes) {
            set.add(type.toString());
        }
        StringBuilder sb = new StringBuilder();
        Iterator<String> it = set.iterator();
        if (it.hasNext()) {
            sb.append(it.next());
            while (it.hasNext()) {
                sb.append(", ");
                sb.append(it.next());
            }
        }
        return sb.toString();
    }

    /**
     * @return Whether sync is enabled to sync urls or open tabs with a non custom passphrase.
     */
    public boolean isSyncingUrlsWithKeystorePassphrase() {
        return isSyncInitialized()
                && getPreferredDataTypes().contains(ModelType.TYPED_URL)
                && getPassphraseType().equals(PassphraseType.KEYSTORE_PASSPHRASE);
    }

    // Native methods
    private native long nativeInit();
    private native void nativeEnableSync(long nativeProfileSyncServiceAndroid);
    private native void nativeDisableSync(long nativeProfileSyncServiceAndroid);
    private native void nativeFlushDirectory(long nativeProfileSyncServiceAndroid);
    private native void nativeSignInSync(long nativeProfileSyncServiceAndroid);
    private native void nativeSignOutSync(long nativeProfileSyncServiceAndroid);
    private native boolean nativeSetSyncSessionsId(
            long nativeProfileSyncServiceAndroid, String tag);
    private native String nativeQuerySyncStatusSummary(long nativeProfileSyncServiceAndroid);
    private native int nativeGetAuthError(long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsSyncInitialized(long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsFirstSetupInProgress(long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsEncryptEverythingAllowed(long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsEncryptEverythingEnabled(long nativeProfileSyncServiceAndroid);
    private native void nativeEnableEncryptEverything(long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsPassphraseRequiredForDecryption(
            long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsPassphraseRequiredForExternalType(
            long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsUsingSecondaryPassphrase(long nativeProfileSyncServiceAndroid);
    private native boolean nativeSetDecryptionPassphrase(
            long nativeProfileSyncServiceAndroid, String passphrase);
    private native void nativeSetEncryptionPassphrase(
            long nativeProfileSyncServiceAndroid, String passphrase, boolean isGaia);
    private native boolean nativeIsCryptographerReady(long nativeProfileSyncServiceAndroid);
    private native int nativeGetPassphraseType(long nativeProfileSyncServiceAndroid);
    private native boolean nativeHasExplicitPassphraseTime(long nativeProfileSyncServiceAndroid);
    private native long nativeGetExplicitPassphraseTime(long nativeProfileSyncServiceAndroid);
    private native String nativeGetSyncEnterGooglePassphraseBodyWithDateText(
            long nativeProfileSyncServiceAndroid);
    private native String nativeGetSyncEnterCustomPassphraseBodyWithDateText(
            long nativeProfileSyncServiceAndroid);
    private native String nativeGetCurrentSignedInAccountText(long nativeProfileSyncServiceAndroid);
    private native String nativeGetSyncEnterCustomPassphraseBodyText(
            long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsSyncKeystoreMigrationDone(long nativeProfileSyncServiceAndroid);
    private native long nativeGetActiveDataTypes(long nativeProfileSyncServiceAndroid);
    private native long nativeGetPreferredDataTypes(long nativeProfileSyncServiceAndroid);
    private native void nativeSetPreferredDataTypes(
            long nativeProfileSyncServiceAndroid, boolean syncEverything, long modelTypeSelection);
    private native void nativeSetSetupInProgress(
            long nativeProfileSyncServiceAndroid, boolean inProgress);
    private native void nativeSetSyncSetupCompleted(long nativeProfileSyncServiceAndroid);
    private native boolean nativeHasSyncSetupCompleted(long nativeProfileSyncServiceAndroid);
    private native boolean nativeIsStartSuppressed(long nativeProfileSyncServiceAndroid);
    private native boolean nativeHasKeepEverythingSynced(long nativeProfileSyncServiceAndroid);
    private native boolean nativeHasUnrecoverableError(long nativeProfileSyncServiceAndroid);
    private native String nativeGetAboutInfoForTest(long nativeProfileSyncServiceAndroid);
    private native long nativeGetLastSyncedTimeForTest(long nativeProfileSyncServiceAndroid);
    private native void nativeOverrideNetworkResourcesForTest(
            long nativeProfileSyncServiceAndroid, long networkResources);
}
/* * Copyright 2014-2015 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.kotcrab.vis.editor.ui; import com.badlogic.gdx.backends.lwjgl.LwjglApplication; import com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration; import com.badlogic.gdx.backends.lwjgl.LwjglCanvas; import com.kotcrab.vis.editor.App; import com.kotcrab.vis.editor.Editor; import com.kotcrab.vis.editor.Log; import com.kotcrab.vis.editor.util.vis.LaunchConfiguration; import javax.imageio.ImageIO; import javax.swing.*; import java.awt.BorderLayout; import java.awt.Canvas; import java.awt.EventQueue; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.awt.image.BufferedImage; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.net.URL; import java.util.Set; /** * VisEditor AWT/Swing based frame that holds {@link LwjglCanvas}. {@link LwjglApplication} is not used directly, * because unfortunately we need some features from swing. 
 * @author Kotcrab */
public class EditorFrame extends JFrame {
	// Command-line options the editor was launched with.
	private final LaunchConfiguration launchConfig;

	private Editor editor;
	private LwjglCanvas editorCanvas;

	public EditorFrame (SplashController splashController, LaunchConfiguration launchConfig) {
		this.launchConfig = launchConfig;
		setTitle("VisEditor");
		// We handle closing ourselves so the editor can show an exit-confirmation dialog.
		setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
		addWindowListener(new WindowAdapter() {
			@Override
			public void windowClosing (WindowEvent e) {
				performEditorExit();
			}
		});

		setIconImage(loadImage("/com/kotcrab/vis/editor/icon.png"));

		LwjglApplicationConfiguration config = new LwjglApplicationConfiguration();
		config.width = 1280;
		config.height = 720;
		config.backgroundFPS = 0; //default is 60, when in background it takes a lot of cpu, maybe vsync causes it?
		config.allowSoftwareMode = launchConfig.allowSoftwareMode;

		editor = new Editor(this, launchConfig);

		editorCanvas = new LwjglCanvas(editor, config);
		Canvas canvas = editorCanvas.getCanvas();
		canvas.setSize(1280, 720);

		getContentPane().add(canvas, BorderLayout.CENTER);

		pack();
		setLocationRelativeTo(null);

		// Signal the splash polling thread that it may dispose the splash window.
		splashController.shouldClose = true;
	}

	public static void main (String[] args) {
		App.init();

		LaunchConfiguration launchConfig = new LaunchConfiguration();

		//TODO: needs some better parser
		for (int i = 0; i < args.length; i++) {
			String arg = args[i];

			if (arg.equals("--no-splash")) {
				launchConfig.showSplash = false;
				continue;
			}

			if (arg.equals("--scale-ui")) {
				launchConfig.scaleUIEnabled = true;
				continue;
			}

			if (arg.equals("--allow-software-mode")) {
				launchConfig.allowSoftwareMode = true;
				continue;
			}

			if (arg.equals("--project")) {
				if (i + 1 >= args.length) {
					throw new IllegalStateException("Not enough parameters for --project <project path>");
				}

				launchConfig.projectPath = args[i + 1];
				i++;
				continue;
			}

			if (arg.equals("--scene")) {
				if (i + 1 >= args.length) {
					throw new IllegalStateException("Not enough parameters for --scene <scene path>");
				}

				launchConfig.scenePath = args[i + 1];
				i++;
				continue;
			}

			Log.warn("Unrecognized command line argument: " + arg);
		}

		launchConfig.verify();

		try {
			UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
		} catch (ClassNotFoundException | InstantiationException | UnsupportedLookAndFeelException | IllegalAccessException e) {
			Log.exception(e);
		}

		SplashController splashController = new SplashController();

		if (launchConfig.showSplash) {
			try {
				// Show the splash synchronously on the EDT before the main frame is built.
				EventQueue.invokeAndWait(() -> new Splash(splashController).setVisible(true));
			} catch (InterruptedException | InvocationTargetException e) {
				Log.exception(e);
			}
		}

		EventQueue.invokeLater(() -> {
			try {
				new EditorFrame(splashController, launchConfig).setVisible(true);
			} catch (Exception e) {
				// Let the splash thread report the failure and exit instead of hanging forever.
				splashController.fatalExceptionOccurred = true;
				throw e;
			}
		});
	}

	/**
	 * Performs editor exit, if editor is still running, this will cause to display "Do you really want to exit?" dialog in editor window.
	 * If editor LibGDX thread died, for example after uncaught GdxRuntimeException this will simply kill app.
	 */
	private void performEditorExit () {
		// The presence of the "LWJGL Timer" thread is used as a liveness probe for the LibGDX loop.
		Set<Thread> threadSet = Thread.getAllStackTraces().keySet();

		for (Thread thread : threadSet) {
			if (thread.getName().contains("LWJGL Timer")) {
				editor.requestExit();
				return;
			}
		}

		Log.fatal("Editor LibGDX thread is not running, performing force exit.");
		Log.dispose();
		System.exit(-4);
	}

	@Override
	public void dispose () {
		super.dispose();
		editorCanvas.stop();
	}

	// Loads a classpath image resource or fails fast with an IllegalStateException.
	private static BufferedImage loadImage (String path) {
		try {
			return ImageIO.read(getResource(path));
		} catch (IOException e) {
			Log.exception(e);
		}

		throw new IllegalStateException("Failed to load image: " + path);
	}

	private static URL getResource (String path) {
		return EditorFrame.class.getResource(path);
	}

	// Shared flags between the launcher, the splash window and the main frame.
	private static class SplashController {
		boolean shouldClose = false;
		boolean fatalExceptionOccurred;
	}

	private static class Splash extends JWindow {
		public Splash (SplashController controller) {
			getContentPane().add(new JLabel(new ImageIcon(loadImage("/com/kotcrab/vis/editor/splash.png"))), BorderLayout.CENTER);
			pack();
			setLocationRelativeTo(null);

			// Poll the controller flags until either the editor is up or a fatal error occurred.
			new Thread(() -> {
				while (true) {
					if (controller.shouldClose) {
						dispose();
						break;
					}

					if (controller.fatalExceptionOccurred) {
						Log.fatal("Initialization error");
						JOptionPane.showMessageDialog(null, "An error occurred during editor initialization, please check log: " + Log.getLogFile().parent().path());
						System.exit(-5);
					}

					try {
						Thread.sleep(10);
					} catch (InterruptedException e) {
						Log.exception(e);
					}
				}
			}, "Splash").start();
		}
	}
}
package com.salt.entity;

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.PrimaryKeyJoinColumn;
import javax.persistence.Table;
import java.sql.Date;

/**
 * JPA entity mapped to table {@code cf_sell_detail}: the detailed attributes of a
 * property "sell" listing (building structure, utilities, land use, dormitory and
 * canteen facilities, power capacity, and free-form content/thumbnails).
 */
@Entity
@Table(name = "cf_sell_detail")
public class SellDetail {

    // NOTE(review): @PrimaryKeyJoinColumn is normally used for one-to-one /
    // joined-inheritance mappings; naming an @Id column is usually done with
    // @Column(name = "sellDetail_fid"). Confirm the intended mapping before changing.
    @Id
    @PrimaryKeyJoinColumn(name = "sellDetail_fid")
    private Integer fid;

    private Date periodusestart;       // start of the permitted use period
    private Date perioduseend;         // end of the permitted use period
    private Integer buildingstructure;
    private Integer buildingtype;
    private Integer travelconve;       // travel-convenience rating
    private String facilities;
    private Integer driloadweight;     // drive-in load weight
    private Integer liftsloadweight;   // lift load weight
    private Integer landuse;
    private Integer natureland;
    private Integer canteen;
    private Integer canteenarea;
    private Integer dormroom;
    private Integer dormitoryarea;
    private Integer transcapacity;
    private Integer kvacapacity;       // transformer capacity in kVA
    private Integer transformer;
    private String trafficcondition;
    private Integer groundbearing;
    private Integer numberbuildings;
    private String content;            // free-form description
    private String thumbarr;           // serialized thumbnail list

    public Integer getFid() {
        return this.fid;
    }

    public void setFid(Integer fid) {
        this.fid = fid;
    }

    public Date getPeriodusestart() {
        return this.periodusestart;
    }

    public void setPeriodusestart(Date periodusestart) {
        this.periodusestart = periodusestart;
    }

    public Date getPerioduseend() {
        return this.perioduseend;
    }

    public void setPerioduseend(Date perioduseend) {
        this.perioduseend = perioduseend;
    }

    public Integer getBuildingstructure() {
        return this.buildingstructure;
    }

    public void setBuildingstructure(Integer buildingstructure) {
        this.buildingstructure = buildingstructure;
    }

    public Integer getBuildingtype() {
        return this.buildingtype;
    }

    public void setBuildingtype(Integer buildingtype) {
        this.buildingtype = buildingtype;
    }

    public Integer getTravelconve() {
        return this.travelconve;
    }

    public void setTravelconve(Integer travelconve) {
        this.travelconve = travelconve;
    }

    public String getFacilities() {
        return this.facilities;
    }

    public void setFacilities(String facilities) {
        this.facilities = facilities;
    }

    public Integer getDriloadweight() {
        return this.driloadweight;
    }

    public void setDriloadweight(Integer driloadweight) {
        this.driloadweight = driloadweight;
    }

    public Integer getLiftsloadweight() {
        return this.liftsloadweight;
    }

    public void setLiftsloadweight(Integer liftsloadweight) {
        this.liftsloadweight = liftsloadweight;
    }

    public Integer getLanduse() {
        return this.landuse;
    }

    public void setLanduse(Integer landuse) {
        this.landuse = landuse;
    }

    public Integer getNatureland() {
        return this.natureland;
    }

    public void setNatureland(Integer natureland) {
        this.natureland = natureland;
    }

    public Integer getCanteen() {
        return this.canteen;
    }

    public void setCanteen(Integer canteen) {
        this.canteen = canteen;
    }

    public Integer getCanteenarea() {
        return this.canteenarea;
    }

    public void setCanteenarea(Integer canteenarea) {
        this.canteenarea = canteenarea;
    }

    public Integer getDormroom() {
        return this.dormroom;
    }

    public void setDormroom(Integer dormroom) {
        this.dormroom = dormroom;
    }

    public Integer getDormitoryarea() {
        return this.dormitoryarea;
    }

    public void setDormitoryarea(Integer dormitoryarea) {
        this.dormitoryarea = dormitoryarea;
    }

    public Integer getTranscapacity() {
        return this.transcapacity;
    }

    public void setTranscapacity(Integer transcapacity) {
        this.transcapacity = transcapacity;
    }

    public Integer getKvacapacity() {
        return this.kvacapacity;
    }

    public void setKvacapacity(Integer kvacapacity) {
        this.kvacapacity = kvacapacity;
    }

    public Integer getTransformer() {
        return this.transformer;
    }

    public void setTransformer(Integer transformer) {
        this.transformer = transformer;
    }

    public String getTrafficcondition() {
        return this.trafficcondition;
    }

    public void setTrafficcondition(String trafficcondition) {
        this.trafficcondition = trafficcondition;
    }

    public Integer getGroundbearing() {
        return this.groundbearing;
    }

    public void setGroundbearing(Integer groundbearing) {
        this.groundbearing = groundbearing;
    }

    public Integer getNumberbuildings() {
        return this.numberbuildings;
    }

    public void setNumberbuildings(Integer numberbuildings) {
        this.numberbuildings = numberbuildings;
    }

    public String getContent() {
        return this.content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public String getThumbarr() {
        return this.thumbarr;
    }

    public void setThumbarr(String thumbarr) {
        this.thumbarr = thumbarr;
    }

    /**
     * Renders all fields in declaration order; string-valued fields are wrapped
     * in single quotes. Output format is identical to the previous
     * concatenation-based implementation.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("SellDetail{");
        sb.append("fid=").append(fid);
        sb.append(", periodusestart=").append(periodusestart);
        sb.append(", perioduseend=").append(perioduseend);
        sb.append(", buildingstructure=").append(buildingstructure);
        sb.append(", buildingtype=").append(buildingtype);
        sb.append(", travelconve=").append(travelconve);
        sb.append(", facilities='").append(facilities).append('\'');
        sb.append(", driloadweight=").append(driloadweight);
        sb.append(", liftsloadweight=").append(liftsloadweight);
        sb.append(", landuse=").append(landuse);
        sb.append(", natureland=").append(natureland);
        sb.append(", canteen=").append(canteen);
        sb.append(", canteenarea=").append(canteenarea);
        sb.append(", dormroom=").append(dormroom);
        sb.append(", dormitoryarea=").append(dormitoryarea);
        sb.append(", transcapacity=").append(transcapacity);
        sb.append(", kvacapacity=").append(kvacapacity);
        sb.append(", transformer=").append(transformer);
        sb.append(", trafficcondition='").append(trafficcondition).append('\'');
        sb.append(", groundbearing=").append(groundbearing);
        sb.append(", numberbuildings=").append(numberbuildings);
        sb.append(", content='").append(content).append('\'');
        sb.append(", thumbarr='").append(thumbarr).append('\'');
        sb.append('}');
        return sb.toString();
    }
}
package com.gmail.simplemodified.SER3604.music;

import com.gmail.simplemodified.SER3604.utils.DiscordUtil;
import com.gmail.simplemodified.SER3604.utils.TAUUtil;
import com.sedmelluq.discord.lavaplayer.player.AudioLoadResultHandler;
import com.sedmelluq.discord.lavaplayer.player.AudioPlayer;
import com.sedmelluq.discord.lavaplayer.player.AudioPlayerManager;
import com.sedmelluq.discord.lavaplayer.player.DefaultAudioPlayerManager;
import com.sedmelluq.discord.lavaplayer.tools.FriendlyException;
import com.sedmelluq.discord.lavaplayer.track.AudioPlaylist;
import com.sedmelluq.discord.lavaplayer.track.AudioTrack;
import com.sedmelluq.discord.lavaplayer.track.AudioTrackInfo;
import net.dv8tion.jda.api.EmbedBuilder;
import net.dv8tion.jda.api.entities.*;
import net.dv8tion.jda.api.managers.AudioManager;

import java.awt.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Coordinates one {@link GuildMusicManager} per guild: loading and queueing
 * tracks, joining/leaving voice channels, and reporting status back to text
 * channels as self-deleting embeds.
 *
 * <p>All per-guild state lives in {@link #directory}, keyed by guild id.
 * NOTE(review): the map is a plain {@code HashMap} while JDA and lavaplayer
 * may invoke callbacks from different threads — confirm single-threaded
 * access or switch to {@code ConcurrentHashMap}.
 */
public class MusicDirector {

    /** Shared lavaplayer manager used to resolve and decode all audio sources. */
    public AudioPlayerManager playerManager = new DefaultAudioPlayerManager();

    /** Guild id -> music manager, for every guild with an active music session. */
    private HashMap<String, GuildMusicManager> directory = new HashMap<>();

    /**
     * Resolves {@code trackUrl} and queues the result, creating the guild's
     * music manager on demand. Playlists are expanded track-by-track unless the
     * load came from a search ({@code isSearch}), in which case only the
     * selected (or first) track is queued.
     *
     * @param tc       text channel the request came from (used for feedback)
     * @param mention  mention string of the requesting user, echoed before embeds
     * @param trackUrl URL or search identifier understood by lavaplayer
     * @param vc       voice channel to join if not already connected
     * @param isSearch true when {@code trackUrl} is a search result rather than a direct playlist/track link
     */
    public void loadAndPlay(TextChannel tc, String mention, String trackUrl, VoiceChannel vc, boolean isSearch) {
        GuildMusicManager musicManager = lazyRetrieveGMM(tc.getGuild().getId(), tc);
        setupSender(tc.getGuild().getId());
        if (musicManager.getTC() == null) {
            musicManager.setTC(tc);
        }
        playerManager.loadItemOrdered(musicManager, trackUrl, new AudioLoadResultHandler() {
            @Override
            public void trackLoaded(AudioTrack track) {
                AudioTrackInfo info = track.getInfo();
                mdEmbedSender(tc, mention, "Added an Audio Track to the Queue",
                        "Added " + info.title + " by " + info.author + " lasting "
                                + TAUUtil.formatTime(info.length) + " to the queue.",
                        "Added Audio Track");
                play(tc, mention, musicManager, track, vc);
            }

            @Override
            public void playlistLoaded(AudioPlaylist playlist) {
                AudioTrack track1 = playlist.getSelectedTrack();
                if (track1 == null) {
                    track1 = playlist.getTracks().get(0);
                }
                AudioTrackInfo info = track1.getInfo();
                if (isSearch) {
                    // A search "playlist" is just a result list: take the selected/first hit.
                    mdEmbedSender(tc, mention, "Added an Audio Track to the Queue",
                            "Added " + info.title + " by " + info.author + " lasting "
                                    + TAUUtil.formatTime(info.length) + " to the queue.",
                            "Added Audio Track");
                    play(tc, mention, musicManager, track1, vc);
                } else {
                    mdEmbedSender(tc, mention, "Added Playlist to the Queue",
                            "Added a playlist called " + playlist.getName()
                                    + " to the queue. The first track is " + info.title + " (by " + info.author
                                    + " and lasting " + TAUUtil.formatTime(info.length) + ".)",
                            "Added Playlist");
                    for (AudioTrack track : playlist.getTracks()) {
                        play(tc, mention, musicManager, track, vc);
                    }
                }
            }

            @Override
            public void noMatches() {
                mdEmbedSender(tc, mention, "Nothing Found at Request",
                        "Nothing was found at " + trackUrl + ", please try again.", "No Matches");
            }

            @Override
            public void loadFailed(FriendlyException exception) {
                mdEmbedSender(tc, mention, "Failed to Load the Audio From the Source",
                        "Failed to load the audio from the source because of the following reason: "
                                + exception.getMessage(), "Audio Loading Failed");
                exception.printStackTrace(); // TODO(review): route through a logger instead of stderr
            }
        });
    }

    /**
     * Queues {@code track}, connecting to {@code chan} first when not already
     * connected/connecting; announces "now playing" when the queue was empty
     * (i.e. the track started immediately).
     */
    private void play(TextChannel textc, String mention, GuildMusicManager musicManager, AudioTrack track,
                      VoiceChannel chan) {
        AudioManager am = musicManager.getGuild().getAudioManager();
        TrackScheduler ts = musicManager.getScheduler();
        String id = musicManager.getId();
        if (!am.isConnected() && !am.isAttemptingToConnect()) {
            connectTo(textc, mention, am, chan);
        }
        ts.queue(track);
        // Empty queue after queueing => the scheduler started this track right away.
        if (ts.getQueueSize() < 1) {
            nowPlaying(id, chan.getName());
        }
    }

    /**
     * Skips the currently playing track and announces it.
     * NOTE(review): assumes something is playing — {@code getPlayingTrack()}
     * returning null would NPE here; confirm callers guard with {@link #getCurrentSong}.
     */
    public void skipTrack(TextChannel textc, String mention) {
        GuildMusicManager musicManager = retrieveGMM(textc.getGuild().getId());
        AudioTrackInfo track = musicManager.getPlayer().getPlayingTrack().getInfo();
        mdEmbedSender(textc, mention, "Skipped Current Song",
                "Skipped the current song (" + track.title + ", by " + track.author + " with a length of "
                        + TAUUtil.formatTime(track.length) + ".", "Skip Succeeded");
        musicManager.getScheduler().nextTrack();
    }

    /** Skips {@code amount} queued tracks at once, refusing when the queue is too short. */
    public void skipTracks(TextChannel textc, String mention, int amount) {
        TrackScheduler scheduler = retrieveGMM(textc.getGuild().getId()).getScheduler();
        if (scheduler.getQueueSize() < amount) {
            mdEmbedSender(textc, mention, "Less (or as many) Songs Than Skips",
                    "You requested more (or as many) skips than there are songs in the queue.",
                    "Multi-skip Failed");
        } else {
            mdEmbedSender(textc, mention, "Skipped " + amount + " Songs",
                    "Skipped " + amount + " songs to get to track number " + amount
                            + " in the queue (random stops are from rate-limiting.)",
                    "Multi-skip Succeeded");
            scheduler.skipToQueuePos(amount - 1);
            scheduler.nextTrack();
        }
    }

    /** @return the guild's current queue rendered as an embed by its scheduler. */
    public MessageEmbed getSongsAsEmbed(String guildid) {
        return retrieveGMM(guildid).getScheduler().getQueueAsEmbed();
    }

    /**
     * Opens an audio connection to {@code chan} after checking join/speak
     * permissions; tears the session down when joining is impossible.
     *
     * <p>FIX: the guard was {@code !am.isConnected() || !am.isAttemptingToConnect()},
     * which is true in almost every state (including "already connected") and made
     * the "Already Connected" branch effectively unreachable. Changed to
     * {@code &&}, mirroring the identical guard in {@link #play}.
     */
    public void connectTo(TextChannel textc, String mention, AudioManager am, VoiceChannel chan) {
        lazyRetrieveGMM(textc.getGuild().getId(), textc);
        setupSender(textc.getGuild().getId());
        Member mem = am.getGuild().getSelfMember();
        DiscordUtil.Permablity jAbility = DiscordUtil.canJoinVC(mem, chan);
        DiscordUtil.Permablity tAbility = DiscordUtil.canTalkVC(mem, chan);
        if (!am.isConnected() && !am.isAttemptingToConnect()) {
            if (jAbility.equals(DiscordUtil.Permablity.CAN)) {
                if (!tAbility.equals(DiscordUtil.Permablity.CAN)) {
                    // Joinable but mute: warn, then connect anyway.
                    mdEmbedSender(textc, mention, "WARNING: Cannot Speak in Voice Channel",
                            "I can't speak in the voice channel you asked me to join, because "
                                    + tAbility.msg + ".", "Cannot Speak in VC");
                }
                am.openAudioConnection(chan);
            } else {
                mdEmbedSender(textc, mention, "Cannot Connect to Voice Channel",
                        "I couldn't connect to your voice channel, because " + jAbility.msg + "."
                                + " Closing the music module in this guild.", "Connection Failed");
                closeAndClear(chan.getGuild().getId());
            }
        } else {
            mdEmbedSender(textc, mention, "Already Connected to a Voice Channel",
                    "I'm already connected in this guild; disconnect me and reinitialize me somewhere else.",
                    "Connection Failed");
        }
    }

    /**
     * Fully tears down the guild's music session: announces the interrupted
     * track, closes the voice connection, detaches the send handler and drops
     * the manager from {@link #directory}.
     */
    public void closeAndClear(Guild g) {
        AudioPlayer player = retrieveGMM(g.getId()).getPlayer();
        AudioManager am = g.getAudioManager();
        if (!(player.getPlayingTrack() == null)) {
            quitPlaying(g.getId(), player.getPlayingTrack().getInfo());
        }
        if (am.isConnected() || am.isAttemptingToConnect()) {
            g.getAudioManager().closeAudioConnection();
        }
        am.setSendingHandler(null);
        removeGMM(g.getId());
    }

    /**
     * Same teardown as {@link #closeAndClear(Guild)}, addressed by guild id.
     *
     * <p>FIX: the original ended with a no-op {@code retrieveGMM(id)} instead of
     * {@code removeGMM(id)}, so the manager was never removed from
     * {@link #directory} and the session leaked (the {@code Guild} overload
     * removes it correctly).
     */
    private void closeAndClear(String id) {
        Guild g = retrieveGMM(id).getGuild();
        AudioPlayer player = retrieveGMM(id).getPlayer();
        AudioManager am = g.getAudioManager();
        if (!(player.getPlayingTrack() == null)) {
            quitPlaying(g.getId(), player.getPlayingTrack().getInfo());
        }
        if (am.isConnected() || am.isAttemptingToConnect()) {
            g.getAudioManager().closeAudioConnection();
        }
        am.setSendingHandler(null);
        removeGMM(id); // was: retrieveGMM(id) — a lookup whose result was discarded
    }

    /** Clears the queue, stops playback and resets volume, keeping the session alive. */
    public void simpleClear(String id) {
        GuildMusicManager gmm = retrieveGMM(id);
        if (!(gmm == null)) {
            TrackScheduler scheduler = gmm.getScheduler();
            AudioPlayer player = gmm.getPlayer();
            scheduler.clearQueue();
            player.stopTrack();
            player.setVolume(100);
        }
    }

    /** Sets the player's volume (lavaplayer percentage scale). */
    public void adjustVolume(String id, int amount) {
        AudioPlayer player = retrieveGMM(id).getPlayer();
        player.setVolume(amount);
    }

    /** @return the player's current volume. */
    public int getVolume(String id) {
        AudioPlayer player = retrieveGMM(id).getPlayer();
        return player.getVolume();
    }

    /** @return whether the guild's player is paused. */
    public boolean isPaused(String id) {
        AudioPlayer player = retrieveGMM(id).getPlayer();
        return player.isPaused();
    }

    /** Pauses the guild's player. */
    public void pause(String id) {
        AudioPlayer player = retrieveGMM(id).getPlayer();
        player.setPaused(true);
    }

    /** Resumes the guild's player. */
    public void unpause(String id) {
        AudioPlayer player = retrieveGMM(id).getPlayer();
        player.setPaused(false);
    }

    /** @return the currently playing track, or null when idle or no session exists. */
    public AudioTrack getCurrentSong(String id) {
        GuildMusicManager gmm = retrieveGMM(id);
        if (!(gmm == null)) {
            AudioPlayer player = gmm.getPlayer();
            return player.getPlayingTrack();
        } else {
            return null;
        }
    }

    /** @return the number of queued tracks, or 0 when no session exists. */
    public int getQueueAmount(String id) {
        GuildMusicManager gmm = retrieveGMM(id);
        if (!(gmm == null)) {
            TrackScheduler scheduler = gmm.getScheduler();
            return scheduler.getQueueSize();
        } else {
            return 0;
        }
    }

    /** Seeks the currently playing track to {@code time} (milliseconds, per lavaplayer). */
    public void goTo(String id, long time) {
        AudioPlayer player = retrieveGMM(id).getPlayer();
        player.getPlayingTrack().setPosition(time);
    }

    /** Shuffles the guild's queue in place. */
    public void shuffle(String id) {
        TrackScheduler scheduler = retrieveGMM(id).getScheduler();
        scheduler.shuffleTracks();
    }

    /** @return true when the given guild id has an active music session. */
    public boolean botActive(String givenId) {
        boolean hasActiveGMM = false;
        for (String gId : getActiveIds()) {
            if (gId.matches(givenId)) { // NOTE(review): matches() treats the id as a regex; ids are numeric so this behaves like equals()
                hasActiveGMM = true;
                break;
            }
        }
        return hasActiveGMM;
    }

    /** @return true when the bot is not in a voice channel, or the member shares the bot's channel. */
    public boolean canMusicCMD(Guild guild, Member member) {
        VoiceChannel vc = guild.getAudioManager().getConnectedChannel();
        return vc == null || vc.getMembers().contains(member);
    }

    /**
     * Periodic housekeeping: closes sessions whose voice channel is gone or
     * holds fewer than two members (i.e. only the bot).
     *
     * <p>The two original branches (null channel / under-populated channel)
     * performed identical work, so they are folded into one condition.
     */
    public void checkIfLeave() {
        for (Guild g : getActiveGuilds()) {
            AudioManager am = g.getAudioManager();
            GuildMusicManager gmm = retrieveGMM(g.getId());
            TextChannel tc = gmm.getTC();
            VoiceChannel vc = (am.isConnected()) ? am.getConnectedChannel() : am.getQueuedAudioConnection();
            if (vc == null || vc.getMembers().size() < 2) {
                closeAndClear(g);
                if (!(tc == null)) {
                    // NOTE(review): "it is has been" typo kept byte-identical; fix upstream if desired.
                    mdEmbedSender(tc, "Closing Music Module in this Guild",
                            "Closing the music module in this guild, because it is has been unused for a while.",
                            "Closing Music Module");
                }
            }
        }
    }

    /** Announces a maintenance shutdown in every active guild, then closes each session. */
    public void shutdownMSG() {
        for (String gId : getActiveIds()) { // getActiveIds() is a snapshot, safe while closeAndClear mutates the map
            GuildMusicManager gmm = retrieveGMM(gId);
            TextChannel tc = gmm.getTC();
            if (!(tc == null)) {
                tc.sendMessage("@here, I am being shutdown for maintenance; your music will be shut down shortly.")
                        .queue(message -> message.delete().queueAfter(10, TimeUnit.SECONDS));
            }
            closeAndClear(gId);
        }
    }

    /** Announces a maintenance restart in every active guild, then closes each session. */
    public void rebootMSG() {
        for (String gId : getActiveIds()) {
            GuildMusicManager gmm = retrieveGMM(gId);
            TextChannel tc = gmm.getTC();
            if (!(tc == null)) {
                tc.sendMessage("@here, I am being restarted for maintenance; your music will have to be reinitialized shortly.")
                        .queue(message -> message.delete().queueAfter(10, TimeUnit.SECONDS));
            }
            closeAndClear(gId);
        }
    }

    /**
     * Announces the currently playing track, resolving the voice channel name
     * from the live connection. NOTE(review): NPEs if not connected — confirm
     * callers only invoke this while a connection exists.
     */
    void nowPlaying(String id) {
        GuildMusicManager gmm = retrieveGMM(id);
        TextChannel textChannel = gmm.getTC();
        AudioTrack current = gmm.getPlayer().getPlayingTrack();
        String vcName = gmm.getGuild().getAudioManager().getConnectedChannel().getName();
        if (!(textChannel == null)) {
            if (!(current == null)) {
                AudioTrackInfo info = current.getInfo();
                mdEmbedSender(textChannel, "Now Playing a Requested Track",
                        "Now Playing: " + info.title + ", by " + info.author + " (will last "
                                + TAUUtil.formatTime(info.length) + ") in " + vcName + ".", "Now Playing");
            }
        }
    }

    /** Variant of {@link #nowPlaying(String)} with the channel name supplied (used before the connection is live). */
    private void nowPlaying(String id, String vcName) {
        GuildMusicManager gmm = retrieveGMM(id);
        TextChannel textChannel = gmm.getTC();
        AudioTrack current = gmm.getPlayer().getPlayingTrack();
        if (!(textChannel == null)) {
            if (!(current == null)) {
                AudioTrackInfo info = current.getInfo();
                mdEmbedSender(textChannel, "Now Playing a Requested Track",
                        "Now Playing: " + info.title + ", by " + info.author + " (will last "
                                + TAUUtil.formatTime(info.length) + ") in " + vcName + ".", "Now Playing");
            }
        }
    }

    /** Announces that a track was stopped mid-play (session teardown). */
    private void quitPlaying(String id, AudioTrackInfo info) {
        GuildMusicManager gmm = retrieveGMM(id);
        TextChannel textChannel = gmm.getTC();
        String vcName = gmm.getGuild().getAudioManager().getConnectedChannel().getName();
        if (!(textChannel == null)) {
            mdEmbedSender(textChannel, "Stopped Playing a Track",
                    "Stopped Playing: " + info.title + ", by " + info.author + " (lasted "
                            + TAUUtil.formatTime(info.length) + ") in " + vcName + ".", "Stopped Playing");
        }
    }

    /** Announces that a track played to completion. */
    void finishedPlaying(String id, AudioTrackInfo info) {
        GuildMusicManager gmm = retrieveGMM(id);
        TextChannel textChannel = gmm.getTC();
        String vcName = gmm.getGuild().getAudioManager().getConnectedChannel().getName();
        if (!(textChannel == null)) {
            mdEmbedSender(textChannel, "Finished Playing a Track",
                    "Finished Playing: " + info.title + ", by " + info.author + " (lasted "
                            + TAUUtil.formatTime(info.length) + ") in " + vcName + ".", "Finished Playing");
        }
    }

    /** Announces that a track failed to start and playback is moving to the next song. */
    void skipContPlaying(String id, AudioTrackInfo info) {
        GuildMusicManager gmm = retrieveGMM(id);
        TextChannel textChannel = gmm.getTC();
        String vcName = gmm.getGuild().getAudioManager().getConnectedChannel().getName();
        if (!(textChannel == null)) {
            mdEmbedSender(textChannel, "Stopping Playing a Requested Track",
                    "Stopped Playing: " + info.title + ", by " + info.author + " (would've lasted "
                            + TAUUtil.formatTime(info.length) + ") in " + vcName
                            + ". It failed to play, so I'm going to the next song.", "Stopped Playing");
        }
    }

    /** Variant of {@link #skipContPlaying(String, AudioTrackInfo)} including the stuck duration in the message. */
    void skipContPlaying(String id, AudioTrackInfo info, long delay) {
        GuildMusicManager gmm = retrieveGMM(id);
        TextChannel textChannel = gmm.getTC();
        String vcName = gmm.getGuild().getAudioManager().getConnectedChannel().getName();
        if (!(textChannel == null)) {
            mdEmbedSender(textChannel, "Stopping Playing a Requested Track",
                    "Stopped Playing: " + info.title + ", by " + info.author + " (would've lasted "
                            + TAUUtil.formatTime(info.length) + ") in " + vcName + ". It failed to play in "
                            + delay + " milliseconds, so I'm going to the next song.", "Stopped Playing");
        }
    }

    /** Reports a fatal playback exception and shuts the guild's music module down. */
    void failToPlay(String id, String bad, String message) {
        GuildMusicManager gmm = retrieveGMM(id);
        TextChannel textChannel = gmm.getTC();
        if (!(textChannel == null)) {
            mdEmbedSender(textChannel, "Failed to Play a Requested Track",
                    "Caught an exception while trying to play track (SEVERITY: " + bad + ", MESSAGE: " + message
                            + "); shutting down the music module in this guild", "Failed Playing");
        }
        closeAndClear(gmm.getGuild());
    }

    /** Registers a manager under its guild id. */
    private void place(GuildMusicManager g) {
        directory.put(g.getId(), g);
    }

    /** @return true when a manager exists for the guild id. */
    private boolean exists(String i) {
        return directory.containsKey(i);
    }

    /** @return the manager for the guild id, or null when none exists. */
    private GuildMusicManager retrieveGMM(String i) {
        return directory.get(i);
    }

    /** Returns the guild's manager, creating and registering one on first use. */
    private GuildMusicManager lazyRetrieveGMM(String i, TextChannel tc) {
        if (!exists(i)) {
            place(new GuildMusicManager(i, tc.getGuild(), tc, playerManager));
        }
        return directory.get(i);
    }

    /** Drops the guild's manager from the directory. */
    private void removeGMM(String id) {
        directory.remove(id);
    }

    /** Wires the guild's lavaplayer send handler into JDA's audio manager. */
    private void setupSender(String id) {
        GuildMusicManager gmm = directory.get(id);
        gmm.getGuild().getAudioManager().setSendingHandler(gmm.getSendHandler());
    }

    /** @return the guilds of every active session (snapshot list). */
    private List<Guild> getActiveGuilds() {
        List<Guild> guilds = new ArrayList<>();
        directory.values().forEach(gmm -> guilds.add(gmm.getGuild()));
        return guilds;
    }

    /**
     * Human-readable, comma-joined list of guilds with active sessions,
     * truncated past 1500 characters.
     *
     * <p>FIX: the original appended to a plain {@code ArrayList} from
     * {@code parallelStream().forEach(...)}, an unsynchronized mutation from
     * multiple threads that can drop elements or throw. A sequential pass is
     * correct and just as fast at this scale.
     */
    public String getAGuildsAsString() {
        List<String> actives = new ArrayList<>();
        getActiveGuilds().forEach(guild -> actives.add(guild.getName()));
        String aGuildNames = String.join(", ", actives);
        aGuildNames = (aGuildNames.length() > 1500)
                ? aGuildNames.substring(0, 1500).concat(" and more")
                : aGuildNames;
        return aGuildNames;
    }

    /** @return a snapshot of the guild ids with active sessions. */
    private List<String> getActiveIds() {
        return new ArrayList<>(directory.keySet());
    }

    /** Sends a branded embed to {@code tc}; the message self-deletes after two hours. */
    private void mdEmbedSender(TextChannel tc, String title, String desc, String footerText) {
        EmbedBuilder builder = new EmbedBuilder()
                .setAuthor("SER-3604", "https://github.com/Daffehh/SER-3604", "https://i.imgur.com/6tsslZe.png")
                .setTitle(title)
                .setDescription(desc)
                .setFooter(footerText + " | SER-3604", "https://i.imgur.com/6tsslZe.png")
                .setColor(Color.PINK);
        tc.sendMessage(builder.build()).queue(message -> message.delete().queueAfter(2, TimeUnit.HOURS));
    }

    /** Same as {@link #mdEmbedSender(TextChannel, String, String, String)}, preceded by a user mention message. */
    private void mdEmbedSender(TextChannel tc, String mention, String title, String desc, String footerText) {
        EmbedBuilder builder = new EmbedBuilder()
                .setAuthor("SER-3604", "https://github.com/Daffehh/SER-3604", "https://i.imgur.com/6tsslZe.png")
                .setTitle(title)
                .setDescription(desc)
                .setFooter(footerText + " | SER-3604", "https://i.imgur.com/6tsslZe.png")
                .setColor(Color.PINK);
        tc.sendMessage(mention).queue(message -> message.delete().queueAfter(2, TimeUnit.HOURS));
        tc.sendMessage(builder.build()).queue(message -> message.delete().queueAfter(2, TimeUnit.HOURS));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.backup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase;
import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
import org.apache.hadoop.hbase.backup.impl.BackupManager;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
import org.apache.hadoop.hbase.backup.impl.FullTableBackupClient;
import org.apache.hadoop.hbase.backup.impl.IncrementalBackupManager;
import org.apache.hadoop.hbase.backup.impl.IncrementalTableBackupClient;
import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.master.cleaner.LogCleaner;
import org.apache.hadoop.hbase.master.cleaner.TimeToLiveLogCleaner;
import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.access.SecureTestUtil;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.junit.AfterClass;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class is only a base for other integration-level backup tests. Do not add tests here.
 * TestBackupSmallTests is where tests that don't require bring machines up/down should go All other
 * tests should have their own classes and extend this one
 */
public class TestBackupBase {
  private static final Logger LOG = LoggerFactory.getLogger(TestBackupBase.class);

  // Primary mini cluster; a second one is spun up only when useSecondCluster is set.
  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  protected static HBaseTestingUtility TEST_UTIL2;
  protected static Configuration conf1 = TEST_UTIL.getConfiguration();
  protected static Configuration conf2;

  // Source tables; table1's name is re-generated per run in createTables().
  protected static TableName table1 = TableName.valueOf("table1");
  protected static HTableDescriptor table1Desc;
  protected static TableName table2 = TableName.valueOf("table2");
  protected static TableName table3 = TableName.valueOf("table3");
  protected static TableName table4 = TableName.valueOf("table4");

  // Restore targets used by subclasses.
  protected static TableName table1_restore = TableName.valueOf("default:table1");
  protected static TableName table2_restore = TableName.valueOf("ns2:table2");
  protected static TableName table3_restore = TableName.valueOf("ns3:table3_restore");
  protected static TableName table4_restore = TableName.valueOf("ns4:table4_restore");

  protected static final int NB_ROWS_IN_BATCH = 99;
  protected static final byte[] qualName = Bytes.toBytes("q1");
  protected static final byte[] famName = Bytes.toBytes("f");

  // Rewritten to fully-qualified fs paths in setUp().
  protected static String BACKUP_ROOT_DIR = Path.SEPARATOR + "backupUT";
  protected static String BACKUP_REMOTE_ROOT_DIR = Path.SEPARATOR + "backupUT";
  protected static String provider = "defaultProvider";
  protected static boolean secure = false;

  protected static boolean autoRestoreOnFailure = true;
  // Guards the one-time cluster bootstrap; @Before runs per test method.
  protected static boolean setupIsDone = false;
  protected static boolean useSecondCluster = false;

  /**
   * Incremental backup client that can be made to fail at a chosen {@code Stage} (via
   * {@code failStageIf}) so failure-path tests can exercise each checkpoint of the
   * incremental backup state machine.
   */
  static class IncrementalTableBackupClientForTest extends IncrementalTableBackupClient {
    public IncrementalTableBackupClientForTest() {
    }

    public IncrementalTableBackupClientForTest(Connection conn, String backupId,
        BackupRequest request) throws IOException {
      super(conn, backupId, request);
    }

    @Override
    public void execute() throws IOException {
      // case INCREMENTAL_COPY:
      try {
        // case PREPARE_INCREMENTAL:
        failStageIf(Stage.stage_0);
        beginBackup(backupManager, backupInfo);

        failStageIf(Stage.stage_1);
        backupInfo.setPhase(BackupPhase.PREPARE_INCREMENTAL);
        LOG.debug("For incremental backup, current table set is "
            + backupManager.getIncrementalBackupTableSet());
        newTimestamps = ((IncrementalBackupManager) backupManager).getIncrBackupLogFileMap();
        // copy out the table and region info files for each table
        BackupUtils.copyTableRegionInfo(conn, backupInfo, conf);
        // convert WAL to HFiles and copy them to .tmp under BACKUP_ROOT
        convertWALsToHFiles();
        incrementalCopyHFiles(new String[] { getBulkOutputDir().toString() },
            backupInfo.getBackupRootDir());
        failStageIf(Stage.stage_2);

        // Save list of WAL files copied
        backupManager.recordWALFiles(backupInfo.getIncrBackupFileList());

        // case INCR_BACKUP_COMPLETE:
        // set overall backup status: complete. Here we make sure to complete the backup.
        // After this checkpoint, even if entering cancel process, will let the backup finished
        // Set the previousTimestampMap which is before this current log roll to the manifest.
        HashMap<TableName, HashMap<String, Long>> previousTimestampMap =
            backupManager.readLogTimestampMap();
        backupInfo.setIncrTimestampMap(previousTimestampMap);

        // The table list in backupInfo is good for both full backup and incremental backup.
        // For incremental backup, it contains the incremental backup table set.
        backupManager.writeRegionServerLogTimestamp(backupInfo.getTables(), newTimestamps);
        failStageIf(Stage.stage_3);

        HashMap<TableName, HashMap<String, Long>> newTableSetTimestampMap =
            backupManager.readLogTimestampMap();

        Long newStartCode =
            BackupUtils.getMinValue(BackupUtils.getRSLogTimestampMins(newTableSetTimestampMap));
        backupManager.writeBackupStartCode(newStartCode);

        handleBulkLoad(backupInfo.getTableNames());
        failStageIf(Stage.stage_4);

        // backup complete
        completeBackup(conn, backupInfo, backupManager, BackupType.INCREMENTAL, conf);
      } catch (Exception e) {
        failBackup(conn, backupInfo, backupManager, e, "Unexpected Exception : ",
            BackupType.INCREMENTAL, conf);
        throw new IOException(e);
      }
    }
  }

  /**
   * Full backup client counterpart of {@link IncrementalTableBackupClientForTest}: runs the real
   * full-backup flow but fails at a chosen {@code Stage} for failure-path testing.
   */
  static class FullTableBackupClientForTest extends FullTableBackupClient {
    public FullTableBackupClientForTest() {
    }

    public FullTableBackupClientForTest(Connection conn, String backupId, BackupRequest request)
        throws IOException {
      super(conn, backupId, request);
    }

    @Override
    public void execute() throws IOException {
      // Get the stage ID to fail on
      try (Admin admin = conn.getAdmin()) {
        // Begin BACKUP
        beginBackup(backupManager, backupInfo);
        failStageIf(Stage.stage_0);
        String savedStartCode;
        boolean firstBackup;
        // do snapshot for full table backup
        savedStartCode = backupManager.readBackupStartCode();
        firstBackup = savedStartCode == null || Long.parseLong(savedStartCode) == 0L;
        if (firstBackup) {
          // This is our first backup. Let's put some marker to system table so that we can hold the
          // logs while we do the backup.
          backupManager.writeBackupStartCode(0L);
        }
        failStageIf(Stage.stage_1);
        // We roll log here before we do the snapshot. It is possible there is duplicate data
        // in the log that is already in the snapshot. But if we do it after the snapshot, we
        // could have data loss.
        // A better approach is to do the roll log on each RS in the same global procedure as
        // the snapshot.
        LOG.info("Execute roll log procedure for full backup ...");

        Map<String, String> props = new HashMap<>();
        props.put("backupRoot", backupInfo.getBackupRootDir());
        admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
            LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
        failStageIf(Stage.stage_2);
        newTimestamps = backupManager.readRegionServerLastLogRollResult();
        if (firstBackup) {
          // Updates registered log files
          // We record ALL old WAL files as registered, because
          // this is a first full backup in the system and these
          // files are not needed for next incremental backup
          List<String> logFiles = BackupUtils.getWALFilesOlderThan(conf, newTimestamps);
          backupManager.recordWALFiles(logFiles);
        }

        // SNAPSHOT_TABLES:
        backupInfo.setPhase(BackupPhase.SNAPSHOT);
        for (TableName tableName : tableList) {
          String snapshotName = "snapshot_" + Long.toString(EnvironmentEdgeManager.currentTime())
              + "_" + tableName.getNamespaceAsString() + "_" + tableName.getQualifierAsString();

          snapshotTable(admin, tableName, snapshotName);
          backupInfo.setSnapshotName(tableName, snapshotName);
        }
        failStageIf(Stage.stage_3);
        // SNAPSHOT_COPY:
        // do snapshot copy
        LOG.debug("snapshot copy for " + backupId);
        snapshotCopy(backupInfo);
        // Updates incremental backup table set
        backupManager.addIncrementalBackupTableSet(backupInfo.getTables());

        // BACKUP_COMPLETE:
        // set overall backup status: complete. Here we make sure to complete the backup.
        // After this checkpoint, even if entering cancel process, will let the backup finished
        backupInfo.setState(BackupState.COMPLETE);

        // The table list in backupInfo is good for both full backup and incremental backup.
        // For incremental backup, it contains the incremental backup table set.
        backupManager.writeRegionServerLogTimestamp(backupInfo.getTables(), newTimestamps);

        HashMap<TableName, HashMap<String, Long>> newTableSetTimestampMap =
            backupManager.readLogTimestampMap();

        Long newStartCode =
            BackupUtils.getMinValue(BackupUtils.getRSLogTimestampMins(newTableSetTimestampMap));
        backupManager.writeBackupStartCode(newStartCode);
        failStageIf(Stage.stage_4);
        // backup complete
        completeBackup(conn, backupInfo, backupManager, BackupType.FULL, conf);
      } catch (Exception e) {
        if (autoRestoreOnFailure) {
          failBackup(conn, backupInfo, backupManager, e, "Unexpected BackupException : ",
              BackupType.FULL, conf);
        }
        throw new IOException(e);
      }
    }
  }

  /**
   * One-time bootstrap of the mini cluster(s), MR cluster and test tables; subsequent calls
   * return immediately via {@code setupIsDone}.
   *
   * @throws Exception if starting the mini cluster or setting up the tables fails
   */
  @Before
  public void setUp() throws Exception {
    if (setupIsDone) {
      return;
    }
    if (secure) {
      // set the always on security provider
      UserProvider.setUserProviderForTesting(TEST_UTIL.getConfiguration(),
          HadoopSecurityEnabledUserProviderForTesting.class);
      // setup configuration
      SecureTestUtil.enableSecurity(TEST_UTIL.getConfiguration());
    }
    conf1.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
    BackupManager.decorateMasterConfiguration(conf1);
    BackupManager.decorateRegionServerConfiguration(conf1);
    conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
    // Set TTL for old WALs to 1 sec to enforce fast cleaning of an archived
    // WAL files
    conf1.setLong(TimeToLiveLogCleaner.TTL_CONF_KEY, 1000);
    conf1.setLong(LogCleaner.OLD_WALS_CLEANER_THREAD_TIMEOUT_MSEC, 1000);

    // Set MultiWAL (with 2 default WAL files per RS)
    conf1.set(WALFactory.WAL_PROVIDER, provider);
    TEST_UTIL.startMiniCluster();

    if (useSecondCluster) {
      conf2 = HBaseConfiguration.create(conf1);
      conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");
      TEST_UTIL2 = new HBaseTestingUtility(conf2);
      TEST_UTIL2.setZkCluster(TEST_UTIL.getZkCluster());
      TEST_UTIL2.startMiniDFSCluster(3);
      String root2 = TEST_UTIL2.getConfiguration().get("fs.defaultFS");
      Path p = new Path(new Path(root2), "/tmp/wal");
      CommonFSUtils.setWALRootDir(TEST_UTIL2.getConfiguration(), p);
      TEST_UTIL2.startMiniCluster();
    }
    conf1 = TEST_UTIL.getConfiguration();

    TEST_UTIL.startMiniMapReduceCluster();
    BACKUP_ROOT_DIR =
        new Path(new Path(TEST_UTIL.getConfiguration().get("fs.defaultFS")), BACKUP_ROOT_DIR)
            .toString();
    LOG.info("ROOTDIR " + BACKUP_ROOT_DIR);
    if (useSecondCluster) {
      // NOTE(review): this uses string concatenation inside the Path constructor, unlike the
      // parent/child form above; it works because BACKUP_REMOTE_ROOT_DIR starts with a separator.
      BACKUP_REMOTE_ROOT_DIR =
          new Path(new Path(TEST_UTIL2.getConfiguration().get("fs.defaultFS"))
              + BACKUP_REMOTE_ROOT_DIR).toString();
      LOG.info("REMOTE ROOTDIR " + BACKUP_REMOTE_ROOT_DIR);
    }
    createTables();
    populateFromMasterConfig(TEST_UTIL.getHBaseCluster().getMaster().getConfiguration(), conf1);
    setupIsDone = true;
  }

  /** Copies every entry of the live master's configuration into {@code conf}. */
  private static void populateFromMasterConfig(Configuration masterConf, Configuration conf) {
    Iterator<Entry<String, String>> it = masterConf.iterator();
    while (it.hasNext()) {
      Entry<String, String> e = it.next();
      conf.set(e.getKey(), e.getValue());
    }
  }

  /**
   * Best-effort cleanup of snapshots and shutdown of the mini cluster(s).
   *
   * @throws Exception if deleting the archive directory or shutting down the mini cluster fails
   */
  @AfterClass
  public static void tearDown() throws Exception {
    try {
      SnapshotTestingUtils.deleteAllSnapshots(TEST_UTIL.getAdmin());
    } catch (Exception e) {
      // FIX: was an empty catch block. Cleanup stays best-effort, but the failure
      // is now recorded instead of silently swallowed.
      LOG.warn("Failed to delete snapshots during teardown", e);
    }
    SnapshotTestingUtils.deleteArchiveDirectory(TEST_UTIL);
    if (useSecondCluster) {
      TEST_UTIL2.shutdownMiniCluster();
    }
    TEST_UTIL.shutdownMiniCluster();
    TEST_UTIL.shutdownMiniMapReduceCluster();
  }

  /**
   * Writes {@code numRows} rows into {@code table} and returns the (still open) Table handle;
   * the caller is responsible for closing it.
   */
  Table insertIntoTable(Connection conn, TableName table, byte[] family, int id, int numRows)
      throws IOException {
    Table t = conn.getTable(table);
    Put p1;
    for (int i = 0; i < numRows; i++) {
      p1 = new Put(Bytes.toBytes("row-" + table + "-" + id + "-" + i));
      p1.addColumn(family, qualName, Bytes.toBytes("val" + i));
      t.put(p1);
    }
    return t;
  }

  /** Builds a {@link BackupRequest} for the given type, table list and target root dir. */
  protected BackupRequest createBackupRequest(BackupType type, List<TableName> tables,
      String path) {
    BackupRequest.Builder builder = new BackupRequest.Builder();
    BackupRequest request =
        builder.withBackupType(type).withTableList(tables).withTargetRootDir(path).build();
    return request;
  }

  /** Runs a backup of the given type and returns the resulting backup id. */
  protected String backupTables(BackupType type, List<TableName> tables, String path)
      throws IOException {
    Connection conn = null;
    BackupAdmin badmin = null;
    String backupId;
    try {
      conn = ConnectionFactory.createConnection(conf1);
      badmin = new BackupAdminImpl(conn);
      BackupRequest request = createBackupRequest(type, tables, path);
      backupId = badmin.backupTables(request);
    } finally {
      if (badmin != null) {
        badmin.close();
      }
      if (conn != null) {
        conn.close();
      }
    }
    return backupId;
  }

  /** Convenience wrapper: full backup of {@code tables} into {@code BACKUP_ROOT_DIR}. */
  protected String fullTableBackup(List<TableName> tables) throws IOException {
    return backupTables(BackupType.FULL, tables, BACKUP_ROOT_DIR);
  }

  /** Convenience wrapper: incremental backup of {@code tables} into {@code BACKUP_ROOT_DIR}. */
  protected String incrementalTableBackup(List<TableName> tables) throws IOException {
    return backupTables(BackupType.INCREMENTAL, tables, BACKUP_ROOT_DIR);
  }

  /** Loads {@code NB_ROWS_IN_BATCH} rows into {@code table}, skipping the WAL for speed. */
  protected static void loadTable(Table table) throws Exception {
    Put p; // 100 + 1 row to t1_syncup
    for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
      p = new Put(Bytes.toBytes("row" + i));
      p.setDurability(Durability.SKIP_WAL);
      p.addColumn(famName, qualName, Bytes.toBytes("val" + i));
      table.put(p);
    }
  }

  /** Creates the ns1..ns4 namespaces and the four test tables, loading data into table1/table2. */
  protected static void createTables() throws Exception {
    long tid = System.currentTimeMillis();
    table1 = TableName.valueOf("test-" + tid);
    Admin ha = TEST_UTIL.getAdmin();

    // Create namespaces
    NamespaceDescriptor desc1 = NamespaceDescriptor.create("ns1").build();
    NamespaceDescriptor desc2 = NamespaceDescriptor.create("ns2").build();
    NamespaceDescriptor desc3 = NamespaceDescriptor.create("ns3").build();
    NamespaceDescriptor desc4 = NamespaceDescriptor.create("ns4").build();

    ha.createNamespace(desc1);
    ha.createNamespace(desc2);
    ha.createNamespace(desc3);
    ha.createNamespace(desc4);

    HTableDescriptor desc = new HTableDescriptor(table1);
    HColumnDescriptor fam = new HColumnDescriptor(famName);
    desc.addFamily(fam);
    ha.createTable(desc);
    table1Desc = desc;
    Connection conn = ConnectionFactory.createConnection(conf1);
    Table table = conn.getTable(table1);
    loadTable(table);
    table.close();
    // NOTE(review): "test-" + tid + 1 is STRING concatenation ("test-<tid>1"), not tid+1.
    // The names are still unique per suffix, so behavior is kept as-is.
    table2 = TableName.valueOf("ns2:test-" + tid + 1);
    desc = new HTableDescriptor(table2);
    desc.addFamily(fam);
    ha.createTable(desc);
    table = conn.getTable(table2);
    loadTable(table);
    table.close();
    table3 = TableName.valueOf("ns3:test-" + tid + 2);
    table = TEST_UTIL.createTable(table3, famName);
    table.close();
    table4 = TableName.valueOf("ns4:test-" + tid + 3);
    table = TEST_UTIL.createTable(table4, famName);
    table.close();
    ha.close();
    conn.close();
  }

  /** @return true when the backup with the given id finished in COMPLETE state. */
  protected boolean checkSucceeded(String backupId) throws IOException {
    BackupInfo status = getBackupInfo(backupId);

    if (status == null) {
      return false;
    }

    return status.getState() == BackupState.COMPLETE;
  }

  /** @return true when the backup with the given id finished in FAILED state. */
  protected boolean checkFailed(String backupId) throws IOException {
    BackupInfo status = getBackupInfo(backupId);

    if (status == null) {
      return false;
    }

    return status.getState() == BackupState.FAILED;
  }

  /** Reads the backup's metadata from the backup system table, or null when unknown. */
  private BackupInfo getBackupInfo(String backupId) throws IOException {
    try (BackupSystemTable table = new BackupSystemTable(TEST_UTIL.getConnection())) {
      BackupInfo status = table.readBackupInfo(backupId);
      return status;
    }
  }

  /** @return a fresh BackupAdmin bound to the test cluster's connection. */
  protected BackupAdmin getBackupAdmin() throws IOException {
    return new BackupAdminImpl(TEST_UTIL.getConnection());
  }

  /**
   * Helper method
   */
  protected List<TableName> toList(String... args) {
    List<TableName> ret = new ArrayList<>();
    for (int i = 0; i < args.length; i++) {
      ret.add(TableName.valueOf(args[i]));
    }
    return ret;
  }

  /** Logs every file under the backup root — debugging aid only. */
  protected void dumpBackupDir() throws IOException {
    // Dump Backup Dir
    FileSystem fs = FileSystem.get(conf1);
    RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(BACKUP_ROOT_DIR), true);
    while (it.hasNext()) {
      LOG.debug(Objects.toString(it.next().getPath()));
    }
  }
}
package teamroots.emberroot.util;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraftforge.oredict.OreDictionary;

/**
 * Grab-bag of static helpers: fast trig approximations, angle math, ray tracing,
 * ore-dictionary matching and recipe-list comparison.
 *
 * <p>All state is static and not thread-safe. The ore / natural-block registries must be
 * populated via {@link #initOres()} / {@link #initNaturalBlocks()} before the
 * corresponding query methods are used.
 */
public class Util {
  public static Random random = new Random();
  public static ArrayList<IBlockState> oreList = new ArrayList<IBlockState>();
  public static ArrayList<Block> naturalBlocks = new ArrayList<Block>();

  /** @return a uniformly distributed double in [min, max). */
  public static double randomDouble(double min, double max) {
    double range = max - min;
    double scale = random.nextDouble() * range;
    double shifted = scale + min;
    return shifted;
  }

  /**
   * Fast parabolic sine approximation. Input is in radians and is folded once into
   * [-pi, pi]; inputs farther out than one extra period are not handled.
   */
  public static float fastSin(float x) {
    if (x < -3.14159265) {
      x += 6.28318531;
    } else {
      if (x > 3.14159265) {
        x -= 6.28318531;
      }
    }
    if (x < 0) {
      return (float) (1.27323954 * x + .405284735 * x * x);
    } else {
      return (float) (1.27323954 * x - 0.405284735 * x * x);
    }
  }

  /** Fast cosine approximation: phase-shifts by pi/2 and reuses the fastSin parabola. */
  public static float fastCos(float x) {
    if (x < -3.14159265) {
      x += 6.28318531;
    } else {
      if (x > 3.14159265) {
        x -= 6.28318531;
      }
    }
    x += 1.57079632;
    if (x > 3.14159265) {
      x -= 6.28318531;
    }
    if (x < 0) {
      return (float) (1.27323954 * x + 0.405284735 * x * x);
    } else {
      return (float) (1.27323954 * x - 0.405284735 * x * x);
    }
  }

  /** @return true if any ore-dictionary name registered for {@code stack} starts with {@code dict}. */
  public static boolean hasOreDictPrefix(ItemStack stack, String dict) {
    // startsWith is equivalent to the old manual substring/compareTo prefix check.
    for (int id : OreDictionary.getOreIDs(stack)) {
      if (OreDictionary.getOreName(id).startsWith(dict)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Yaw (degrees) from point 1 toward point 2, shifted by +/-360 when more than 90
   * degrees away from {@code previousYaw} so interpolation does not spin the long way.
   * posY/posY2 are unused (yaw is horizontal only).
   */
  public static float yawDegreesBetweenPointsSafe(double posX, double posY, double posZ,
      double posX2, double posY2, double posZ2, double previousYaw) {
    float f = (float) ((180.0f * Math.atan2(posX2 - posX, posZ2 - posZ)) / (float) Math.PI);
    if (Math.abs(f - previousYaw) > 90) {
      if (f < previousYaw) {
        f += 360.0;
      } else {
        f -= 360.0;
      }
    }
    return f;
  }

  /** Yaw (degrees) from point 1 toward point 2; posY/posY2 are unused. */
  public static float yawDegreesBetweenPoints(double posX, double posY, double posZ,
      double posX2, double posY2, double posZ2) {
    float f = (float) ((180.0f * Math.atan2(posX2 - posX, posZ2 - posZ)) / (float) Math.PI);
    return f;
  }

  /**
   * Unit-ish look vector built from yaw and pitch.
   * NOTE(review): arguments are fed straight to Math.sin/cos, so they are assumed to be
   * in radians despite other methods here using degrees - TODO confirm at call sites.
   */
  public static Vec3d lookVector(float rotYaw, float rotPitch) {
    return new Vec3d(
        Math.sin(rotYaw) * Math.cos(rotPitch),
        Math.sin(rotPitch),
        Math.cos(rotYaw) * Math.cos(rotPitch));
  }

  /**
   * Weighted blend of two yaw angles in degrees, unwrapping across the 180/-180 seam so
   * the blend takes the short way around.
   */
  public static float interpolateYawDegrees(float angle1, float ratio1, float angle2, float ratio2) {
    if (Math.abs(angle1 - angle2) > 180) {
      if (angle2 > angle1) {
        angle2 -= 360;
      } else {
        angle1 -= 360;
      }
    }
    return angle1 * ratio1 + angle2 * ratio2;
  }

  /** Pitch (degrees) from point 1 toward point 2. */
  public static float pitchDegreesBetweenPoints(double posX, double posY, double posZ,
      double posX2, double posY2, double posZ2) {
    return (float) Math.toDegrees(Math.atan2(posY2 - posY,
        Math.sqrt((posX2 - posX) * (posX2 - posX) + (posZ2 - posZ) * (posZ2 - posZ))));
  }

  /** Cosine-eased interpolation between s and e at parameter t in [0, 1]. */
  public static double interpolate(float s, float e, float t) {
    double t2 = (1.0 - fastCos(t * 3.14159265358979323f)) / 2.0;
    return (s * (1.0 - t2) + (e) * t2);
  }

  /**
   * Steps along the player's look vector in 0.25-block increments and returns the first
   * position whose block state is a full cube; falls back to the last sampled position.
   */
  public static BlockPos getRayTrace(World world, EntityPlayer player, int reachDistance) {
    double x = player.posX;
    double y = player.posY + player.getEyeHeight();
    double z = player.posZ;
    for (int i = 0; i < reachDistance * 4.0; i++) {
      x += player.getLookVec().x * 0.25;
      y += player.getLookVec().y * 0.25;
      z += player.getLookVec().z * 0.25;
      if (world.getBlockState(new BlockPos(x, y, z)).isFullCube()) {
        return new BlockPos(x, y, z);
      }
    }
    return new BlockPos(x, y, z);
  }

  /**
   * Like {@link #getRayTrace} but stops at the first non-air block rather than the first
   * full cube.
   */
  public static BlockPos getRayTraceNonFull(World world, EntityPlayer player, int reachDistance) {
    double x = player.posX;
    double y = player.posY + player.getEyeHeight();
    double z = player.posZ;
    for (int i = 0; i < reachDistance * 4.0; i++) {
      x += player.getLookVec().x * 0.25;
      y += player.getLookVec().y * 0.25;
      z += player.getLookVec().z * 0.25;
      if (!world.isAirBlock(new BlockPos(x, y, z))) {
        return new BlockPos(x, y, z);
      }
    }
    return new BlockPos(x, y, z);
  }

  /**
   * Steps along the player's look vector in 0.01-block increments and returns the first
   * entity (other than the player) whose 0.2-block bounding box the ray enters, or null.
   */
  public static Entity getRayTraceEntity(World world, EntityPlayer player, int reachDistance) {
    double x = player.posX;
    double y = player.posY + player.getEyeHeight();
    double z = player.posZ;
    for (int i = 0; i < reachDistance * 100.0; i++) {
      x += player.getLookVec().x * 0.01;
      y += player.getLookVec().y * 0.01;
      z += player.getLookVec().z * 0.01;
      List<Entity> entities = world.getEntitiesWithinAABB(Entity.class,
          new AxisAlignedBB(x - 0.1, y - 0.1, z - 0.1, x + 0.1, y + 0.1, z + 0.1));
      if (entities.size() > 0) {
        // FIX: UUIDs must be compared with equals(), not reference (in)equality,
        // otherwise two distinct-but-equal UUID objects look "different".
        if (!entities.get(0).getUniqueID().equals(player.getUniqueID())) {
          return entities.get(0);
        }
      }
    }
    return null;
  }

  /** Populates the ore registry with the vanilla ore block states. */
  public static void initOres() {
    oreList.add(Blocks.IRON_ORE.getDefaultState());
    oreList.add(Blocks.GOLD_ORE.getDefaultState());
    oreList.add(Blocks.DIAMOND_ORE.getDefaultState());
    oreList.add(Blocks.REDSTONE_ORE.getDefaultState());
    oreList.add(Blocks.LAPIS_ORE.getDefaultState());
    oreList.add(Blocks.COAL_ORE.getDefaultState());
  }

  /** Populates the natural-block registry (plants, logs, water, flowers, ...). */
  public static void initNaturalBlocks() {
    naturalBlocks.add(Blocks.TALLGRASS);
    naturalBlocks.add(Blocks.GRASS);
    naturalBlocks.add(Blocks.GRASS_PATH);
    naturalBlocks.add(Blocks.LEAVES);
    naturalBlocks.add(Blocks.LOG);
    naturalBlocks.add(Blocks.LOG2);
    naturalBlocks.add(Blocks.PLANKS);
    naturalBlocks.add(Blocks.CACTUS);
    naturalBlocks.add(Blocks.WATERLILY);
    naturalBlocks.add(Blocks.WATER);
    naturalBlocks.add(Blocks.FLOWING_WATER);
    naturalBlocks.add(Blocks.RED_FLOWER);
    naturalBlocks.add(Blocks.YELLOW_FLOWER);
  }

  /** @return true if any non-null stack in {@code list} holds {@code item}. */
  public static boolean containsItem(List<ItemStack> list, Item item) {
    for (ItemStack stack : list) {
      if (stack != null && stack.getItem() == item) {
        return true;
      }
    }
    return false;
  }

  /** @return true if any non-null stack in {@code list} holds {@code item} as a block. */
  public static boolean containsItem(List<ItemStack> list, Block item) {
    for (ItemStack stack : list) {
      if (stack != null && Block.getBlockFromItem(stack.getItem()) == item) {
        return true;
      }
    }
    return false;
  }

  /** @return true if any non-null stack matches both {@code item} and metadata {@code meta}. */
  public static boolean containsItem(List<ItemStack> list, Item item, int meta) {
    for (ItemStack stack : list) {
      if (stack != null && stack.getItem() == item && stack.getMetadata() == meta) {
        return true;
      }
    }
    return false;
  }

  /** @return true if any non-null stack matches both block {@code item} and metadata {@code meta}. */
  public static boolean containsItem(List<ItemStack> list, Block item, int meta) {
    for (ItemStack stack : list) {
      if (stack != null
          && Block.getBlockFromItem(stack.getItem()) == item
          && stack.getMetadata() == meta) {
        return true;
      }
    }
    return false;
  }

  /** @return a random entry from the ore registry; initOres() must have run first. */
  public static IBlockState getRandomOre() {
    return oreList.get(random.nextInt(oreList.size()));
  }

  /**
   * @return true if the stacks match directly (including wildcard metadata) or share any
   *     ore-dictionary entry.
   */
  public static boolean oreDictMatches(ItemStack stack1, ItemStack stack2) {
    if (OreDictionary.itemMatches(stack1, stack2, true)) {
      return true;
    } else {
      int[] oreIds = OreDictionary.getOreIDs(stack1);
      for (int i = 0; i < oreIds.length; i++) {
        if (OreDictionary.containsMatch(true,
            OreDictionary.getOres(OreDictionary.getOreName(oreIds[i])), stack2)) {
          return true;
        }
      }
    }
    return false;
  }

  /** Packs 8-bit RGB components into a single 0xRRGGBB int. */
  public static int intColor(int r, int g, int b) {
    return (r * 65536 + g * 256 + b);
  }

  /** @return true if {@code block} is in the natural-block registry (identity match). */
  public static boolean isNaturalBlock(Block block) {
    // List.contains uses equals(); Block does not override it, so this is the same
    // identity comparison as the old manual loop.
    return naturalBlocks.contains(block);
  }

  /**
   * @return true iff the available stacks match the recipe exactly: same count of
   *     non-null stacks, and every recipe entry is ore-dict-matched by some available stack.
   */
  public static boolean itemListsMatchWithSize(List<ItemStack> i1, List<ItemStack> i2) {
    ArrayList<ItemStack> recipe = new ArrayList<ItemStack>(i1);
    ArrayList<ItemStack> available = new ArrayList<ItemStack>(i2);
    recipe.removeIf(s -> s == null);
    available.removeIf(s -> s == null);
    if (available.size() == recipe.size()) {
      for (int j = 0; j < available.size(); j++) {
        boolean endIteration = false;
        // Each available stack consumes at most one matching recipe entry.
        for (int i = 0; i < recipe.size() && !endIteration; i++) {
          if (oreDictMatches(available.get(j), recipe.get(i))) {
            recipe.remove(i);
            endIteration = true;
          }
        }
      }
    }
    return recipe.size() == 0;
  }

  /** @return the fractional part of f (negative inputs truncate toward zero). */
  public static float fract(float f) {
    return f - (int) f;
  }

  /** @return the fractional part of d (negative inputs truncate toward zero). */
  public static double fract(double d) {
    return d - (int) d;
  }

  /**
   * Like {@link #itemListsMatchWithSize} but allows extra available stacks: every recipe
   * entry must be matched, surplus items are ignored.
   */
  public static boolean itemListsMatch(List<ItemStack> i1, List<ItemStack> i2) {
    ArrayList<ItemStack> recipe = new ArrayList<ItemStack>(i1);
    ArrayList<ItemStack> available = new ArrayList<ItemStack>(i2);
    recipe.removeIf(s -> s == null);
    available.removeIf(s -> s == null);
    if (available.size() >= recipe.size()) {
      for (int j = 0; j < available.size(); j++) {
        boolean endIteration = false;
        for (int i = 0; i < recipe.size() && !endIteration; i++) {
          if (oreDictMatches(available.get(j), recipe.get(i))) {
            recipe.remove(i);
            endIteration = true;
          }
        }
      }
    }
    return recipe.size() == 0;
  }

  /** @return a hard-coded "nature value" weight for the given block state, 0 if unlisted. */
  public static float getNatureAmount(IBlockState state) {
    if (state.getBlock() == Blocks.DIRT) {
      return 0.04f;
    }
    if (state.getBlock() == Blocks.GRASS) {
      return 0.16f;
    }
    if (state.getBlock() == Blocks.TALLGRASS) {
      return 0.24f;
    }
    if (state.getBlock() == Blocks.RED_FLOWER) {
      return 0.64f;
    }
    if (state.getBlock() == Blocks.YELLOW_FLOWER) {
      return 0.64f;
    }
    if (state.getBlock() == Blocks.DOUBLE_PLANT) {
      return 0.8f;
    }
    if (state.getBlock() == Blocks.WATER) {
      return 0.16f;
    }
    if (state.getBlock() == Blocks.LEAVES || state.getBlock() == Blocks.LEAVES2) {
      return 0.32f;
    }
    if (state.getBlock() == Blocks.LOG || state.getBlock() == Blocks.LOG2) {
      return 0.24f;
    }
    if (state.getBlock() == Blocks.WATERLILY) {
      return 0.56f;
    }
    if (state.getBlock() == Blocks.CACTUS) {
      return 0.72f;
    }
    return 0;
  }

  /**
   * Ray-traces like {@link #getRayTrace} and returns which face of the hit block the ray
   * entered, judged by the largest axis offset from the block center; UP when nothing is hit.
   */
  public static EnumFacing getRayFace(World world, EntityPlayer player, int reachDistance) {
    double x = player.posX;
    double y = player.posY + player.getEyeHeight();
    double z = player.posZ;
    for (int i = 0; i < reachDistance * 4.0; i++) {
      x += player.getLookVec().x * 0.25;
      y += player.getLookVec().y * 0.25;
      z += player.getLookVec().z * 0.25;
      if (world.getBlockState(new BlockPos(x, y, z)).isFullCube()) {
        BlockPos pos = new BlockPos(x, y, z);
        double centerX = pos.getX() + 0.5;
        double centerY = pos.getY() + 0.5;
        double centerZ = pos.getZ() + 0.5;
        double dx = Math.abs(x - centerX);
        double dy = Math.abs(y - centerY);
        double dz = Math.abs(z - centerZ);
        // Dominant axis decides the face; the sign decides which side of the pair.
        if (dx > dy && dx > dz) {
          if (x - centerX > 0) {
            return EnumFacing.EAST;
          } else {
            return EnumFacing.WEST;
          }
        } else if (dy > dz) {
          if (y - centerY > 0) {
            return EnumFacing.UP;
          } else {
            return EnumFacing.DOWN;
          }
        } else {
          if (z - centerZ > 0) {
            return EnumFacing.SOUTH;
          } else {
            return EnumFacing.NORTH;
          }
        }
      }
    }
    return EnumFacing.UP;
  }
}
package com.lordrhys.mod.tileentity; import com.lordrhys.mod.block.BlockFurnaceOfLight; import com.lordrhys.mod.block.BlockGoldenFurnace; import com.lordrhys.mod.crafting.DualFurnaceRecipes; import cpw.mods.fml.common.registry.GameRegistry; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; import net.minecraft.block.Block; import net.minecraft.block.material.Material; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.init.Blocks; import net.minecraft.init.Items; import net.minecraft.inventory.ISidedInventory; import net.minecraft.item.Item; import net.minecraft.item.ItemBlock; import net.minecraft.item.ItemHoe; import net.minecraft.item.ItemStack; import net.minecraft.item.ItemSword; import net.minecraft.item.ItemTool; import net.minecraft.item.crafting.FurnaceRecipes; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.tileentity.TileEntity; public class TileEntityFurnaceOfLight extends TileEntity implements ISidedInventory { private String localizedName; private ItemStack[] slots = new ItemStack[4]; private static final int[] slots_top = new int[]{0,1}; private static final int[] slots_bottom = new int[]{3,2}; // slots listed in priority private static final int[] slots_sides = new int[]{2}; public int dualFurnaceSpeed = 125; //Twice as fast as standard furnace(200) public int dualBurnTime; public int dualCurrentItemBurnTime; public int dualCookTime; public int getSizeInventory() { return this.slots.length; } public ItemStack getStackInSlot(int i) { return this.slots[i]; } public ItemStack decrStackSize(int i, int j) { if (this.slots[i] != null) { ItemStack itemstack; if (this.slots[i].stackSize <= j) { itemstack = this.slots[i]; this.slots[i] = null; return itemstack; } else { itemstack = this.slots[i].splitStack(j); if (this.slots[i].stackSize == 0) { this.slots[i] =null; } return itemstack; } } return null; } public ItemStack getStackInSlotOnClosing(int i) { if 
(this.slots[i] != null) { ItemStack itemstack = this.slots[i]; this.slots[i] = null; return itemstack; } return null; } public void setInventorySlotContents(int i, ItemStack itemstack) { this.slots[i] = itemstack; if (itemstack != null && itemstack.stackSize > this.getInventoryStackLimit()) { itemstack.stackSize = this.getInventoryStackLimit(); } } public void setGuiDisplayName(String displayName) { this.localizedName = displayName; } public String getInventoryName() { return this.hasCustomInventoryName() ? this.localizedName : "Furnace Of Light"; } public boolean hasCustomInventoryName() { return this.localizedName != null && this.localizedName.length() > 0; } public int getInventoryStackLimit() { return 64; } public boolean isUseableByPlayer(EntityPlayer player) { return this.worldObj.getTileEntity(this.xCoord, this.yCoord, this.zCoord) != this ? false : player.getDistanceSq((double)this.xCoord + 0.5D, (double)this.yCoord + 0.5D, (double)this.zCoord + 0.5D) <= 64.0D; } public void openInventory() { } public void closeInventory() { } public void readFromNBT(NBTTagCompound nbt) { super.readFromNBT(nbt); NBTTagList list = nbt.getTagList("Items", 10); this.slots = new ItemStack[this.getSizeInventory()]; for (int i = 0; i < list.tagCount(); i++) { NBTTagCompound compound = (NBTTagCompound) list.getCompoundTagAt(i); byte b = compound.getByte("Slot"); if (b >= 0 && b < this.slots.length) { this.slots[b] = ItemStack.loadItemStackFromNBT(compound); } } this.dualBurnTime = (int)nbt.getShort("BurnTime"); this.dualCookTime = (int)nbt.getShort("CookTime"); this.dualCurrentItemBurnTime = (int)nbt.getShort("CurrentBurnTime"); if (nbt.hasKey("CustomName")) { this.localizedName = nbt.getString("CustomName"); } } public void writeToNBT(NBTTagCompound nbt) { super.writeToNBT(nbt); nbt.setShort("BurnTime", (short)this.dualBurnTime); nbt.setShort("CookTime", (short)this.dualCookTime); nbt.setShort("CurrentBurnTime", (short)this.dualCurrentItemBurnTime); NBTTagList list = new 
NBTTagList(); for (int i = 0; i < this.slots.length; i++) { if (this.slots[i] != null) { NBTTagCompound compound = new NBTTagCompound(); compound.setByte("Slot", (byte)i); this.slots[i].writeToNBT(compound); list.appendTag(compound); } } nbt.setTag("Items", list); if(this.hasCustomInventoryName()) { nbt.setString("CustomName", this.localizedName); } } public boolean isItemValidForSlot(int i, ItemStack itemstack) { return i == 3 ? false : (i == 2 ? isItemFuel(itemstack) : true); } public static boolean isItemFuel(ItemStack itemstack) { return getItemBurnTime(itemstack) > 0; } private static int getItemBurnTime(ItemStack itemstack) { if (itemstack == null) { return 0; } else { Item item = itemstack.getItem(); if (item instanceof ItemBlock && Block.getBlockFromItem(item) != Blocks.air) { Block block = Block.getBlockFromItem(item); if (block == Blocks.wooden_slab) return 150; if (block.getMaterial() == Material.wood) return 300; if (block == Blocks.coal_block) return 16000; } if (item instanceof ItemTool && ((ItemTool) item).getToolMaterialName().equals("WOOD")) return 200; if (item instanceof ItemSword && ((ItemSword) item).getToolMaterialName().equals("WOOD")) return 200; if (item instanceof ItemHoe && ((ItemHoe) item).getToolMaterialName().equals("WOOD")) return 200; if (item == Items.stick) return 100; if (item == Items.coal) return 1600; if (item == Items.lava_bucket) return 20000; if (item == Item.getItemFromBlock(Blocks.sapling)) return 100; if (item == Items.blaze_rod) return 2400; // Not a real recipe if (item == Items.quartz) return 250; return GameRegistry.getFuelValue(itemstack); } } public void updateEntity() { boolean flag = this.dualBurnTime > 0; boolean flag1 = false; if (this.dualBurnTime > 0) { --this.dualBurnTime; } if (!this.worldObj.isRemote) { if (this.dualBurnTime == 0 && this.canSmelt()) { this.dualCurrentItemBurnTime = this.dualBurnTime = getItemBurnTime(this.slots[2]); if (this.dualBurnTime > 0) { flag1 = true; if (this.slots[1] != null) { 
--this.slots[2].stackSize; if(this.slots[2].stackSize == 0) { this.slots[2] = this.slots[2].getItem().getContainerItem(this.slots[2]); } } } } if (this.isBurning() && this.canSmelt()) { ++this.dualCookTime; if (this.dualCookTime == this.dualFurnaceSpeed) { this.dualCookTime = 0; this.smeltItem(); flag1 = true; } } else { this.dualCookTime = 0; } if (flag != this.dualBurnTime > 0) { flag1 = true; BlockFurnaceOfLight.updateFurnaceBlockState(this.dualBurnTime > 0, this.worldObj, this.xCoord, this.yCoord, this.zCoord); } } if (flag1) { this.markDirty(); } } public boolean isBurning() { return dualBurnTime > 0; } private void smeltItem() { if (this.canSmelt()) { ItemStack[] stack = {this.slots[0], this.slots[1]}; ItemStack itemstack = DualFurnaceRecipes.recipes().getSmeltingResult(stack); if (this.slots[3] == null) { this.slots[3] = itemstack.copy(); } else if (this.slots[3].isItemEqual(itemstack)) { this.slots[3].stackSize += itemstack.stackSize; } for (int i = 0; i < 2; i++) { this.slots[i].stackSize--; if (this.slots[i].stackSize <= 0) { this.slots[i] = null; } } } } private boolean canSmelt() { if(this.slots[0] == null || this.slots[1] == null) { return false; } else { ItemStack[] stack = {this.slots[0], this.slots[1]}; ItemStack itemstack = DualFurnaceRecipes.recipes().getSmeltingResult(stack); // if (itemstack == null) // { // stack[0] = this.slots[1]; // stack[1] = this.slots[0]; // itemstack = DualFurnaceRecipes.recipes().getSmeltingResult(stack); // } if (itemstack == null) return false; if (this.slots[3] == null) return true; if (!this.slots[3].isItemEqual(itemstack)) return false; int result = this.slots[3].stackSize + itemstack.stackSize; return (result <= getInventoryStackLimit() && result <= itemstack.getMaxStackSize()); } } @SideOnly(Side.CLIENT) public int getBurnTimeRemainingScaled(int i) { if (this.dualCurrentItemBurnTime == 0) { this.dualCurrentItemBurnTime = this.dualFurnaceSpeed; } return this.dualBurnTime * i / this.dualCurrentItemBurnTime; } 
@SideOnly(Side.CLIENT) public int getCookProgressScaled(int i) { return this.dualCookTime * i / this.dualFurnaceSpeed; } public int[] getAccessibleSlotsFromSide(int var1) { return var1 < 2 ? slots_bottom : (var1 == 2 ? slots_top : slots_sides); } public boolean canInsertItem(int i, ItemStack itemstack, int j) { return this.isItemValidForSlot(i, itemstack); } public boolean canExtractItem(int i, ItemStack itemstack, int j) { return j != 0 || j != 1 || i != 2 || itemstack.getItem() == Items.bucket; } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.pinpoint.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Describes the contents of an email message built from three optional pieces: a subject
 * line, an HTML body and a plain-text body.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-2016-12-01/SimpleEmail" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SimpleEmail implements Serializable, Cloneable, StructuredPojo {

    /** HTML body of the message; preferred for clients that render rich content. */
    private SimpleEmailPart htmlPart;
    /** Subject line (title) of the email. */
    private SimpleEmailPart subject;
    /** Plain-text body; preferred for clients that don't render HTML or are on slow links. */
    private SimpleEmailPart textPart;

    /**
     * Sets the HTML body of the email message.
     *
     * @param htmlPart
     *        HTML body of the message; preferred for clients that render rich content.
     */
    public void setHtmlPart(SimpleEmailPart htmlPart) {
        this.htmlPart = htmlPart;
    }

    /**
     * Returns the HTML body of the email message.
     *
     * @return HTML body of the message; preferred for clients that render rich content.
     */
    public SimpleEmailPart getHtmlPart() {
        return this.htmlPart;
    }

    /**
     * Sets the HTML body of the email message.
     *
     * @param htmlPart
     *        HTML body of the message; preferred for clients that render rich content.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SimpleEmail withHtmlPart(SimpleEmailPart htmlPart) {
        this.htmlPart = htmlPart;
        return this;
    }

    /**
     * Sets the subject line, or title, of the email.
     *
     * @param subject
     *        The subject line, or title, of the email.
     */
    public void setSubject(SimpleEmailPart subject) {
        this.subject = subject;
    }

    /**
     * Returns the subject line, or title, of the email.
     *
     * @return The subject line, or title, of the email.
     */
    public SimpleEmailPart getSubject() {
        return this.subject;
    }

    /**
     * Sets the subject line, or title, of the email.
     *
     * @param subject
     *        The subject line, or title, of the email.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SimpleEmail withSubject(SimpleEmailPart subject) {
        this.subject = subject;
        return this;
    }

    /**
     * Sets the plain-text body of the email message.
     *
     * @param textPart
     *        Plain-text body; preferred for clients that don't render HTML or are on
     *        high-latency networks such as mobile devices.
     */
    public void setTextPart(SimpleEmailPart textPart) {
        this.textPart = textPart;
    }

    /**
     * Returns the plain-text body of the email message.
     *
     * @return Plain-text body; preferred for clients that don't render HTML or are on
     *         high-latency networks such as mobile devices.
     */
    public SimpleEmailPart getTextPart() {
        return this.textPart;
    }

    /**
     * Sets the plain-text body of the email message.
     *
     * @param textPart
     *        Plain-text body; preferred for clients that don't render HTML or are on
     *        high-latency networks such as mobile devices.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SimpleEmail withTextPart(SimpleEmailPart textPart) {
        this.textPart = textPart;
        return this;
    }

    // Null-safe equality for the member fields; equivalent to the generated
    // xor/null-check pattern.
    private static boolean memberEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getHtmlPart() != null) {
            buf.append("HtmlPart: ").append(getHtmlPart()).append(",");
        }
        if (getSubject() != null) {
            buf.append("Subject: ").append(getSubject()).append(",");
        }
        if (getTextPart() != null) {
            buf.append("TextPart: ").append(getTextPart());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof SimpleEmail)) {
            return false;
        }
        SimpleEmail that = (SimpleEmail) obj;
        return memberEquals(that.getHtmlPart(), this.getHtmlPart())
                && memberEquals(that.getSubject(), this.getSubject())
                && memberEquals(that.getTextPart(), this.getTextPart());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (and therefore same values) as the generated form.
        int result = 1;
        for (Object part : new Object[] { getHtmlPart(), getSubject(), getTextPart() }) {
            result = 31 * result + ((part == null) ? 0 : part.hashCode());
        }
        return result;
    }

    @Override
    public SimpleEmail clone() {
        try {
            return (SimpleEmail) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.pinpoint.model.transform.SimpleEmailMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
package org.verdictdb; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; import org.apache.commons.lang3.RandomStringUtils; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.verdictdb.commons.DatabaseConnectionHelpers; import org.verdictdb.commons.VerdictOption; import org.verdictdb.connection.CachedDbmsConnection; import org.verdictdb.connection.DbmsConnection; import org.verdictdb.connection.JdbcConnection; import org.verdictdb.coordinator.ScramblingCoordinator; import org.verdictdb.coordinator.SelectQueryCoordinator; import org.verdictdb.coordinator.VerdictResultStreamFromExecutionResultReader; import org.verdictdb.core.resulthandler.ExecutionResultReader; import org.verdictdb.core.scrambling.ScrambleMeta; import org.verdictdb.core.scrambling.ScrambleMetaSet; import org.verdictdb.exception.VerdictDBException; import org.verdictdb.sqlsyntax.MysqlSyntax; /** * This test is to check NULL value is returned when no row is selected by sum() or avg(). 
*/ public class VerdictDBAggNullValueTest { // lineitem has 10 blocks, orders has 3 blocks; // lineitem join orders has 12 blocks static final int blockSize = 100; static ScrambleMetaSet meta = new ScrambleMetaSet(); static VerdictOption options = new VerdictOption(); static Connection conn; private static Statement stmt; private static final String MYSQL_HOST; static { String env = System.getenv("BUILD_ENV"); if (env != null && env.equals("GitLab")) { MYSQL_HOST = "mysql"; } else { MYSQL_HOST = "localhost"; } } private static final String MYSQL_DATABASE = "mysql_test_" + RandomStringUtils.randomAlphanumeric(8).toLowerCase(); private static final String MYSQL_UESR = "root"; private static final String MYSQL_PASSWORD = ""; @BeforeClass public static void setupMySqlDatabase() throws SQLException, VerdictDBException { String mysqlConnectionString = String.format("jdbc:mysql://%s?autoReconnect=true&useSSL=false", MYSQL_HOST); conn = DatabaseConnectionHelpers.setupMySql( mysqlConnectionString, MYSQL_UESR, MYSQL_PASSWORD, MYSQL_DATABASE); conn.setCatalog(MYSQL_DATABASE); stmt = conn.createStatement(); stmt.execute(String.format("use `%s`", MYSQL_DATABASE)); DbmsConnection dbmsConn = JdbcConnection.create(conn); // Create Scramble table dbmsConn.execute( String.format("DROP TABLE IF EXISTS `%s`.`lineitem_scrambled`", MYSQL_DATABASE)); dbmsConn.execute(String.format("DROP TABLE IF EXISTS `%s`.`orders_scrambled`", MYSQL_DATABASE)); ScramblingCoordinator scrambler = new ScramblingCoordinator(dbmsConn, MYSQL_DATABASE, MYSQL_DATABASE, (long) 100); ScrambleMeta meta1 = scrambler.scramble( MYSQL_DATABASE, "lineitem", MYSQL_DATABASE, "lineitem_scrambled", "uniform"); ScrambleMeta meta2 = scrambler.scramble(MYSQL_DATABASE, "orders", MYSQL_DATABASE, "orders_scrambled", "uniform"); meta.addScrambleMeta(meta1); meta.addScrambleMeta(meta2); stmt.execute(String.format("drop schema if exists `%s`", options.getVerdictTempSchemaName())); stmt.execute( String.format("create schema if not 
exists `%s`", options.getVerdictTempSchemaName())); } @Test public void testAvg() throws VerdictDBException { // This query doesn't select any rows. String sql = String.format( "select avg(l_extendedprice) from " + "%s.lineitem, %s.customer, %s.orders " + "where c_mktsegment='AAAAAA' and c_custkey=o_custkey and o_orderkey=l_orderkey", MYSQL_DATABASE, MYSQL_DATABASE, MYSQL_DATABASE); JdbcConnection jdbcConn = new JdbcConnection(conn, new MysqlSyntax()); jdbcConn.setOutputDebugMessage(true); DbmsConnection dbmsconn = new CachedDbmsConnection(jdbcConn); dbmsconn.setDefaultSchema(MYSQL_DATABASE); SelectQueryCoordinator coordinator = new SelectQueryCoordinator(dbmsconn); coordinator.setScrambleMetaSet(meta); ExecutionResultReader reader = coordinator.process(sql); VerdictResultStream stream = new VerdictResultStreamFromExecutionResultReader(reader); try { while (stream.hasNext()) { VerdictSingleResult rs = stream.next(); rs.next(); assertNull(rs.getValue(0)); assertEquals(0, rs.getDouble(0), 0); assertEquals(0, rs.getInt(0)); } } catch (RuntimeException e) { throw e; } } @Test public void testSum() throws VerdictDBException { // This query doesn't select any rows. 
String sql = String.format( "select sum(l_extendedprice) from " + "%s.lineitem, %s.customer, %s.orders " + "where c_mktsegment='AAAAAA' and c_custkey=o_custkey and o_orderkey=l_orderkey", MYSQL_DATABASE, MYSQL_DATABASE, MYSQL_DATABASE); JdbcConnection jdbcConn = new JdbcConnection(conn, new MysqlSyntax()); jdbcConn.setOutputDebugMessage(true); DbmsConnection dbmsconn = new CachedDbmsConnection(jdbcConn); dbmsconn.setDefaultSchema(MYSQL_DATABASE); SelectQueryCoordinator coordinator = new SelectQueryCoordinator(dbmsconn); coordinator.setScrambleMetaSet(meta); ExecutionResultReader reader = coordinator.process(sql); VerdictResultStream stream = new VerdictResultStreamFromExecutionResultReader(reader); try { while (stream.hasNext()) { VerdictSingleResult rs = stream.next(); rs.next(); assertNull(rs.getValue(0)); assertEquals(0, rs.getDouble(0), 0); assertEquals(0, rs.getInt(0)); } } catch (RuntimeException e) { throw e; } } @Test public void testSumAvg() throws VerdictDBException { // This query doesn't select any rows. 
String sql = String.format( "select sum(l_extendedprice), avg(l_extendedprice) from " + "%s.lineitem, %s.customer, %s.orders " + "where c_mktsegment='AAAAAA' and c_custkey=o_custkey and o_orderkey=l_orderkey", MYSQL_DATABASE, MYSQL_DATABASE, MYSQL_DATABASE); JdbcConnection jdbcConn = new JdbcConnection(conn, new MysqlSyntax()); jdbcConn.setOutputDebugMessage(true); DbmsConnection dbmsconn = new CachedDbmsConnection(jdbcConn); dbmsconn.setDefaultSchema(MYSQL_DATABASE); SelectQueryCoordinator coordinator = new SelectQueryCoordinator(dbmsconn); coordinator.setScrambleMetaSet(meta); ExecutionResultReader reader = coordinator.process(sql); VerdictResultStream stream = new VerdictResultStreamFromExecutionResultReader(reader); try { while (stream.hasNext()) { VerdictSingleResult rs = stream.next(); rs.next(); assertNull(rs.getValue(0)); assertEquals(0, rs.getDouble(0), 0); assertEquals(0, rs.getInt(0)); assertNull(rs.getValue(1)); assertEquals(0, rs.getDouble(1), 0); assertEquals(0, rs.getInt(1)); } } catch (RuntimeException e) { throw e; } } @Test public void testCount() throws VerdictDBException { // This query doesn't select any rows. 
String sql = String.format( "select count(l_orderkey) from " + "%s.lineitem, %s.customer, %s.orders " + "where c_mktsegment='AAAAAA' and c_custkey=o_custkey and o_orderkey=l_orderkey", MYSQL_DATABASE, MYSQL_DATABASE, MYSQL_DATABASE); JdbcConnection jdbcConn = new JdbcConnection(conn, new MysqlSyntax()); jdbcConn.setOutputDebugMessage(true); DbmsConnection dbmsconn = new CachedDbmsConnection(jdbcConn); dbmsconn.setDefaultSchema(MYSQL_DATABASE); SelectQueryCoordinator coordinator = new SelectQueryCoordinator(dbmsconn); coordinator.setScrambleMetaSet(meta); ExecutionResultReader reader = coordinator.process(sql); VerdictResultStream stream = new VerdictResultStreamFromExecutionResultReader(reader); try { while (stream.hasNext()) { VerdictSingleResult rs = stream.next(); rs.next(); assertEquals(0, rs.getDouble(0), 0); assertEquals(0, rs.getInt(0)); } } catch (RuntimeException e) { throw e; } } @AfterClass public static void tearDown() throws SQLException { stmt.execute(String.format("DROP SCHEMA IF EXISTS `%s`", MYSQL_DATABASE)); } }
/* Copyright (c) 2016 Boundless and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/org/documents/edl-v10.html * * Contributors: * Johnathan Garrett (Prominent Edge) - initial implementation */ package org.locationtech.geogig.storage.impl; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.google.common.base.Optional; /** * Simple implementation of an INI parser and serializer that operates on byte arrays. 
*/ public abstract class INIBlob { /** * Content of the blob */ private List<Entry> data = null; public abstract byte[] iniBytes() throws IOException; public abstract void setBytes(byte[] bytes) throws IOException; public synchronized Optional<String> get(String section, String key) throws IOException { if (section == null || section.length() == 0) { throw new IllegalArgumentException("Section name required"); } if (key == null || key.length() == 0) { throw new IllegalArgumentException("Key required"); } checkReload(); for (Entry e : data) { Optional<String> result = e.get(section, key); if (result.isPresent()) return result; } return Optional.absent(); } public synchronized Map<String, String> getAll() throws IOException { checkReload(); Map<String, String> m = new HashMap<String, String>(); for (Entry e : data) { if (e instanceof Section) { Section s = (Section) e; for (KeyAndValue kv : s.getValues()) { m.put(s.getHeader() + "." + kv.getKey(), kv.getValue()); } } } return m; } public synchronized List<String> listSubsections(String section) throws IOException { if (section == null || section.length() == 0) { throw new IllegalArgumentException("Section name required"); } checkReload(); List<String> results = new ArrayList<String>(); for (Entry e : data) { if (e instanceof Section) { Section s = (Section) e; if (s.getHeader().startsWith(section + ".")) { results.add(s.getHeader().substring(section.length() + 1)); } } } return results; } public synchronized Map<String, String> getSection(String section) throws IOException { if (section == null || section.length() == 0) { throw new IllegalArgumentException("Section name required"); } checkReload(); for (Entry e : data) { if (e instanceof Section) { Section s = (Section) e; if (s.getHeader().equals(section)) { Map<String, String> values = new HashMap<String, String>(); for (KeyAndValue kv : s.getValues()) { values.put(kv.getKey(), kv.getValue()); } return values; } } } return new HashMap<String, String>(); } public 
synchronized void set(String section, String key, String value) throws IOException { if (section == null || section.length() == 0) { throw new IllegalArgumentException("Section name required"); } if (key == null || key.length() == 0) { throw new IllegalArgumentException("Key required"); } checkReload(); boolean written = false; for (Entry e : data) { written = e.set(section, key, value); if (written) { break; } } if (!written) { // didn't add to an existing section, time to add a new section. List<KeyAndValue> kvs = new ArrayList<KeyAndValue>(); kvs.add(new KeyAndValue(key, value)); data.add(new Section(section, kvs)); } write(); } public synchronized void removeSection(String section) throws IOException { if (section == null || section.length() == 0) { throw new IllegalArgumentException("Section name required"); } checkReload(); boolean written = false; Iterator<Entry> iter = data.iterator(); while (iter.hasNext()) { Entry e = iter.next(); if (e instanceof Section && ((Section) e).getHeader().equals(section)) { iter.remove(); written = true; break; } } if (written) { write(); } else { throw new NoSuchElementException("No such section"); } } public synchronized void remove(String section, String key) throws IOException { if (section == null || section.length() == 0) { throw new IllegalArgumentException("Section name required"); } if (key == null || key.length() == 0) { throw new IllegalArgumentException("Section name required"); } checkReload(); boolean written = false; for (Entry e : data) { written |= e.unset(section, key); } if (written) { write(); } } private final static class KeyAndValue { private String key, value; public KeyAndValue(String key, String value) { this.key = key; this.value = value; } public String getKey() { return this.key; } public String getValue() { return this.value; } public void setValue(String value) { this.value = value; } } private static abstract class Entry { public abstract void write(PrintWriter w); public Optional<String> 
get(String section, String key) { // No-op return Optional.absent(); } public boolean set(String section, String key, String value) { // No-op return false; } public boolean unset(String section, String key) { return false; } } private static class Section extends Entry { private String header; private List<KeyAndValue> values; public Section(String header, List<KeyAndValue> values) { this.header = header; this.values = values; } public String getHeader() { return this.header; } public List<KeyAndValue> getValues() { return Collections.unmodifiableList(values); } @Override public Optional<String> get(String section, String key) { if (!header.equals(section)) { return Optional.absent(); } else { for (KeyAndValue kv : values) { if (kv.getKey().equals(key)) { return Optional.of(kv.getValue()); } } return Optional.absent(); } } @Override public boolean set(String section, String key, String value) { if (!header.equals(section)) { return false; } else { for (KeyAndValue kv : values) { if (kv.getKey().equals(key)) { kv.setValue(value); return true; } } values.add(new KeyAndValue(key, value)); return true; } } @Override public boolean unset(String section, String key) { if (!header.equals(section)) { return false; } else { boolean modified = false; Iterator<KeyAndValue> viterator = values.iterator(); while (viterator.hasNext()) { if (viterator.next().getKey().equals(key)) { viterator.remove(); modified = true; } } return modified; } } public void write(PrintWriter w) { w.println("[" + header.replaceAll("\\.", "\\\\") + "]"); for (KeyAndValue kv : values) { w.println(kv.getKey() + " = " + kv.getValue()); } } @Override public String toString() { StringBuffer buff = new StringBuffer(); buff.append("[" + header + "]"); for (KeyAndValue kv : values) { buff.append("[" + kv.getKey() + " : " + kv.getValue() + "]"); } return buff.toString(); } } private static class Blanks extends Entry { private int nBlanks; public Blanks(int nBlanks) { this.nBlanks = nBlanks; } public void 
write(PrintWriter w) { for (int i = nBlanks; i > 0; i--) { w.print(" "); } w.println(); } } private static class Comment extends Entry { private String content; public Comment(String content) { this.content = content; } public void write(PrintWriter w) { w.println("#" + content); } } private void checkReload() throws IOException { if (data == null || needsReload()) { reload(iniBytes()); } } public boolean needsReload() { return false; } // Note. If you're tweaking these be careful, throwing an exception in a // static initializer prevents the class from being loaded entirely. private static Pattern SECTION_HEADER = Pattern .compile("^\\p{Space}*\\[([^\\[\\]]+)]\\p{Space}*$"); private static Pattern KEY_VALUE = Pattern .compile("^\\p{Space}*([^=\\p{Space}]+)\\p{Space}*=\\p{Space}*(.*)\\p{Space}*$"); private static Pattern BLANK = Pattern.compile("^(\\p{Space}*)$"); private static Pattern COMMENT = Pattern.compile("^\\p{Space}*#(.*)$"); private void reload(byte[] ini) throws IOException { BufferedReader reader = null; try { reader = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(ini))); String sectionName = null; List<Entry> results = new ArrayList<Entry>(); List<KeyAndValue> kvs = new ArrayList<KeyAndValue>(); String line; while ((line = reader.readLine()) != null) { Matcher m; if ((m = SECTION_HEADER.matcher(line)).matches()) { String header = m.group(1); if (sectionName != null) { results.add(new Section(sectionName, kvs)); kvs = new ArrayList<KeyAndValue>(); } sectionName = header.replaceAll("\\\\", "."); } else if ((m = KEY_VALUE.matcher(line)).matches()) { if (sectionName != null) { // if we haven't encountered a section name yet, // ignore the values String key = m.group(1); String value = m.group(2); kvs.add(new KeyAndValue(key, value)); } } else if ((m = BLANK.matcher(line)).matches()) { String blanks = m.group(1); results.add(new Blanks(blanks.length())); } else if ((m = COMMENT.matcher(line)).matches()) { String comment = m.group(1); 
results.add(new Comment(comment)); } // If no pattern matches we have an invalid .ini blob but we just drop those lines. } if (sectionName != null) { results.add(new Section(sectionName, kvs)); } data = results; } catch (IOException e) { data = new ArrayList<Entry>(); } catch (RuntimeException e) { data = new ArrayList<Entry>(); } finally { if (reader != null) { reader.close(); } } } private void write() throws IOException { ByteArrayOutputStream stream = new ByteArrayOutputStream(); PrintWriter writer = new PrintWriter( new BufferedWriter(new OutputStreamWriter(stream))); try { for (Entry e : data) { e.write(writer); } } finally { writer.flush(); writer.close(); } setBytes(stream.toByteArray()); } }
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.ads.googleads.v9.services.stub; import com.google.ads.googleads.v9.resources.MobileAppCategoryConstant; import com.google.ads.googleads.v9.services.GetMobileAppCategoryConstantRequest; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import java.io.IOException; import java.util.List; import javax.annotation.Generated; import org.threeten.bp.Duration; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link MobileAppCategoryConstantServiceStub}. 
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (googleads.googleapis.com) and default port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the total timeout of getMobileAppCategoryConstant to 30 seconds:
 *
 * <pre>{@code
 * MobileAppCategoryConstantServiceStubSettings.Builder
 *     mobileAppCategoryConstantServiceSettingsBuilder =
 *         MobileAppCategoryConstantServiceStubSettings.newBuilder();
 * mobileAppCategoryConstantServiceSettingsBuilder
 *     .getMobileAppCategoryConstantSettings()
 *     .setRetrySettings(
 *         mobileAppCategoryConstantServiceSettingsBuilder
 *             .getMobileAppCategoryConstantSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setTotalTimeout(Duration.ofSeconds(30))
 *             .build());
 * MobileAppCategoryConstantServiceStubSettings mobileAppCategoryConstantServiceSettings =
 *     mobileAppCategoryConstantServiceSettingsBuilder.build();
 * }</pre>
 */
@Generated("by gapic-generator-java")
public class MobileAppCategoryConstantServiceStubSettings
    extends StubSettings<MobileAppCategoryConstantServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/adwords").build();

  // Per-method call settings; built from the Builder's corresponding field.
  private final UnaryCallSettings<GetMobileAppCategoryConstantRequest, MobileAppCategoryConstant>
      getMobileAppCategoryConstantSettings;

  /** Returns the object with the settings used for calls to getMobileAppCategoryConstant. */
  public UnaryCallSettings<GetMobileAppCategoryConstantRequest, MobileAppCategoryConstant>
      getMobileAppCategoryConstantSettings() {
    return getMobileAppCategoryConstantSettings;
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public MobileAppCategoryConstantServiceStub createStub() throws IOException {
    // Only the gRPC transport is supported by this generated stub.
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcMobileAppCategoryConstantServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "googleads.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "googleads.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic",
            GaxProperties.getLibraryVersion(MobileAppCategoryConstantServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected MobileAppCategoryConstantServiceStubSettings(Builder settingsBuilder)
      throws IOException {
    super(settingsBuilder);

    getMobileAppCategoryConstantSettings =
        settingsBuilder.getMobileAppCategoryConstantSettings().build();
  }

  /** Builder for MobileAppCategoryConstantServiceStubSettings. */
  public static class Builder
      extends StubSettings.Builder<MobileAppCategoryConstantServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<
            GetMobileAppCategoryConstantRequest, MobileAppCategoryConstant>
        getMobileAppCategoryConstantSettings;

    // Retryable status codes keyed by retry-policy name (from the service config).
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Retry timing parameters keyed by retry-policy name (from the service config).
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(5000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(3600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(3600000L))
              .setTotalTimeout(Duration.ofMillis(3600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      getMobileAppCategoryConstantSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(getMobileAppCategoryConstantSettings);
      initDefaults(this);
    }

    protected Builder(MobileAppCategoryConstantServiceStubSettings settings) {
      super(settings);

      getMobileAppCategoryConstantSettings =
          settings.getMobileAppCategoryConstantSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(getMobileAppCategoryConstantSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    private static Builder initDefaults(Builder builder) {
      builder
          .getMobileAppCategoryConstantSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to getMobileAppCategoryConstant. */
    public UnaryCallSettings.Builder<GetMobileAppCategoryConstantRequest, MobileAppCategoryConstant>
        getMobileAppCategoryConstantSettings() {
      return getMobileAppCategoryConstantSettings;
    }

    @Override
    public MobileAppCategoryConstantServiceStubSettings build() throws IOException {
      return new MobileAppCategoryConstantServiceStubSettings(this);
    }
  }
}
/* * Copyright (c) JForum Team * All rights reserved. * * Redistribution and use in source and binary forms, * with or without modification, are permitted provided * that the following conditions are met: * * 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the * following disclaimer. * 2) Redistributions in binary form must reproduce the * above copyright notice, this list of conditions and * the following disclaimer in the documentation and/or * other materials provided with the distribution. * 3) Neither the name of "Rafael Steil" nor * the names of its contributors may be used to endorse * or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT * HOLDERS AND CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL
 * THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
 * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
 * IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
 *
 * Created on 18/07/2007 17:18:41
 *
 * The JForum Project
 * http://www.jforum.net
 */
package net.jforum.search;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import net.jforum.entities.Post;
import net.jforum.exceptions.SearchException;
import net.jforum.util.preferences.ConfigKeys;
import net.jforum.util.preferences.SystemGlobals;

import org.apache.log4j.Logger;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/**
 * Indexes forum posts into Lucene. New documents are staged in an in-memory
 * (RAM) index and flushed to the persistent directory once the configured
 * document count is reached, or explicitly via {@link #flushRAMDirectory()}.
 *
 * @author Rafael Steil
 * @version $Id$
 */
public class LuceneIndexer
{
    private static final Logger LOGGER = Logger.getLogger(LuceneIndexer.class);

    // Guards all index mutation; one indexer may be used from multiple threads.
    private static final Object MUTEX = new Object();

    // Fixed at construction time.
    private final LuceneSettings settings;

    // In-memory staging index; recreated after every flush.
    private Directory ramDirectory;
    private IndexWriter ramWriter;
    private int ramNumDocs;

    private final List<NewDocumentAdded> newDocumentAddedList = new ArrayList<NewDocumentAdded>();

    public LuceneIndexer(final LuceneSettings settings)
    {
        this.settings = settings;
        this.createRAMWriter();
    }

    /**
     * Registers a listener that is notified after documents reach the persistent index.
     *
     * @deprecated the method name is misspelled; use
     *             {@link #watchNewDocumentAdded(NewDocumentAdded)} instead.
     */
    @Deprecated
    public void watchNewDocuDocumentAdded(NewDocumentAdded newDoc)
    {
        this.watchNewDocumentAdded(newDoc);
    }

    /**
     * Registers a listener that is notified after documents reach the persistent index.
     */
    public void watchNewDocumentAdded(NewDocumentAdded newDoc)
    {
        this.newDocumentAddedList.add(newDoc);
    }

    /**
     * Adds the post to the in-memory staging index, flushing it to disk when
     * the configured document threshold is reached.
     */
    public void batchCreate(final Post post)
    {
        synchronized (MUTEX) {
            try {
                final Document document = this.createDocument(post);
                this.ramWriter.addDocument(document);
                this.flushRAMDirectoryIfNecessary();
            }
            catch (IOException e) {
                throw new SearchException(e);
            }
        }
    }

    /**
     * (Re)creates the RAM staging directory and its writer, closing any
     * previous writer first.
     */
    private void createRAMWriter()
    {
        try {
            if (this.ramWriter != null) {
                this.ramWriter.close();
            }
            this.ramDirectory = new RAMDirectory();
            final IndexWriterConfig conf = new IndexWriterConfig(LuceneSettings.version,
                this.settings.analyzer()).setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
            this.ramWriter = new IndexWriter(this.ramDirectory, conf);
            this.ramNumDocs = SystemGlobals.getIntValue(ConfigKeys.LUCENE_INDEXER_RAM_NUMDOCS);
        }
        catch (IOException e) {
            throw new SearchException(e);
        }
    }

    private void flushRAMDirectoryIfNecessary()
    {
        if (this.ramWriter.maxDoc() >= this.ramNumDocs) {
            this.flushRAMDirectory();
        }
    }

    /**
     * Merges the staged in-memory index into the persistent directory and
     * recreates an empty staging index. Listeners are notified after commit.
     */
    public void flushRAMDirectory()
    {
        synchronized (MUTEX) {
            IndexWriter writer = null;

            try {
                final IndexWriterConfig conf = new IndexWriterConfig(LuceneSettings.version,
                    this.settings.analyzer()).setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
                writer = new IndexWriter(this.settings.directory(), conf);
                this.ramWriter.commit();
                this.ramWriter.close();
                writer.addIndexes(new Directory[] { this.ramDirectory });
                writer.forceMergeDeletes();
                this.createRAMWriter();
            }
            catch (IOException e) {
                throw new SearchException(e);
            }
            finally {
                if (writer != null) {
                    try {
                        writer.commit();
                        writer.close();
                        this.notifyNewDocumentAdded();
                    }
                    catch (Exception e) {
                        LOGGER.error(e.toString(), e);
                    }
                }
            }
        }
    }

    /**
     * Writes the post straight to the persistent index (no staging).
     */
    public void create(final Post post)
    {
        synchronized (MUTEX) {
            IndexWriter writer = null;

            try {
                final IndexWriterConfig conf = new IndexWriterConfig(LuceneSettings.version,
                    this.settings.analyzer()).setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
                writer = new IndexWriter(this.settings.directory(), conf);

                final Document document = this.createDocument(post);
                writer.addDocument(document);
                this.optimize(writer);

                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Indexed " + document);
                }
            }
            catch (Exception e) {
                LOGGER.error(e.toString(), e);
            }
            finally {
                if (writer != null) {
                    try {
                        writer.commit();
                        writer.close();
                        this.notifyNewDocumentAdded();
                    }
                    catch (Exception e) {
                        LOGGER.error(e.toString(), e);
                    }
                }
            }
        }
    }

    /**
     * Replaces the indexed document for the post: delete first, then re-create.
     */
    public void update(final Post post)
    {
        if (this.performDelete(post)) {
            this.create(post);
        }
    }

    // Compacts deletions every 100 documents to keep the index tidy.
    private void optimize(final IndexWriter writer) throws Exception
    {
        if (writer.maxDoc() % 100 == 0) {
            LOGGER.info("Optimizing indexes. Current number of documents is " + writer.maxDoc());
            writer.forceMergeDeletes();
            LOGGER.debug("Indexes optimized");
        }
    }

    /**
     * Converts a post into a Lucene document: keyword fields for ids/date,
     * plus a single analyzed contents field.
     */
    private Document createDocument(final Post post)
    {
        Document doc = new Document();

        doc.add(new Field(SearchFields.Keyword.POST_ID, String.valueOf(post.getId()), Store.YES, Index.NOT_ANALYZED));
        doc.add(new Field(SearchFields.Keyword.FORUM_ID, String.valueOf(post.getForumId()), Store.YES, Index.NOT_ANALYZED));
        doc.add(new Field(SearchFields.Keyword.TOPIC_ID, String.valueOf(post.getTopicId()), Store.YES, Index.NOT_ANALYZED));
        doc.add(new Field(SearchFields.Keyword.USER_ID, String.valueOf(post.getUserId()), Store.YES, Index.NOT_ANALYZED));
        doc.add(new Field(SearchFields.Keyword.DATE, this.settings.formatDateTime(post.getTime()), Store.YES, Index.NOT_ANALYZED));

        // We add the subject and message text together because, when searching, we only care about the
        // matches, not where it was performed. The real subject and contents will be fetched from the database
        doc.add(new Field(SearchFields.Indexed.CONTENTS, post.getSubject() + " " + post.getText(), Store.NO, Index.ANALYZED));

        return doc;
    }

    private void notifyNewDocumentAdded()
    {
        for (NewDocumentAdded listener : this.newDocumentAddedList) {
            listener.newDocumentAdded();
        }
    }

    public void delete(final Post post)
    {
        this.performDelete(post);
    }

    /**
     * Deletes the post's document from the persistent index.
     *
     * @return true if the delete call completed without an I/O error
     */
    private boolean performDelete(final Post post)
    {
        synchronized (MUTEX) {
            IndexReader reader = null;
            boolean status = false;

            try {
                reader = IndexReader.open(this.settings.directory(), false);
                reader.deleteDocuments(new Term(SearchFields.Keyword.POST_ID, String.valueOf(post.getId())));
                status = true;
            }
            catch (IOException e) {
                LOGGER.error(e.toString(), e);
            }
            finally {
                if (reader != null) {
                    try {
                        reader.close();
                        this.flushRAMDirectory();
                    }
                    catch (IOException e) {
                        LOGGER.error(e.toString(), e);
                    }
                }
            }

            return status;
        }
    }
}
/*
 * Copyright 2015-2016 Red Hat, Inc, and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.hal.client.configuration.subsystem.elytron;

import java.util.List;

import elemental2.dom.HTMLElement;
import org.jboss.gwt.elemento.core.IsElement;
import org.jboss.hal.ballroom.Attachable;
import org.jboss.hal.ballroom.Pages;
import org.jboss.hal.ballroom.form.Form;
import org.jboss.hal.ballroom.table.InlineAction;
import org.jboss.hal.ballroom.table.Table;
import org.jboss.hal.core.mbui.form.ModelNodeForm;
import org.jboss.hal.core.mbui.table.ModelNodeTable;
import org.jboss.hal.core.mbui.table.TableButtonFactory;
import org.jboss.hal.core.mvp.HasPresenter;
import org.jboss.hal.dmr.ModelNode;
import org.jboss.hal.dmr.NamedNode;
import org.jboss.hal.meta.Metadata;
import org.jboss.hal.resources.Ids;
import org.jboss.hal.resources.Names;

import static org.jboss.gwt.elemento.core.Elements.h;
import static org.jboss.gwt.elemento.core.Elements.p;
import static org.jboss.gwt.elemento.core.Elements.section;
import static org.jboss.hal.dmr.ModelDescriptionConstants.*;
import static org.jboss.hal.dmr.ModelNodeHelper.failSafeList;
import static org.jboss.hal.dmr.ModelNodeHelper.storeIndex;
import static org.jboss.hal.resources.Ids.FORM;
import static org.jboss.hal.resources.Ids.PAGE;
import static org.jboss.hal.resources.Ids.PAGES;

/**
 * UI element for the Elytron SASL authentication factory resources.
 * <p>
 * Builds a three-level page hierarchy: factory list -> mechanism
 * configurations of the selected factory -> mechanism realm configurations of
 * the selected mechanism configuration. Each level consists of a table plus a
 * bound form; all CRUD calls are delegated to {@link FactoriesPresenter}.
 */
class SaslAuthenticationFactoryElement implements IsElement<HTMLElement>, Attachable,
        HasPresenter<FactoriesPresenter> {

    private final Table<NamedNode> factoryTable;
    private final Form<NamedNode> factoryForm;
    private final Table<ModelNode> mcTable; // mc = mechanism-configuration
    private final Form<ModelNode> mcForm;
    private final Table<ModelNode> mrcTable; // mrc = mechanism-realm-configurations
    private final Form<ModelNode> mrcForm;
    private final Pages pages;
    private FactoriesPresenter presenter;
    // Navigation state: which factory / mechanism configuration the user
    // drilled into, and the HAL_INDEX of the selected complex-attribute rows.
    private String selectedFactory;
    private String selectedMc;
    private int mcIndex;
    private int mrcIndex;

    SaslAuthenticationFactoryElement(Metadata metadata, TableButtonFactory tableButtonFactory) {

        // SASL authentication factory: top-level table + form
        factoryTable = new ModelNodeTable.Builder<NamedNode>(id(Ids.TABLE), metadata)
                .button(tableButtonFactory.add(id(Ids.ADD), Names.SASL_AUTHENTICATION_FACTORY,
                        metadata.getTemplate(), (n, a) -> presenter.reloadSaslAuthenticationFactories()))
                .button(tableButtonFactory.remove(Names.SASL_AUTHENTICATION_FACTORY, metadata.getTemplate(),
                        (table) -> table.selectedRow().getName(),
                        () -> presenter.reloadSaslAuthenticationFactories()))
                .column(NAME, (cell, type, row, meta) -> row.getName())
                // inline link that drills down into the mechanism configurations page
                .column(new InlineAction<>(Names.MECHANISM_CONFIGURATIONS, this::showMechanismConfiguration), "15em")
                .build();
        factoryForm = new ModelNodeForm.Builder<NamedNode>(id(FORM), metadata)
                .onSave((form, changedValues) -> presenter.saveSaslAuthenticationFactory(form, changedValues))
                .build();
        HTMLElement factorySection = section()
                .add(h(1).textContent(Names.SASL_AUTHENTICATION_FACTORY))
                .add(p().textContent(metadata.getDescription().getDescription()))
                .addAll(factoryTable, factoryForm)
                .get();

        // mechanism configurations (complex attribute of the factory)
        Metadata mcMetadata = metadata.forComplexAttribute(MECHANISM_CONFIGURATIONS);
        mcTable = new ModelNodeTable.Builder<>(id(MECHANISM_CONFIGURATIONS, TABLE), mcMetadata)
                .button(tableButtonFactory.add(mcMetadata.getTemplate(),
                        table -> presenter.addSaslMechanismConfiguration(selectedFactory)))
                .button(tableButtonFactory.remove(mcMetadata.getTemplate(),
                        table -> presenter.removeSaslMechanismConfiguration(selectedFactory,
                                table.selectedRow().get(HAL_INDEX).asInt())))
                .column(MECHANISM_NAME)
                // inline link that drills down into the mechanism realm configurations page
                .column(new InlineAction<>(Names.MECHANISM_REALM_CONFIGURATIONS,
                        this::showMechanismRealmConfiguration), "20em")
                .build();
        mcForm = new ModelNodeForm.Builder<>(id(MECHANISM_CONFIGURATIONS, FORM), mcMetadata)
                .onSave(((form, changedValues) -> presenter.saveSaslMechanismConfiguration(selectedFactory,
                        form.getModel().get(HAL_INDEX).asInt(), changedValues)))
                .build();
        HTMLElement mcSection = section()
                .add(h(1).textContent(Names.MECHANISM_CONFIGURATIONS))
                .add(p().textContent(mcMetadata.getDescription().getDescription()))
                .addAll(mcTable, mcForm)
                .get();

        // mechanism realm configurations (nested complex attribute)
        Metadata mrcMetadata = mcMetadata.forComplexAttribute(MECHANISM_REALM_CONFIGURATIONS);
        mrcTable = new ModelNodeTable.Builder<>(id(MECHANISM_REALM_CONFIGURATIONS, Ids.TABLE), mrcMetadata)
                .button(tableButtonFactory.add(mrcMetadata.getTemplate(),
                        table -> presenter.addSaslMechanismRealmConfiguration(selectedFactory, mcIndex)))
                .button(tableButtonFactory.remove(mrcMetadata.getTemplate(),
                        table -> presenter.removeSaslMechanismRealmConfiguration(selectedFactory, mcIndex,
                                table.selectedRow().get(HAL_INDEX).asInt())))
                .column(REALM_NAME)
                .build();
        mrcForm = new ModelNodeForm.Builder<>(id(MECHANISM_REALM_CONFIGURATIONS, FORM), mrcMetadata)
                .onSave(((form, changedValues) -> presenter.saveSaslMechanismRealmConfiguration(selectedFactory,
                        mcIndex, mrcIndex, changedValues)))
                .build();
        HTMLElement mrcSection = section()
                .add(h(1).textContent(Names.MECHANISM_REALM_CONFIGURATIONS))
                .add(p().textContent(mrcMetadata.getDescription().getDescription()))
                .addAll(mrcTable, mrcForm)
                .get();

        // wire the three sections into a breadcrumb-style page stack
        pages = new Pages(id(PAGES), id(PAGE), factorySection);
        pages.addPage(id(PAGE), id(MECHANISM_CONFIGURATIONS, PAGE),
                () -> Names.SASL_AUTHENTICATION_FACTORY + ": " + selectedFactory,
                () -> Names.MECHANISM_CONFIGURATIONS, mcSection);
        pages.addPage(id(MECHANISM_CONFIGURATIONS, PAGE), id(MECHANISM_REALM_CONFIGURATIONS, PAGE),
                () -> Names.MECHANISM_CONFIGURATIONS + ": " + selectedMc,
                () -> Names.MECHANISM_REALM_CONFIGURATIONS, mrcSection);
    }

    // Builds element IDs scoped to the SASL authentication factory namespace.
    private String id(String... ids) {
        return Ids.build(Ids.ELYTRON_SASL_AUTHENTICATION_FACTORY, ids);
    }

    @Override
    public HTMLElement element() {
        return pages.element();
    }

    @Override
    public void attach() {
        factoryTable.attach();
        factoryForm.attach();
        factoryTable.bindForm(factoryForm);

        mcTable.attach();
        mcForm.attach();
        mcTable.bindForm(mcForm);
        // button 1 is the remove button: only enabled with a selection
        mcTable.onSelectionChange(table -> mcTable.enableButton(1, mcTable.hasSelection()));

        mrcTable.attach();
        mrcForm.attach();
        mrcTable.bindForm(mrcForm);
        mrcTable.onSelectionChange(table -> {
            mrcTable.enableButton(1, mrcTable.hasSelection());
            if (table.hasSelection()) {
                // remember the complex-attribute index of the selected row for saves
                mrcIndex = table.selectedRow().get(HAL_INDEX).asInt();
            }
        });
    }

    @Override
    public void setPresenter(FactoriesPresenter presenter) {
        this.presenter = presenter;
    }

    /**
     * Refreshes the factory table with the given nodes and, when the user is
     * currently on a drill-down page, re-shows that page with fresh data so
     * the navigation state survives a reload.
     */
    void update(List<NamedNode> nodes) {
        factoryForm.clear();
        factoryTable.update(nodes);

        if (id(MECHANISM_CONFIGURATIONS, PAGE).equals(pages.getCurrentId())) {
            nodes.stream()
                    .filter(factory -> selectedFactory.equals(factory.getName()))
                    .findFirst()
                    .ifPresent(this::showMechanismConfiguration);
        } else if (id(MECHANISM_REALM_CONFIGURATIONS, PAGE).equals(pages.getCurrentId())) {
            nodes.stream()
                    .filter(factory -> selectedFactory.equals(factory.getName()))
                    .findFirst()
                    .ifPresent(factory -> {
                        List<ModelNode> mcNodes = failSafeList(factory, MECHANISM_CONFIGURATIONS);
                        storeIndex(mcNodes);
                        mcForm.clear();
                        mcTable.update(mcNodes, modelNode -> modelNode.get(MECHANISM_NAME).asString());
                        mcNodes.stream()
                                .filter(mc -> selectedMc.equals(mc.get(MECHANISM_NAME).asString()))
                                .findFirst()
                                .ifPresent(this::showMechanismRealmConfiguration);
                    });
        }
    }

    // Drill down: show the mechanism configurations of the given factory.
    private void showMechanismConfiguration(NamedNode saslAuthenticationFactory) {
        selectedFactory = saslAuthenticationFactory.getName();
        List<ModelNode> mcNodes = failSafeList(saslAuthenticationFactory, MECHANISM_CONFIGURATIONS);
        storeIndex(mcNodes);
        mcForm.clear();
        mcTable.update(mcNodes, modelNode -> modelNode.get(MECHANISM_NAME).asString());
        mcTable.enableButton(1, mcTable.hasSelection());
        pages.showPage(id(MECHANISM_CONFIGURATIONS, PAGE));
    }

    // Drill down: show the realm configurations of the given mechanism configuration.
    private void showMechanismRealmConfiguration(ModelNode mechanismConfiguration) {
        selectedMc = mechanismConfiguration.get(MECHANISM_NAME).asString();
        mcIndex = mechanismConfiguration.get(HAL_INDEX).asInt();
        List<ModelNode> mrcNodes = failSafeList(mechanismConfiguration, MECHANISM_REALM_CONFIGURATIONS);
        storeIndex(mrcNodes);
        mrcForm.clear();
        mrcTable.update(mrcNodes, modelNode -> modelNode.get(REALM_NAME).asString());
        mrcTable.enableButton(1, mrcTable.hasSelection());
        pages.showPage(id(MECHANISM_REALM_CONFIGURATIONS, PAGE));
    }
}
package com.alibaba.weex.uitest.TC_Text; import android.app.Activity; import android.app.Application; import android.app.Instrumentation; import android.content.Intent; import android.test.ActivityInstrumentationTestCase2; import android.test.TouchUtils; import android.text.TextUtils; import android.util.Log; import android.view.View; import android.view.ViewGroup; import com.alibaba.weex.R; import com.alibaba.weex.util.ScreenShot; import com.alibaba.weex.WXPageActivity; import com.alibaba.weex.WeappJsBaseTestCase; import com.alibaba.weex.constants.Constants; import com.alibaba.weex.util.ViewUtil; import com.taobao.weex.ui.view.WXTextView; import java.io.IOException; import java.util.ArrayList; /** * Created by admin on 16/3/23. */ public class WeexUiTestCaseTcTextStyleOwn extends ActivityInstrumentationTestCase2<WXPageActivity> { public final String TAG = "TestScript_Guide=="; public WeappJsBaseTestCase weappApplication; public WXPageActivity waTestPageActivity; public ViewGroup mViewGroup; public Application mApplication; public Instrumentation mInstrumentation; public ArrayList<View> mCaseListIndexView = new ArrayList<View>(); public WeexUiTestCaseTcTextStyleOwn() { super(WXPageActivity.class); } public void setUp() throws Exception{ Log.e("TestScript_Guide", "setUp test!!"); setActivityInitialTouchMode(false); weappApplication = new WeappJsBaseTestCase(); mInstrumentation = getInstrumentation(); Intent intent = new Intent(); intent.putExtra("bundleUrl", Constants.BUNDLE_URL); launchActivityWithIntent("com.alibaba.weex", WXPageActivity.class, intent); waTestPageActivity = getActivity(); // waTestPageActivity.getIntent().getData().toString(); Log.e(TAG,"activity1=" + waTestPageActivity.toString() ); Thread.sleep(3000); mViewGroup = (ViewGroup) waTestPageActivity.findViewById(R.id.container); setViewGroup(mViewGroup); Thread.sleep(2000); TouchUtils.scrollToBottom(this, waTestPageActivity, mViewGroup); mCaseListIndexView = ViewUtil.findViewWithText(mViewGroup, 
"TC_"); addAllTargetView("TC_"); Thread.sleep(3000); } // public void testPreConditions() // { // assertNotNull(waTestPageActivity); // assertNotNull(mViewGroup); // assertNotNull(mCaseListIndexView); // // } public void testTextType(){ for(final View caseView : mCaseListIndexView){ if (((WXTextView)caseView).getText().toString().equals("TC_Text")){ Log.e(TAG, "TC_Text find"); final WXTextView inputView = (WXTextView)caseView; mInstrumentation.runOnMainSync(new Runnable() { @Override public void run() { inputView.requestFocus(); inputView.performClick(); } }); sleep(2000); setActivity(WXPageActivity.wxPageActivityInstance); Activity activity2 = getActivity(); Log.e(TAG, "activity2 = " + activity2.toString()); ViewGroup myGroup = (ViewGroup)(activity2.findViewById(R.id.container)); Log.e(TAG, myGroup.toString()); ArrayList<View> inputListView = new ArrayList<View>(); inputListView = ViewUtil.findViewWithText(myGroup, "TC_Text_Style_Own"); // myGroup.findViewsWithText(inputListView, "TC_Text_Style_Own", View.FIND_VIEWS_WITH_TEXT); Log.e(TAG, "TC_Text_Style_Own size== " + inputListView.size()); if(inputListView.size()!=0){ final WXTextView inputTypeView = (WXTextView)inputListView.get(0); mInstrumentation.runOnMainSync(new Runnable() { @Override public void run() { inputTypeView.requestFocus(); inputTypeView.performClick(); Log.e(TAG, "TC_Text_Style_Own clcik!"); // screenShot("TC_Input_Type"); } }); sleep(1000); Log.e(TAG, "TC_Text_Style_Own snap!"); screenShot("TC_Text_Style_Own"); sleep(2000); } } } } /** * get tc list by text * @param byText * @return * @throws InterruptedException */ public ArrayList<View> getTestCaseListViewByText(String byText) throws InterruptedException { Log.e("TestScript_Guide", "byText ==" + byText); if(TextUtils.isEmpty(byText)){ return null; } ArrayList<View> outViews = new ArrayList<View>(); mViewGroup.findViewsWithText(outViews, byText, View.FIND_VIEWS_WITH_TEXT); for (View view : outViews){ String viewText = 
((WXTextView)view).getText().toString(); Log.e(TAG, "viewText ==" + viewText); } return outViews; } /** * findMyCaseByText */ public View findMyCaseByText(String caseText){ if (mCaseListIndexView.size() == 0) return null; WXTextView view = null; for(int i=0; i<mCaseListIndexView.size();i++){ view = (WXTextView)mCaseListIndexView.get(i); if (view.getText().toString().toLowerCase().contains(caseText.toLowerCase())){ return view; } } return view; } /** * sleep */ public void sleep(long time){ try { Thread.sleep(time); } catch (InterruptedException e) { e.printStackTrace(); } } /** * snapshot */ public void screenShot(String shotName) { try { ScreenShot.shoot(WXPageActivity.wxPageActivityInstance, shotName); } catch (IOException e) { e.printStackTrace(); } } public void setViewGroup(ViewGroup viewGroup){ mViewGroup = viewGroup; } public void addAllTargetView(String target){ int max = 6; int count =0 ; while (count < max){ TouchUtils.dragQuarterScreenUp(this, this.getActivity()); mViewGroup = (ViewGroup) waTestPageActivity.findViewById(R.id.container); mCaseListIndexView = ViewUtil.findViewWithText(mViewGroup, target); mCaseListIndexView.addAll(mCaseListIndexView); count ++; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.bval.jsr.metadata;

import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.AnnotatedParameterizedType;
import java.lang.reflect.AnnotatedType;
import java.lang.reflect.Constructor;
import java.lang.reflect.Executable;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Parameter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import javax.validation.ConstraintDeclarationException;
import javax.validation.ConstraintTarget;
import javax.validation.GroupSequence;
import javax.validation.ParameterNameProvider;
import javax.validation.Valid;
import javax.validation.constraintvalidation.ValidationTarget;
import javax.validation.groups.ConvertGroup;

import org.apache.bval.jsr.ApacheValidatorFactory;
import org.apache.bval.jsr.ConstraintAnnotationAttributes;
import org.apache.bval.jsr.groups.GroupConversion;
import org.apache.bval.jsr.util.AnnotationsManager;
import org.apache.bval.jsr.util.Methods;
import org.apache.bval.jsr.util.ToUnmodifiable;
import org.apache.bval.util.Exceptions;
import org.apache.bval.util.Lazy;
import org.apache.bval.util.ObjectUtils;
import org.apache.bval.util.Validate;
import org.apache.bval.util.reflection.Reflection;
import org.apache.commons.weaver.privilizer.Privilizing;
import org.apache.commons.weaver.privilizer.Privilizing.CallTo;

/**
 * {@link MetadataBuilder} family that derives bean-validation metadata from
 * reflection: declared constraint annotations, {@code @Valid} cascades and
 * {@code @ConvertGroup} group conversions on classes, fields, getters,
 * constructors, methods, parameters and container element types.
 */
@Privilizing(@CallTo(Reflection.class))
public class ReflectionBuilder {

    // Builds metadata for a single bean class by reflecting over its members.
    private class ForBean<T> implements MetadataBuilder.ForBean<T> {
        private final Meta<Class<T>> meta;

        ForBean(Meta<Class<T>> meta) {
            super();
            this.meta = Validate.notNull(meta, "meta");
        }

        @Override
        public MetadataBuilder.ForClass<T> getClass(Meta<Class<T>> ignored) {
            return new ReflectionBuilder.ForClass<>(meta);
        }

        @Override
        public Map<String, MetadataBuilder.ForContainer<Field>> getFields(Meta<Class<T>> ignored) {
            final Field[] declaredFields = Reflection.getDeclaredFields(meta.getHost());
            if (declaredFields.length == 0) {
                return Collections.emptyMap();
            }
            // static and synthetic fields are not validatable properties
            return Stream.of(declaredFields).filter(f -> !(Modifier.isStatic(f.getModifiers()) || f.isSynthetic()))
                .collect(
                    Collectors.toMap(Field::getName, f -> new ReflectionBuilder.ForContainer<>(new Meta.ForField(f))));
        }

        @Override
        public Map<String, MetadataBuilder.ForContainer<Method>> getGetters(Meta<Class<T>> ignored) {
            final Method[] declaredMethods = Reflection.getDeclaredMethods(meta.getHost());
            if (declaredMethods.length == 0) {
                return Collections.emptyMap();
            }
            // group getter methods by the property name they expose
            final Map<String, Set<Method>> getters = new HashMap<>();
            for (Method m : declaredMethods) {
                if (Methods.isGetter(m)) {
                    getters.computeIfAbsent(Methods.propertyName(m), k -> new LinkedHashSet<>()).add(m);
                }
            }
            final Map<String, MetadataBuilder.ForContainer<Method>> result = new TreeMap<>();
            getters.forEach((k, methods) -> {
                // Object#getClass() is not a bean property
                if ("class".equals(k)) {
                    return;
                }
                final List<MetadataBuilder.ForContainer<Method>> delegates = methods.stream()
                    .map(g -> new ReflectionBuilder.ForContainer<>(new Meta.ForMethod(g))).collect(Collectors.toList());

                if (delegates.isEmpty()) {
                    return;
                }
                // multiple getters for one property are merged via CompositeBuilder
                final MetadataBuilder.ForContainer<Method> builder;
                if (delegates.size() == 1) {
                    builder = delegates.get(0);
                } else {
                    builder = compositeBuilder.get().new ForContainer<>(delegates);
                }
                result.put(k, builder);
            });
            return result;
        }

        @Override
        public Map<Signature, MetadataBuilder.ForExecutable<Constructor<? extends T>>> getConstructors(Meta<Class<T>> ignored) {
            final Constructor<? extends T>[] declaredConstructors = Reflection.getDeclaredConstructors(meta.getHost());
            if (declaredConstructors.length == 0) {
                return Collections.emptyMap();
            }
            return Stream.of(declaredConstructors).collect(
                Collectors.toMap(Signature::of, c -> new ReflectionBuilder.ForExecutable<>(new Meta.ForConstructor<>(c),
                    ParameterNameProvider::getParameterNames)));
        }

        @Override
        public Map<Signature, MetadataBuilder.ForExecutable<Method>> getMethods(Meta<Class<T>> ignored) {
            final Method[] declaredMethods = Reflection.getDeclaredMethods(meta.getHost());
            if (declaredMethods.length == 0) {
                return Collections.emptyMap();
            }
            // group non-static methods by signature (overloads get distinct keys)
            final Map<Signature, Set<Method>> methodsBySignature = new HashMap<>();
            for (Method m : declaredMethods) {
                if (!Modifier.isStatic(m.getModifiers())) {
                    methodsBySignature.computeIfAbsent(Signature.of(m), k -> new LinkedHashSet<>()).add(m);
                }
            }
            final Map<Signature, MetadataBuilder.ForExecutable<Method>> result = new TreeMap<>();

            // we can't filter the getters since they can be validated, todo: read the config to know if we need or not
            methodsBySignature.forEach((sig, methods) -> {
                final List<MetadataBuilder.ForExecutable<Method>> delegates = methods.stream().map(
                    g -> new ReflectionBuilder.ForExecutable<>(new Meta.ForMethod(g),
                        ParameterNameProvider::getParameterNames)).collect(Collectors.toList());

                if (delegates.isEmpty()) {
                    return;
                }
                final MetadataBuilder.ForExecutable<Method> builder;
                if (delegates.size() == 1) {
                    builder = delegates.get(0);
                } else {
                    builder = compositeBuilder.get().new ForExecutable<MetadataBuilder.ForExecutable<Method>, Method>(
                        delegates, ParameterNameProvider::getParameterNames);
                }
                result.put(sig, builder);
            });
            return result;
        }
    }

    // Common base: reads declared constraint annotations of one annotated element
    // and implements identity by the wrapped Meta.
    private abstract class ForElement<E extends AnnotatedElement> implements MetadataBuilder.ForElement<E> {
        final Meta<E> meta;

        ForElement(Meta<E> meta) {
            super();
            this.meta = Validate.notNull(meta, "meta");
        }

        @Override
        public Annotation[] getDeclaredConstraints(Meta<E> ignored) {
            return AnnotationsManager.getDeclaredConstraints(meta);
        }

        @Override
        public boolean equals(Object obj) {
            return obj == this || this.getClass().isInstance(obj) && ((ForElement<?>) obj).meta.equals(meta);
        }

        @Override
        public int hashCode() {
            return Objects.hash(getClass(), meta);
        }
    }

    // Class-level metadata: constraints plus an optional @GroupSequence redefinition.
    private class ForClass<T> extends ForElement<Class<T>> implements MetadataBuilder.ForClass<T> {

        ForClass(Meta<Class<T>> meta) {
            super(meta);
        }

        @Override
        public List<Class<?>> getGroupSequence(Meta<Class<T>> ignored) {
            final GroupSequence groupSequence = AnnotationsManager.getAnnotation(meta.getHost(), GroupSequence.class);
            // null means "no redefined group sequence" for this class
            return groupSequence == null ? null
                : Collections.unmodifiableList(Arrays.asList(groupSequence.value()));
        }
    }

    // Element that may cascade validation (@Valid), convert groups, and carry
    // container-element (type-argument) constraints, e.g. List<@NotNull String>.
    private class ForContainer<E extends AnnotatedElement> extends ReflectionBuilder.ForElement<E>
        implements MetadataBuilder.ForContainer<E> {

        ForContainer(Meta<E> meta) {
            super(meta);
        }

        @Override
        public Map<ContainerElementKey, MetadataBuilder.ForContainer<AnnotatedType>> getContainerElementTypes(
            Meta<E> ignored) {
            final AnnotatedType annotatedType = meta.getAnnotatedType();
            if (annotatedType instanceof AnnotatedParameterizedType) {
                final AnnotatedParameterizedType container = (AnnotatedParameterizedType) annotatedType;
                final Map<ContainerElementKey, MetadataBuilder.ForContainer<AnnotatedType>> result = new TreeMap<>();
                final AnnotatedType[] typeArgs = container.getAnnotatedActualTypeArguments();
                // one key per type argument position of the parameterized type
                for (int i = 0; i < typeArgs.length; i++) {
                    final ContainerElementKey key = new ContainerElementKey(container, i);
                    result.put(key, new ReflectionBuilder.ForContainer<>(new Meta.ForContainerElement(meta, key)));
                }
                return result;
            }
            return Collections.emptyMap();
        }

        @Override
        public boolean isCascade(Meta<E> ignored) {
            return AnnotationsManager.isAnnotationDirectlyPresent(meta.getHost(), Valid.class);
        }

        @Override
        public Set<GroupConversion> getGroupConversions(Meta<E> ignored) {
            return Stream.of(AnnotationsManager.getDeclaredAnnotationsByType(meta.getHost(), ConvertGroup.class))
                .map(cg -> GroupConversion.from(cg.from()).to(cg.to())).collect(ToUnmodifiable.set());
        }
    }

    // Metadata for constructors/methods: parameters, return value, and
    // cross-parameter constraints, resolved per ConstraintTarget rules.
    private class ForExecutable<E extends Executable> implements MetadataBuilder.ForExecutable<E> {
        final Meta<E> meta;
        // strategy to obtain parameter names from the configured ParameterNameProvider
        final BiFunction<ParameterNameProvider, E, List<String>> getParameterNames;

        ForExecutable(Meta<E> meta, BiFunction<ParameterNameProvider, E, List<String>> getParameterNames) {
            super();
            this.meta = Validate.notNull(meta, "meta");
            this.getParameterNames = Validate.notNull(getParameterNames, "getParameterNames");
        }

        @Override
        public List<MetadataBuilder.ForContainer<Parameter>> getParameters(Meta<E> ignored) {
            final Parameter[] parameters = meta.getHost().getParameters();
            if (parameters.length == 0) {
                return Collections.emptyList();
            }
            final List<String> parameterNames =
                getParameterNames.apply(validatorFactory.getParameterNameProvider(), meta.getHost());

            return IntStream.range(0, parameters.length).mapToObj(
                n -> new ReflectionBuilder.ForContainer<>(new Meta.ForParameter(parameters[n], parameterNames.get(n))))
                .collect(ToUnmodifiable.list());
        }

        @Override
        public ForContainer<E> getReturnValue(Meta<E> ignored) {
            // expose only constraints resolved to the RETURN_VALUE target
            return new ReflectionBuilder.ForContainer<E>(meta) {

                @Override
                public Annotation[] getDeclaredConstraints(Meta<E> meta) {
                    return getConstraints(ConstraintTarget.RETURN_VALUE);
                }
            };
        }

        @Override
        public MetadataBuilder.ForElement<E> getCrossParameter(Meta<E> ignored) {
            // expose only constraints resolved to the PARAMETERS target
            return new ReflectionBuilder.ForElement<E>(meta) {
                @Override
                public Annotation[] getDeclaredConstraints(Meta<E> meta) {
                    return getConstraints(ConstraintTarget.PARAMETERS);
                }
            };
        }

        private Annotation[] getConstraints(ConstraintTarget constraintTarget) {
            return Optional.of(getConstraintsByTarget()).map(m -> m.get(constraintTarget))
                .map(l -> l.toArray(new Annotation[l.size()])).orElse(ObjectUtils.EMPTY_ANNOTATION_ARRAY);
        }

        /*
         * Partitions the executable's declared constraints by ConstraintTarget:
         * an explicit validationAppliesTo() wins; otherwise the single supported
         * ValidationTarget of the constraint's validators decides; otherwise the
         * target is implied from the executable's shape (see
         * impliedConstraintTarget), and ambiguity raises a
         * ConstraintDeclarationException.
         */
        private Map<ConstraintTarget, List<Annotation>> getConstraintsByTarget() {
            final Annotation[] declaredConstraints = AnnotationsManager.getDeclaredConstraints(meta);
            if (ObjectUtils.isEmptyArray(declaredConstraints)) {
                return Collections.emptyMap();
            }
            final Map<ConstraintTarget, List<Annotation>> result = new EnumMap<>(ConstraintTarget.class);

            for (Annotation constraint : declaredConstraints) {
                final Class<? extends Annotation> constraintType = constraint.annotationType();
                final Optional<ConstraintTarget> explicitTarget =
                    Optional.of(ConstraintAnnotationAttributes.VALIDATION_APPLIES_TO.analyze(constraintType))
                        .filter(ConstraintAnnotationAttributes.Worker::isValid)
                        .<ConstraintTarget> map(w -> w.read(constraint)).filter(et -> et != ConstraintTarget.IMPLICIT);

                final ConstraintTarget target;

                if (explicitTarget.isPresent()) {
                    target = explicitTarget.get();
                } else {
                    final Set<ValidationTarget> supportedTargets =
                        validatorFactory.getAnnotationsManager().supportedTargets(constraintType);

                    if (supportedTargets.size() == 1) {
                        final ValidationTarget validationTarget = supportedTargets.iterator().next();
                        switch (validationTarget) {
                        case PARAMETERS:
                            target = ConstraintTarget.PARAMETERS;
                            break;
                        case ANNOTATED_ELEMENT:
                            target = ConstraintTarget.RETURN_VALUE;
                            break;
                        default:
                            throw Exceptions.create(IllegalStateException::new, "Unknown %s %s for %s",
                                ValidationTarget.class.getSimpleName(), validationTarget, constraintType);
                        }
                    } else {
                        target = impliedConstraintTarget();
                        if (target == null) {
                            Exceptions.raise(ConstraintDeclarationException::new,
                                "Found %d possible %s types for constraint type %s and no explicit assignment via #%s()",
                                supportedTargets.size(), ValidationTarget.class.getSimpleName(),
                                constraintType.getName(),
                                ConstraintAnnotationAttributes.VALIDATION_APPLIES_TO.getAttributeName());
                        }
                    }
                }
                result.computeIfAbsent(target, k -> new ArrayList<>()).add(constraint);
            }
            return result;
        }

        // No parameters -> can only apply to the return value; void return ->
        // can only apply to the parameters; otherwise ambiguous (null).
        private ConstraintTarget impliedConstraintTarget() {
            if (meta.getHost().getParameterCount() == 0) {
                return ConstraintTarget.RETURN_VALUE;
            }
            if (Void.TYPE.equals(meta.getType())) {
                return ConstraintTarget.PARAMETERS;
            }
            return null;
        }
    }

    private final ApacheValidatorFactory validatorFactory;
    // lazily created merger used when several members map to one property/signature
    private final Lazy<CompositeBuilder> compositeBuilder;

    public ReflectionBuilder(ApacheValidatorFactory validatorFactory) {
        super();
        this.validatorFactory = Validate.notNull(validatorFactory, "validatorFactory");
        this.compositeBuilder =
            new Lazy<>(() -> new CompositeBuilder(this.validatorFactory, x -> AnnotationBehavior.ABSTAIN));
    }

    /**
     * Creates a reflection-backed metadata builder for {@code beanClass}.
     */
    public <T> MetadataBuilder.ForBean<T> forBean(Class<T> beanClass) {
        return new ReflectionBuilder.ForBean<>(new Meta.ForClass<T>(beanClass));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.model.cloud; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import org.apache.camel.spi.Metadata; @Metadata(label = "routing,cloud,service-discovery") @XmlRootElement(name = "kubernetesServiceDiscovery") @XmlAccessorType(XmlAccessType.FIELD) public class KubernetesServiceCallServiceDiscoveryConfiguration extends ServiceCallServiceDiscoveryConfiguration { @XmlAttribute @Metadata(defaultValue = "environment", enums = "environment,dns,client") private String lookup = "environment"; @XmlAttribute @Metadata(label = "dns,dnssrv") private String dnsDomain; @XmlAttribute @Metadata(label = "dns,dnssrv") private String portName; @XmlAttribute @Metadata(label = "dns,dnssrv") private String portProtocol = "tcp"; @XmlAttribute private String namespace; @XmlAttribute private String apiVersion; @XmlAttribute @Metadata(label = "client") private String masterUrl; @XmlAttribute @Metadata(label = "client") private String username; @XmlAttribute @Metadata(label = "client") private String password; @XmlAttribute @Metadata(label = "client") private 
String oauthToken; @XmlAttribute @Metadata(label = "client") private String caCertData; @XmlAttribute @Metadata(label = "client") private String caCertFile; @XmlAttribute @Metadata(label = "client") private String clientCertData; @XmlAttribute @Metadata(label = "client") private String clientCertFile; @XmlAttribute @Metadata(label = "client") private String clientKeyAlgo; @XmlAttribute @Metadata(label = "client") private String clientKeyData; @XmlAttribute @Metadata(label = "client") private String clientKeyFile; @XmlAttribute @Metadata(label = "client") private String clientKeyPassphrase; @XmlAttribute @Metadata(label = "client", javaType = "java.lang.Boolean") private String trustCerts; public KubernetesServiceCallServiceDiscoveryConfiguration() { this(null); } public KubernetesServiceCallServiceDiscoveryConfiguration(ServiceCallDefinition parent) { super(parent, "kubernetes-service-discovery"); } // ************************************************************************* // Properties // ************************************************************************* public String getMasterUrl() { return masterUrl; } /** * Sets the URL to the master when using client lookup */ public void setMasterUrl(String masterUrl) { this.masterUrl = masterUrl; } public String getNamespace() { return namespace; } /** * Sets the namespace to use. Will by default use namespace from the ENV * variable KUBERNETES_MASTER. */ public void setNamespace(String namespace) { this.namespace = namespace; } public String getApiVersion() { return apiVersion; } /** * Sets the API version when using client lookup */ public void setApiVersion(String apiVersion) { this.apiVersion = apiVersion; } public String getLookup() { return lookup; } /** * How to perform service lookup. Possible values: client, dns, environment. * <p/> * When using client, then the client queries the kubernetes master to * obtain a list of active pods that provides the service, and then random * (or round robin) select a pod. 
* <p/> * When using dns the service name is resolved as * <tt>name.namespace.svc.dnsDomain</tt>. * <p/> * When using dnssrv the service name is resolved with SRV query for * <tt>_<port_name>._<port_proto>.<serviceName>.<namespace>.svc.<zone>.</tt>. * <p/> * When using environment then environment variables are used to lookup the * service. * <p/> * By default environment is used. */ public void setLookup(String lookup) { this.lookup = lookup; } public String getDnsDomain() { return dnsDomain; } /** * Sets the DNS domain to use for DNS lookup. */ public void setDnsDomain(String dnsDomain) { this.dnsDomain = dnsDomain; } public String getPortName() { return portName; } /** * Sets the Port Name to use for DNS/DNSSRV lookup. */ public void setPortName(String portName) { this.portName = portName; } public String getPortProtocol() { return portProtocol; } /** * Sets the Port Protocol to use for DNS/DNSSRV lookup. */ public void setPortProtocol(String portProtocol) { this.portProtocol = portProtocol; } public String getUsername() { return username; } /** * Sets the username for authentication when using client lookup */ public void setUsername(String username) { this.username = username; } public String getPassword() { return password; } /** * Sets the password for authentication when using client lookup */ public void setPassword(String password) { this.password = password; } public String getOauthToken() { return oauthToken; } /** * Sets the OAUTH token for authentication (instead of username/password) * when using client lookup */ public void setOauthToken(String oauthToken) { this.oauthToken = oauthToken; } public String getCaCertData() { return caCertData; } /** * Sets the Certificate Authority data when using client lookup */ public void setCaCertData(String caCertData) { this.caCertData = caCertData; } public String getCaCertFile() { return caCertFile; } /** * Sets the Certificate Authority data that are loaded from the file when * using client lookup */ public 
void setCaCertFile(String caCertFile) { this.caCertFile = caCertFile; } public String getClientCertData() { return clientCertData; } /** * Sets the Client Certificate data when using client lookup */ public void setClientCertData(String clientCertData) { this.clientCertData = clientCertData; } public String getClientCertFile() { return clientCertFile; } /** * Sets the Client Certificate data that are loaded from the file when using * client lookup */ public void setClientCertFile(String clientCertFile) { this.clientCertFile = clientCertFile; } public String getClientKeyAlgo() { return clientKeyAlgo; } /** * Sets the Client Keystore algorithm, such as RSA when using client lookup */ public void setClientKeyAlgo(String clientKeyAlgo) { this.clientKeyAlgo = clientKeyAlgo; } public String getClientKeyData() { return clientKeyData; } /** * Sets the Client Keystore data when using client lookup */ public void setClientKeyData(String clientKeyData) { this.clientKeyData = clientKeyData; } public String getClientKeyFile() { return clientKeyFile; } /** * Sets the Client Keystore data that are loaded from the file when using * client lookup */ public void setClientKeyFile(String clientKeyFile) { this.clientKeyFile = clientKeyFile; } public String getClientKeyPassphrase() { return clientKeyPassphrase; } /** * Sets the Client Keystore passphrase when using client lookup */ public void setClientKeyPassphrase(String clientKeyPassphrase) { this.clientKeyPassphrase = clientKeyPassphrase; } public String getTrustCerts() { return trustCerts; } /** * Sets whether to turn on trust certificate check when using client lookup */ public void setTrustCerts(String trustCerts) { this.trustCerts = trustCerts; } // ************************************************************************* // Fluent API // ************************************************************************* /** * Sets the URL to the master when using client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration 
masterUrl(String masterUrl) {
    setMasterUrl(masterUrl);
    return this;
}

/**
 * Sets the namespace to use. Will by default use the namespace from the
 * environment variable KUBERNETES_NAMESPACE.
 * <p/>
 * NOTE(review): previously documented as KUBERNETES_MASTER, which is the
 * master-URL variable — confirm against the kubernetes client.
 */
public KubernetesServiceCallServiceDiscoveryConfiguration namespace(String namespace) {
    setNamespace(namespace);
    return this;
}

/**
 * Sets the API version when using client lookup
 */
public KubernetesServiceCallServiceDiscoveryConfiguration apiVersion(String apiVersion) {
    setApiVersion(apiVersion);
    return this;
}

/**
 * How to perform service lookup; see {@link #setLookup(String)} for the
 * possible values and their semantics.
 */
public KubernetesServiceCallServiceDiscoveryConfiguration lookup(String lookup) {
    setLookup(lookup);
    return this;
}

/**
 * Sets the DNS domain to use for DNS/DNSSRV lookup.
 */
public KubernetesServiceCallServiceDiscoveryConfiguration dnsDomain(String dnsDomain) {
    setDnsDomain(dnsDomain);
    return this;
}

/**
 * Sets the Port Name to use for DNS/DNSSRV lookup.
 */
public KubernetesServiceCallServiceDiscoveryConfiguration portName(String portName) {
    setPortName(portName);
    return this;
}

/**
 * Sets the Port Protocol to use for DNS/DNSSRV lookup.
*/ public KubernetesServiceCallServiceDiscoveryConfiguration portProtocol(String portProtocol) { setPortProtocol(portProtocol); return this; } /** * Sets the username for authentication when using client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration username(String username) { setUsername(username); return this; } /** * Sets the password for authentication when using client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration password(String password) { setPassword(password); return this; } /** * Sets the OAUTH token for authentication (instead of username/password) * when using client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration oauthToken(String oauthToken) { setOauthToken(oauthToken); return this; } /** * Sets the Certificate Authority data when using client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration caCertData(String caCertData) { setCaCertData(caCertData); return this; } /** * Sets the Certificate Authority data that are loaded from the file when * using client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration caCertFile(String caCertFile) { setCaCertFile(caCertFile); return this; } /** * Sets the Client Certificate data when using client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration clientCertData(String clientCertData) { setClientCertData(clientCertData); return this; } /** * Sets the Client Certificate data that are loaded from the file when using * client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration clientCertFile(String clientCertFile) { setClientCertFile(clientCertFile); return this; } /** * Sets the Client Keystore algorithm, such as RSA when using client lookup */ public KubernetesServiceCallServiceDiscoveryConfiguration clientKeyAlgo(String clientKeyAlgo) { setClientKeyAlgo(clientKeyAlgo); return this; } /** * Sets the Client Keystore data when using client lookup */ public 
KubernetesServiceCallServiceDiscoveryConfiguration clientKeyData(String clientKeyData) {
    setClientKeyData(clientKeyData);
    return this;
}

/**
 * Sets the Client Keystore data that are loaded from the file when using
 * client lookup
 */
public KubernetesServiceCallServiceDiscoveryConfiguration clientKeyFile(String clientKeyFile) {
    setClientKeyFile(clientKeyFile);
    return this;
}

/**
 * Sets the Client Keystore passphrase when using client lookup
 */
public KubernetesServiceCallServiceDiscoveryConfiguration clientKeyPassphrase(String clientKeyPassphrase) {
    setClientKeyPassphrase(clientKeyPassphrase);
    return this;
}

/**
 * Sets whether to turn on trust certificate check when using client lookup.
 * Convenience overload that stores the flag as a String via
 * {@link #trustCerts(String)}, matching the String-typed field.
 */
public KubernetesServiceCallServiceDiscoveryConfiguration trustCerts(boolean trustCerts) {
    return trustCerts(Boolean.toString(trustCerts));
}

/**
 * Sets whether to turn on trust certificate check when using client lookup
 */
public KubernetesServiceCallServiceDiscoveryConfiguration trustCerts(String trustCerts) {
    setTrustCerts(trustCerts);
    return this;
}
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.diff; import com.intellij.diff.contents.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.ide.CopyPasteManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.testFramework.BinaryLightVirtualFile; import com.intellij.util.LineSeparator; import com.intellij.util.PathUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.awt.datatransfer.DataFlavor; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; public class DiffContentFactoryImpl extends DiffContentFactory { public final Logger LOG = Logger.getInstance(DiffContentFactoryImpl.class); @NotNull public static DiffContentFactoryImpl getInstanceImpl() { return (DiffContentFactoryImpl)DiffContentFactory.getInstance(); } @Override @NotNull public EmptyContent createEmpty() { 
return new EmptyContent(); } @Override @NotNull public DocumentContent create(@NotNull String text) { return create(text, (FileType)null); } @Override @NotNull public DocumentContent create(@NotNull String text, @Nullable FileType type) { return create(text, type, true); } @Override @NotNull public DocumentContent create(@NotNull String text, @Nullable FileType type, boolean respectLineSeparators) { return createImpl(text, type, null, null, respectLineSeparators, true); } @NotNull public DocumentContent create(@NotNull String text, @Nullable VirtualFile highlightFile) { return createImpl(text, highlightFile != null ? highlightFile.getFileType() : null, highlightFile, null, true, true); } @Override @NotNull public DocumentContent create(@Nullable Project project, @NotNull Document document) { return create(project, document, (FileType)null); } @Override @NotNull public DocumentContent create(@Nullable Project project, @NotNull Document document, @Nullable FileType fileType) { VirtualFile file = FileDocumentManager.getInstance().getFile(document); if (file == null) return new DocumentContentImpl(document, fileType, null, null, null); return create(project, document, file); } @Override @NotNull public DocumentContent create(@Nullable Project project, @NotNull Document document, @Nullable VirtualFile file) { if (file != null) return new FileDocumentContentImpl(project, document, file); return new DocumentContentImpl(document); } @Override @NotNull public DiffContent create(@Nullable Project project, @NotNull VirtualFile file) { if (file.isDirectory()) return new DirectoryContentImpl(project, file); DocumentContent content = createDocument(project, file); if (content != null) return content; return new FileContentImpl(project, file); } @Override @Nullable public DocumentContent createDocument(@Nullable Project project, @NotNull final VirtualFile file) { // TODO: add notification, that file is decompiled ? 
if (file.isDirectory()) return null; Document document = ApplicationManager.getApplication().runReadAction(new Computable<Document>() { @Override public Document compute() { return FileDocumentManager.getInstance().getDocument(file); } }); if (document == null) return null; return new FileDocumentContentImpl(project, document, file); } @Override @Nullable public FileContent createFile(@Nullable Project project, @NotNull VirtualFile file) { if (file.isDirectory()) return null; return (FileContent)create(project, file); } @Override @NotNull public DiffContent createClipboardContent() { return createClipboardContent(null); } @Override @NotNull public DocumentContent createClipboardContent(@Nullable DocumentContent mainContent) { String text = CopyPasteManager.getInstance().getContents(DataFlavor.stringFlavor); FileType type = mainContent != null ? mainContent.getContentType() : null; VirtualFile highlightFile = mainContent != null ? mainContent.getHighlightFile() : null; return createImpl(StringUtil.notNullize(text), type, highlightFile, null, true, false); } @NotNull private static DocumentContent createImpl(@NotNull String text, @Nullable FileType type, @Nullable VirtualFile highlightFile, @Nullable Charset charset, boolean respectLineSeparators, boolean readOnly) { // TODO: detect invalid (different across the file) separators ? LineSeparator separator = respectLineSeparators ? 
StringUtil.detectSeparators(text) : null; Document document = EditorFactory.getInstance().createDocument(StringUtil.convertLineSeparators(text)); if (readOnly) document.setReadOnly(true); return new DocumentContentImpl(document, type, highlightFile, separator, charset); } @NotNull public DiffContent createFromBytes(@Nullable Project project, @NotNull FilePath filePath, @NotNull byte[] content) throws IOException { if (filePath.getFileType().isBinary()) { return DiffContentFactory.getInstance().createBinary(project, filePath.getName(), filePath.getFileType(), content); } return FileAwareDocumentContent.create(project, content, filePath); } @Override @NotNull public DiffContent createFromBytes(@Nullable Project project, @NotNull VirtualFile highlightFile, @NotNull byte[] content) throws IOException { // TODO: check if FileType.UNKNOWN is actually a text ? if (highlightFile.getFileType().isBinary()) { return DiffContentFactory.getInstance().createBinary(project, highlightFile.getName(), highlightFile.getFileType(), content); } return FileAwareDocumentContent.create(project, content, highlightFile); } @Override @NotNull public DiffContent createBinary(@Nullable Project project, @NotNull String name, @NotNull FileType type, @NotNull byte[] content) throws IOException { boolean useTemporalFile = true; // TODO: workaround for Decompiler //boolean useTemporalFile = type instanceof ArchiveFileType; // workaround - our JarFileSystem can't process non-local files VirtualFile file; if (useTemporalFile) { if (type.getDefaultExtension().isEmpty()) { file = createTemporalFile(project, "tmp_", "_" + name, content); } else { file = createTemporalFile(project, name + "_", "." 
+ type.getDefaultExtension(), content);
      }
    }
    else {
      // non-temporal path: keep the content purely in memory as a light virtual file
      file = new BinaryLightVirtualFile(name, type, content);
    }

    return create(project, file);
  }

  /**
   * Writes {@code content} to a freshly created temp file on disk and returns the
   * corresponding {@link VirtualFile}, refreshed in the VFS.
   * <p>
   * NOTE(review): the {@code project} parameter is currently unused in this method —
   * confirm whether it is kept for API symmetry or can be dropped.
   *
   * @param prefix  temp-file name prefix; an extra '_' is appended before sanitizing
   *                (callers already passing a trailing '_' end up with a double underscore)
   * @param suffix  temp-file name suffix; an extra '_' is prepended before sanitizing
   * @param content bytes to write; an empty array yields an empty file
   * @return the virtual file backing the temp file
   * @throws IOException if the file cannot be created or is not visible in the VFS afterwards
   */
  @NotNull
  public static VirtualFile createTemporalFile(@Nullable Project project,
                                               @NotNull String prefix,
                                               @NotNull String suffix,
                                               @NotNull byte[] content) throws IOException {
    File tempFile = FileUtil.createTempFile(PathUtil.suggestFileName(prefix + "_", true, false),
                                            PathUtil.suggestFileName("_" + suffix, true, false), true);

    if (content.length != 0) {
      FileUtil.writeToFile(tempFile, content);
    }

    // findFileByIoFile may return null (e.g. the VFS does not yet know the file);
    // surface that as an IOException rather than returning null
    VirtualFile file = VfsUtil.findFileByIoFile(tempFile, true);
    if (file == null) {
      throw new IOException("Can't create temp file for revision content");
    }

    VfsUtil.markDirtyAndRefresh(true, true, true, file);
    return file;
  }
}
/* * Copyright (c) 2002-2021, City of Paris * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright notice * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice * and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
* * License 1.0 */ package fr.paris.lutece.portal.service.cache; import fr.paris.lutece.portal.service.page.PageEvent; import fr.paris.lutece.portal.service.page.PageEventListener; import fr.paris.lutece.portal.service.page.PageService; import net.sf.ehcache.Cache; import net.sf.ehcache.CacheException; import net.sf.ehcache.CacheManager; import net.sf.ehcache.Ehcache; import net.sf.ehcache.constructs.blocking.BlockingCache; import net.sf.ehcache.constructs.blocking.LockTimeoutException; import net.sf.ehcache.constructs.web.AlreadyCommittedException; import net.sf.ehcache.constructs.web.AlreadyGzippedException; import net.sf.ehcache.constructs.web.filter.FilterNonReentrantException; import net.sf.ehcache.constructs.web.filter.SimpleCachingHeadersPageCachingFilter; import org.apache.log4j.Logger; import java.util.List; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * Headers Page Caching Filter based on EHCACHE WEB */ public class HeadersPageCachingFilter extends SimpleCachingHeadersPageCachingFilter implements CacheableService, PageEventListener { private static final String BLOCKING_TIMEOUT_MILLIS = "blockingTimeoutMillis"; private static final String INIT_PARAM_CACHE_NAME = "cacheName"; private Cache _cache; private Logger _logger = Logger.getLogger( "lutece.cache" ); private boolean _bInit; private boolean _bEnable = true; private String _strCacheName; /** * {@inheritDoc } */ @Override public void doInit( FilterConfig filterConfig ) { // Override to inhibate the startup call made too early // The original treatment is done at the first doFilter call // through the init method below } /** * Initialization of the filter */ protected void init( ) { // Execute the doInit synchronized( HeadersPageCachingFilter.class ) { if ( blockingCache == null ) { _strCacheName = filterConfig.getInitParameter( INIT_PARAM_CACHE_NAME ); CacheService.getInstance( 
).createCache( _strCacheName );
                _cache = CacheManager.getInstance( ).getCache( _strCacheName );
                CacheService.registerCacheableService( this );
                _logger.debug( "Initializing cache : " + _strCacheName );

                // NOTE(review): filterConfig is not declared in this class; it is presumably
                // the protected field inherited from the ehcache-web Filter base class,
                // populated by the container before the first request — confirm
                setCacheNameIfAnyConfigured( filterConfig );

                final String localCacheName = getCacheName( );
                Ehcache cache = getCacheManager( ).getEhcache( localCacheName );

                if ( cache == null )
                {
                    throw new CacheException( "cache '" + localCacheName + "' not found in configuration" );
                }

                if ( !( cache instanceof BlockingCache ) )
                {
                    // decorate and substitute : wrap the configured cache in a BlockingCache so
                    // that concurrent misses on the same key block instead of all rendering the page
                    BlockingCache newBlockingCache = new BlockingCache( cache );
                    getCacheManager( ).replaceCacheWithDecoratedCache( cache, newBlockingCache );
                }

                blockingCache = (BlockingCache) getCacheManager( ).getEhcache( localCacheName );

                // optional "blockingTimeoutMillis" init-param; null or <= 0 leaves the default
                Integer blockingTimeoutMillis = parseBlockingCacheTimeoutMillis( filterConfig );

                if ( ( blockingTimeoutMillis != null ) && ( blockingTimeoutMillis > 0 ) )
                {
                    blockingCache.setTimeoutMillis( blockingTimeoutMillis );
                }
            }

            PageService.addPageEventListener( this );
        }

        _bInit = true;
    }

    /**
     * Returns a boolean describing whether the filter has been initialized
     *
     * @return True if the filter is initialized
     */
    protected boolean getInit( )
    {
        return _bInit;
    }

    /**
     * Reads the filterConfig for the parameter "blockingTimeoutMillis", and if found, set the blocking timeout. If there is a parsing exception, no timeout is
     * set.
* @param filterConfig
     *            The filter config
     * @return The timeout value, or null when the parameter is absent or not a valid integer
     */
    private Integer parseBlockingCacheTimeoutMillis( FilterConfig filterConfig )
    {
        String timeout = filterConfig.getInitParameter( BLOCKING_TIMEOUT_MILLIS );

        try
        {
            // Integer.parseInt( null ) also throws NumberFormatException, so a missing
            // init-param falls through to the null return below as well
            return Integer.parseInt( timeout );
        }
        catch( NumberFormatException e )
        {
            return null;
        }
    }

    /**
     * {@inheritDoc } This method is overridden to provide the cache name
     */
    @Override
    protected String getCacheName( )
    {
        return _strCacheName;
    }

    /**
     * {@inheritDoc }
     *
     * Performs the deferred initialization on the first request (doInit inhibits the
     * too-early startup call), then serves from the cache when enabled or passes the
     * request straight down the chain when disabled.
     */
    @Override
    protected void doFilter( HttpServletRequest request, HttpServletResponse response, FilterChain chain )
            throws AlreadyGzippedException, AlreadyCommittedException, FilterNonReentrantException, LockTimeoutException, Exception
    {
        if ( !_bInit )
        {
            init( );
        }

        if ( _bEnable )
        {
            super.doFilter( request, response, chain );
            _logger.debug( "URI served from cache : " + request.getRequestURI( ) );
        }
        else
        {
            chain.doFilter( request, response );
        }
    }

    // Cacheable Service implementation

    /**
     * {@inheritDoc }
     */
    @Override
    public boolean isCacheEnable( )
    {
        return _bEnable;
    }

    /**
     * {@inheritDoc }
     */
    @Override
    public int getCacheSize( )
    {
        return _cache.getSize( );
    }

    /**
     * {@inheritDoc }
     */
    @Override
    public void resetCache( )
    {
        _cache.removeAll( );
    }

    /**
     * {@inheritDoc }
     */
    @Override
    public String getName( )
    {
        return _strCacheName;
    }

    /**
     * {@inheritDoc }
     *
     * Disabling the cache also flushes its current content.
     */
    @Override
    public void enableCache( boolean bEnable )
    {
        _bEnable = bEnable;

        if ( ( !_bEnable ) && ( _cache != null ) )
        {
            _cache.removeAll( );
        }

        CacheService.updateCacheStatus( this );
    }

    /**
     * {@inheritDoc }
     */
    @Override
    public List<String> getKeys( )
    {
        // NOTE(review): Ehcache getKeys( ) returns a raw List; the conversion to
        // List<String> is unchecked — keys are expected to be the cached URI strings
        return _cache.getKeys( );
    }

    /**
     * {@inheritDoc }
     */
    @Override
    public int getMaxElements( )
    {
        return _cache.getCacheConfiguration( ).getMaxElementsInMemory( );
    }

    /**
     * {@inheritDoc }
     */
    @Override
    public long getTimeToLive( )
    {
        return _cache.getCacheConfiguration( ).getTimeToLiveSeconds( );
    }

    /**
     * {@inheritDoc }
     */
    @Override
    public long getMemorySize( )
    {
        return _cache.calculateInMemorySize( );
    }
/**
 * {@inheritDoc }
 */
@Override
public String getInfos( )
{
    return CacheService.getInfos( _cache );
}

/**
 * {@inheritDoc }
 *
 * Invalidates every cached entry whose key references the page targeted by the
 * event. A PAGE_CREATED event cannot affect entries already cached, so it is
 * ignored without scanning the keys.
 */
@Override
public void processPageEvent( PageEvent event )
{
    // The event type is invariant for the whole loop : check it once up front
    // instead of re-evaluating it for every cached key, and skip the key scan
    // entirely for PAGE_CREATED events (observable behavior is unchanged)
    if ( event.getEventType( ) == PageEvent.PAGE_CREATED )
    {
        return;
    }

    String strPattern = "page_id=" + event.getPage( ).getId( );

    for ( String strKey : (List<String>) blockingCache.getKeys( ) )
    {
        if ( strKey.contains( strPattern ) )
        {
            blockingCache.remove( strKey );
        }
    }
}
}
package org.deidentifier.arx.benchmark; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.deidentifier.arx.ARXAnonymizer; import org.deidentifier.arx.ARXConfiguration; import org.deidentifier.arx.ARXPopulationModel; import org.deidentifier.arx.ARXResult; import org.deidentifier.arx.ARXSolverConfiguration; import org.deidentifier.arx.AttributeType.Hierarchy; import org.deidentifier.arx.Data; import org.deidentifier.arx.criteria.AverageReidentificationRisk; import org.deidentifier.arx.criteria.KAnonymity; import org.deidentifier.arx.criteria.PopulationUniqueness; import org.deidentifier.arx.io.CSVHierarchyInput; import org.deidentifier.arx.metric.Metric; import org.deidentifier.arx.metric.Metric.AggregateFunction; import org.deidentifier.arx.risk.RiskModelPopulationUniqueness.PopulationUniquenessModel; import de.linearbits.subframe.Benchmark; import de.linearbits.subframe.analyzer.ValueBuffer; /* * ARX: Powerful Data Anonymization * Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Test for data transformations. 
* * @author Fabian Prasser * @author Florian Kohlmayer */ public abstract class BenchmarkExperimentScalability { /** The benchmark instance */ private static final Benchmark BENCHMARK = new Benchmark(new String[] { "Dataset", "Rows", "Columns"}); /** TOTAL */ public static final int TIME_UNIQUENESS = BENCHMARK.addMeasure("time-(0.01)-uniqueness"); /** TOTAL */ public static final int TIME_STRICT_AVERAGE = BENCHMARK.addMeasure("time-(3,5)-strict-average-risk"); /** TOTAL */ public static final int TIME_ANONYMITY = BENCHMARK.addMeasure("time-(5)-anonymity"); /** TOTAL */ public static final int UTILITY_UNIQUENESS = BENCHMARK.addMeasure("utility-(0.01)-uniqueness"); /** TOTAL */ public static final int UTILITY_STRICT_AVERAGE = BENCHMARK.addMeasure("utility-(3,5)-strict-average-risk"); /** TOTAL */ public static final int UTILITY_ANONYMITY = BENCHMARK.addMeasure("utility-(5)-anonymity"); /** VALUE */ private static final double[][] SOLVER_START_VALUES = getSolverStartValues(); /** VALUE */ private static final double POPULATION_USA = 318.9 * Math.pow(10d, 6d); /** VALUE */ private static final int REPETITIONS = 5; /** START_INDEX */ private static int START_INDEX = 0; public static void main(String[] args) throws IOException { // Parse commandline if (args != null && args.length != 0) { int index = -1; try { index = Integer.parseInt(args[0]); } catch (Exception e) { index = -1; } if (index != -1) { START_INDEX = index; } else { START_INDEX = 0; } } // Init BENCHMARK.addAnalyzer(TIME_UNIQUENESS, new ValueBuffer()); BENCHMARK.addAnalyzer(TIME_STRICT_AVERAGE, new ValueBuffer()); BENCHMARK.addAnalyzer(TIME_ANONYMITY, new ValueBuffer()); BENCHMARK.addAnalyzer(UTILITY_UNIQUENESS, new ValueBuffer()); BENCHMARK.addAnalyzer(UTILITY_STRICT_AVERAGE, new ValueBuffer()); BENCHMARK.addAnalyzer(UTILITY_ANONYMITY, new ValueBuffer()); // Perform String[] datasets = new String[] { "adult", "cup", "fars", "atus", "ihis" }; for (int i = START_INDEX; i < datasets.length; i++) { 
analyze(datasets[i]); } } private static void analyze(String dataset) throws IOException { Data data = getDataObject(dataset); int allColumns = data.getHandle().getNumColumns(); int allRows = data.getHandle().getNumRows(); System.out.println("Running: " + dataset + " - column scaling benchmark"); // Foreach set of columns for (int columns = 3; columns <= allColumns; columns++) { System.out.println(" - Columns: " + columns + "/" + allColumns); // Run & Store BENCHMARK.addRun(dataset, allRows, columns); analyze(dataset, allRows, columns); BENCHMARK.getResults().write(new File("results/scalability.csv")); } System.out.println("Running: " + dataset + " - row scaling benchmark"); // Foreach set of rows int offset = allRows / 10; for (int index = 1; index <= 10; index++) { System.out.println(" - Step: " + index + "/" + 10); // Compute rows int rows = index * offset; if (index == 10) rows = allRows; // Run & Store BENCHMARK.addRun(dataset, rows, allColumns); analyze(dataset, rows, allColumns); BENCHMARK.getResults().write(new File("results/scalability.csv")); } } private static void analyze(String dataset, int rows, int columns) throws IOException { Data data = getDataObject(dataset, rows, columns); // Uniqueness ARXConfiguration config = ARXConfiguration.create(); config.setMetric(Metric.createPrecomputedLossMetric(1.0d, 0.5d, AggregateFunction.GEOMETRIC_MEAN)); config.setMaxOutliers(1d); config.addCriterion(new PopulationUniqueness(0.01d, PopulationUniquenessModel.PITMAN, new ARXPopulationModel(data.getHandle(), POPULATION_USA), ARXSolverConfiguration.create().preparedStartValues(SOLVER_START_VALUES) .iterationsPerTry(15))); ARXAnonymizer anonymizer = new ARXAnonymizer(); // Warmup ARXResult result = anonymizer.anonymize(data, config); double utility = 1d - Double.valueOf(result.getGlobalOptimum().getMaximumInformationLoss().toString()); data.getHandle().release(); long time = System.currentTimeMillis(); for (int i=0; i<REPETITIONS; i++) { anonymizer.anonymize(data, 
config); data.getHandle().release(); } double timeUniqueness = (double)(System.currentTimeMillis() - time) / (double)REPETITIONS; double utilityUniqueness = utility; // Strict average config = ARXConfiguration.create(); config.setMetric(Metric.createPrecomputedLossMetric(1.0d, 0.5d, AggregateFunction.GEOMETRIC_MEAN)); config.setMaxOutliers(1d); config.addCriterion(new KAnonymity(3)); config.addCriterion(new AverageReidentificationRisk(0.2d)); // Warmup result = anonymizer.anonymize(data, config); utility = 1d - Double.valueOf(result.getGlobalOptimum().getMaximumInformationLoss().toString()); data.getHandle().release(); time = System.currentTimeMillis(); for (int i=0; i<REPETITIONS; i++) { anonymizer.anonymize(data, config); data.getHandle().release(); } double timeStrictAverage = (double)(System.currentTimeMillis() - time) / (double)REPETITIONS; double utilityStrictAverage = utility; // K-anonymity config = ARXConfiguration.create(); config.setMetric(Metric.createPrecomputedLossMetric(1.0d, 0.5d, AggregateFunction.GEOMETRIC_MEAN)); config.setMaxOutliers(1d); config.addCriterion(new KAnonymity(5)); // Warmup result = anonymizer.anonymize(data, config); utility = 1d - Double.valueOf(result.getGlobalOptimum().getMaximumInformationLoss().toString()); data.getHandle().release(); time = System.currentTimeMillis(); for (int i=0; i<REPETITIONS; i++) { anonymizer.anonymize(data, config); data.getHandle().release(); } double timeAnonymity = (double)(System.currentTimeMillis() - time) / (double)REPETITIONS; double utilityAnonymity = utility; BENCHMARK.addValue(TIME_UNIQUENESS, timeUniqueness); BENCHMARK.addValue(TIME_STRICT_AVERAGE, timeStrictAverage); BENCHMARK.addValue(TIME_ANONYMITY, timeAnonymity); BENCHMARK.addValue(UTILITY_UNIQUENESS, utilityUniqueness); BENCHMARK.addValue(UTILITY_STRICT_AVERAGE, utilityStrictAverage); BENCHMARK.addValue(UTILITY_ANONYMITY, utilityAnonymity); } /** * Returns the data object for the test case. 
* * @param dataset * @return * @throws IOException */ private static Data getDataObject(final String dataset) throws IOException { // Load dataset final Data data = Data.create("./data/"+dataset+".csv", ';'); // Load hierarchies prepareDataObject(dataset, data, Integer.MAX_VALUE); return data; } /** * Returns the data object for the test case. * * @param dataset * @param rows * @param columns * @return * @throws IOException */ private static Data getDataObject(final String dataset, int rows, int columns) throws IOException { // Load dataset Data data = Data.create("./data/"+dataset+".csv", ';'); // Select rows Iterator<String[]> iter = data.getHandle().iterator(); List<String[]> selection = new ArrayList<String[]>(); // Add header selection.add(iter.next()); // Add payload for (int i=0; i<rows; i++) { String[] row = iter.next(); selection.add(row); } // Create data object data = Data.create(selection); // Load hierarchies and project prepareDataObject(dataset, data, columns); return data; } /** * Creates start values for the solver * @return */ private static double[][] getSolverStartValues() { double[][] result = new double[100][]; int index = 0; for (double d1 = 10d; d1 <=100d; d1 += 10d) { for (double d2 = 1000000d; d2 <= 10000000d; d2 += 1000000d) { result[index++] = new double[] { d1, d2 }; } } return result; } /** * Loads hierarchies * @param dataset * @param data * @param columns * @return * @throws IOException */ private static void prepareDataObject(final String dataset, final Data data, int columns) throws IOException { // Read generalization hierachies final FilenameFilter hierarchyFilter = new FilenameFilter() { @Override public boolean accept(final File dir, final String name) { if (name.matches(dataset+"_hierarchy_(.)+.csv")) { return true; } else { return false; } } }; final File testDir = new File("./hierarchies"); final File[] genHierFiles = testDir.listFiles(hierarchyFilter); final Pattern pattern = Pattern.compile("_hierarchy_(.*?).csv"); for 
(final File file : genHierFiles) { final Matcher matcher = pattern.matcher(file.getName()); if (matcher.find()) { final CSVHierarchyInput hier = new CSVHierarchyInput(file, ';'); final String attributeName = matcher.group(1); if (data.getHandle().getColumnIndexOf(attributeName) < columns) { data.getDefinition().setAttributeType(attributeName, Hierarchy.create(hier.getHierarchy())); } } } } }
package net.tomp2p.relay;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import net.tomp2p.connection.*;
import net.tomp2p.futures.BaseFutureAdapter;
import net.tomp2p.futures.FutureChannelCreator;
import net.tomp2p.futures.FutureResponse;
import net.tomp2p.message.Buffer;
import net.tomp2p.message.Decoder;
import net.tomp2p.message.Encoder;
import net.tomp2p.message.Message;
import net.tomp2p.p2p.Peer;
import net.tomp2p.peers.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.SignatureException;
import java.security.spec.InvalidKeySpecException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

//import net.tomp2p.storage.AlternativeCompositeByteBuf;

/**
 * Static utilities for relaying: (un)flattening peer maps, composing and
 * decomposing message buffers, message (de)serialization, String/Buffer
 * conversion, and sending messages over existing peer connections.
 */
public class RelayUtils {

	private static final Logger LOG = LoggerFactory.getLogger(RelayUtils.class);

	// Shared UTF-8 codec for encodeString/decodeString. CharsetEncoder and
	// CharsetDecoder are not thread-safe, so all access is synchronized on the
	// respective instance. Made final: they are never reassigned.
	private static final Charset charset = Charset.forName("UTF-8");
	private static final CharsetEncoder encoder = charset.newEncoder();
	private static final CharsetDecoder decoder = charset.newDecoder();

	private RelayUtils() {
		// only static methods
	}

	/**
	 * Rebuilds a verified peer map from a flat collection of peer addresses.
	 *
	 * @param map the flat collection of peer addresses
	 * @param sender the peer whose id seeds the peer-map configuration
	 * @return the verified peer map, partitioned into bags
	 */
	public static List<Map<Number160, PeerStatistic>> unflatten(Collection<PeerAddress> map, PeerAddress sender) {
		PeerMapConfiguration peerMapConfiguration = new PeerMapConfiguration(sender.peerId());
		PeerMap peerMap = new PeerMap(peerMapConfiguration);
		for (PeerAddress peerAddress : map) {
			LOG.debug("found peer in unflatten for relaying, {}", peerAddress);
			peerMap.peerFound(peerAddress, null, null, null);
		}
		return peerMap.peerMapVerified();
	}

	/**
	 * Flattens a bagged peer map into a single collection of peer addresses.
	 *
	 * @param maps the bagged peer map
	 * @return all contained peer addresses
	 */
	public static Collection<PeerAddress> flatten(List<Map<Number160, PeerStatistic>> maps) {
		Collection<PeerAddress> result = new ArrayList<PeerAddress>();
		for (Map<Number160, PeerStatistic> map : maps) {
			for (PeerStatistic peerStatistic : map.values()) {
				result.add(peerStatistic.peerAddress());
			}
		}
		return result;
	}

	/**
	 * Composes all messages of a list into a single buffer object, ready to be transmitted over the network.
	 * The composing happens in-order. Alternatively, the message size and then the message is written to the
	 * buffer. Use {@link RelayUtils#decomposeCompositeBuffer(ByteBuf, InetSocketAddress, InetSocketAddress, SignatureFactory)}
	 * to disassemble. (The link previously pointed at a method on another class.)
	 *
	 * @param messages the messages to compose
	 * @param signatureFactory the signature factory, necessary for encoding the messages
	 * @return a single buffer holding all messages of the list
	 */
	public static ByteBuf composeMessageBuffer(List<Message> messages, SignatureFactory signatureFactory) {
		ByteBuf buffer = Unpooled.buffer();
		for (Message msg : messages) {
			try {
				msg.restoreContentReferences();
				msg.restoreBuffers();
				Buffer encoded = encodeMessage(msg, signatureFactory);
				// length prefix first, then the payload
				buffer.writeInt(encoded.length());
				buffer.writeBytes(encoded.buffer());
			} catch (Exception e) {
				// best-effort: a single bad message must not break the batch
				LOG.error("Cannot encode the buffered message. Skip it.", e);
			}
		}
		return buffer;
	}

	/**
	 * Decomposes a buffer containing multiple buffers into an (ordered) list of small buffers. Alternating,
	 * the size of the message and the message itself are encoded in the message buffer. First, the size is
	 * read, then k bytes are read from the buffer (the message). Then again, the size of the next messages is
	 * determined.
	 *
	 * @param messageBuffer the message buffer
	 * @param recipient the recipient socket address, required for decoding
	 * @param sender the sender socket address, required for decoding
	 * @param signatureFactory the signature factory, necessary for decoding the messages
	 * @return a list of messages
	 */
	public static List<Message> decomposeCompositeBuffer(ByteBuf messageBuffer, InetSocketAddress recipient, InetSocketAddress sender,
			SignatureFactory signatureFactory) {
		List<Message> messages = new ArrayList<Message>();
		while (messageBuffer.readableBytes() > 0) {
			int size = messageBuffer.readInt();
			ByteBuf message = messageBuffer.readBytes(size);
			try {
				Message decodedMessage = decodeMessage(message, recipient, sender, signatureFactory);
				messages.add(decodedMessage);
			} catch (Exception e) {
				// best-effort: skip undecodable entries, keep the rest
				LOG.error("Cannot decode buffered message. Skip it.", e);
			}
		}
		return messages;
	}

	/**
	 * Encodes a message into a buffer, such that it can be used as a message payload (piggybacked), stored, etc.
	 *
	 * @param message the message to encode
	 * @param signatureFactory the signature factory used by the encoder
	 * @return the encoded message wrapped in a {@link Buffer}
	 */
	public static Buffer encodeMessage(Message message, SignatureFactory signatureFactory) throws InvalidKeyException,
			SignatureException, IOException {
		Encoder e = new Encoder(signatureFactory);
		CompositeByteBuf buf = Unpooled.compositeBuffer();
		e.write(buf, message, message.receivedSignature());
		// removed leftover debug output (System.err.println) that polluted stderr
		return new Buffer(buf);
	}

	/**
	 * Decodes a message which was encoded using {{@link #encodeMessage(Message, SignatureFactory)}}.
	 *
	 * @param buf the buffer holding the encoded message
	 * @param recipient the recipient socket address
	 * @param sender the sender socket address
	 * @param signatureFactory the signature factory used by the decoder
	 * @return the decoded message
	 */
	public static Message decodeMessage(ByteBuf buf, InetSocketAddress recipient, InetSocketAddress sender,
			SignatureFactory signatureFactory) throws InvalidKeyException, NoSuchAlgorithmException, InvalidKeySpecException,
			SignatureException, IOException {
		Decoder d = new Decoder(signatureFactory);
		// remember the reader index so the signature can be verified over the raw bytes
		final int readerBefore = buf.readerIndex();
		d.decodeHeader(buf, recipient, sender);
		final boolean donePayload = d.decodePayload(buf);
		d.decodeSignature(buf, readerBefore, donePayload);
		return d.message();
	}

	/**
	 * Decodes a relayed message. Currently this simply delegates to
	 * {@link #decodeMessage(ByteBuf, InetSocketAddress, InetSocketAddress, SignatureFactory)}.
	 * NOTE(review): the previously documented check that the relay peers of the
	 * decoded message are set correctly is not implemented here — TODO confirm
	 * whether that validation is still required.
	 */
	public static Message decodeRelayedMessage(ByteBuf buf, InetSocketAddress recipient, InetSocketAddress sender,
			SignatureFactory signatureFactory) throws InvalidKeyException, NoSuchAlgorithmException, InvalidKeySpecException,
			SignatureException, IOException {
		final Message decodedMessage = decodeMessage(buf, recipient, sender, signatureFactory);
		return decodedMessage;
	}

	/**
	 * Calculates the size of the message by actually encoding it.
	 *
	 * @param message the message to measure
	 * @param signatureFactory the signature factory used for encoding
	 * @return the encoded size in bytes
	 */
	public static int getMessageSize(Message message, SignatureFactory signatureFactory) throws InvalidKeyException,
			SignatureException, IOException {
		// TODO instead of real encoding, calculate it using the content references
		int size = encodeMessage(message, signatureFactory).length();
		// encoding consumes the buffers; restore them so the message stays usable
		message.restoreContentReferences();
		message.restoreBuffers();
		return size;
	}

	/**
	 * Encodes any String into a buffer to send it with a message
	 *
	 * @param content the String to encode into a buffer
	 * @return a buffer containing the (encoded) String, or null on encoding failure or null input
	 */
	public static Buffer encodeString(String content) {
		if (content == null) {
			return null;
		}
		ByteBuffer byteBuffer;
		// CharsetEncoder is stateful and not thread-safe
		synchronized (encoder) {
			encoder.reset();
			try {
				byteBuffer = encoder.encode(CharBuffer.wrap(content));
			} catch (CharacterCodingException e) {
				return null;
			}
			encoder.flush(byteBuffer);
		}
		ByteBuf wrappedBuffer = Unpooled.wrappedBuffer(byteBuffer);
		return new Buffer(wrappedBuffer);
	}

	/**
	 * Decodes buffer containing a String
	 *
	 * @param buffer the buffer received in a message
	 * @return the decoded String, or null on decoding failure or null/empty input
	 */
	public static String decodeString(Buffer buffer) {
		if (buffer == null || buffer.buffer() == null) {
			return null;
		}
		ByteBuffer nioBuffer = buffer.buffer().nioBuffer();
		// CharsetDecoder is stateful and not thread-safe
		synchronized (decoder) {
			decoder.reset();
			CharBuffer decoded;
			try {
				decoded = decoder.decode(nioBuffer);
			} catch (CharacterCodingException e) {
				return null;
			}
			decoder.flush(decoded);
			return decoded.toString();
		}
	}

	/**
	 * Send a Message from one Peer to another Peer internally. This avoids the
	 * overhead of sendDirect.
	 */
	private static void send(final PeerConnection peerConnection, PeerBean peerBean, ConnectionBean connectionBean,
			final FutureResponse futureResponse) {
		final RequestHandler requestHandler = new RequestHandler(futureResponse, peerBean, connectionBean, connectionBean
				.channelServer().channelServerConfiguration());
		//TODO: enable:
		//final FutureChannelCreator fcc = peerConnection.acquire(futureResponse);
		//fcc.addListener(new BaseFutureAdapter<FutureChannelCreator>() {
		//	@Override
		//	public void operationComplete(FutureChannelCreator future) throws Exception {
		//		if (future.isSuccess()) {
		//			requestHandler.sendTCP(peerConnection.channelCreator(), peerConnection);
		//		} else {
		//			futureResponse.failed(future);
		//		}
		//	}
		//});
	}

	/**
	 * Send a Message from one Peer to another Peer internally. This avoids the
	 * overhead of sendDirect. This Method is used for relaying and reverse
	 * Connection setup.
	 *
	 * @return the response
	 */
	public static FutureResponse send(final PeerConnection peerConnection, PeerBean peerBean,
			ConnectionBean connectionBean, Message message) {
		final FutureResponse futureResponse = new FutureResponse(message);
		send(peerConnection, peerBean, connectionBean, futureResponse);
		return futureResponse;
	}

	/**
	 * Opens a new peer connection to the receiver and sends the message through it.
	 *
	 * @param peer the sending peer
	 * @param message the message to send
	 * @return the future response
	 */
	public static FutureResponse connectAndSend(final Peer peer, final Message message) {
		final FutureResponse futureResponse = new FutureResponse(message);
		final RequestHandler requestHandler = new RequestHandler(futureResponse, peer.peerBean(), peer.connectionBean(),
				peer.connectionBean().channelServer().channelServerConfiguration());

		final FutureChannelCreator fpc = peer.connectionBean().reservation().create(0, 1);
		fpc.addListener(new BaseFutureAdapter<FutureChannelCreator>() {
			@Override
			public void operationComplete(final FutureChannelCreator futureChannelCreator) throws Exception {
				if (futureChannelCreator.isSuccess()) {
					requestHandler.sendTCP(fpc.channelCreator());
				} else {
					futureResponse.failed(fpc);
				}
			}
		});
		return futureResponse;
	}
}
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.bazel.rules.android.ndkcrosstools; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.bazel.rules.android.ndkcrosstools.r10e.NdkMajorRevisionR10; import com.google.devtools.build.lib.bazel.rules.android.ndkcrosstools.r12.NdkMajorRevisionR12; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.util.ResourceFileLoader; import com.google.devtools.build.lib.view.config.crosstool.CrosstoolConfig.CToolchain; import com.google.devtools.build.lib.view.config.crosstool.CrosstoolConfig.CrosstoolRelease; import com.google.devtools.build.lib.view.config.crosstool.CrosstoolConfig.DefaultCpuToolchain; import com.google.devtools.build.lib.view.config.crosstool.CrosstoolConfig.ToolPath; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.Map.Entry; import java.util.Set; import org.junit.Test; import org.junit.runner.RunWith; import 
org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/** Tests for {@link AndroidNdkCrosstools}. */
@RunWith(Parameterized.class)
public class AndroidNdkCrosstoolsTest {

  // Host platform string the NDK test fixtures were captured on.
  private static final String HOST_PLATFORM = "linux-x86_64";
  // Repository name used to construct NdkPaths; also baked into the fixture paths.
  private static final String REPOSITORY_NAME = "testrepository";

  /**
   * One parameterization of the test: an NDK revision plus the fixture files
   * listing every file and directory in that NDK distribution.
   */
  private static class AndroidNdkCrosstoolsTestParams {
    private final ApiLevel apiLevel;
    private final NdkRelease ndkRelease;
    // ndkfiles.txt contains a list of every file in the ndk, created using this command at the
    // root of the Android NDK for version r10e (64-bit):
    //   find . -xtype f | sed 's|^\./||' | sort
    // and similarly for ndkdirectories, except "-xtype d" is used.
    //
    // It's unfortunate to have files like these, since they're large and brittle, but since the
    // whole NDK can't be checked in to test against, it's about the most that can be done right
    // now.
    private final String ndkFilesFilename;
    private final String ndkDirectoriesFilename;

    AndroidNdkCrosstoolsTestParams(
        ApiLevel apiLevel,
        NdkRelease ndkRelease,
        String ndkFilesFilename,
        String ndkDirectoriesFilename) {
      this.apiLevel = apiLevel;
      this.ndkRelease = ndkRelease;
      this.ndkFilesFilename = ndkFilesFilename;
      this.ndkDirectoriesFilename = ndkDirectoriesFilename;
    }

    // Looks up the crosstool generator registered for this NDK major revision.
    NdkMajorRevision getNdkMajorRevision() {
      return AndroidNdkCrosstools.KNOWN_NDK_MAJOR_REVISIONS.get(ndkRelease.majorRevision);
    }

    // Reads the file-listing fixture and prefixes each entry with "ndk/".
    ImmutableSet<String> getNdkFiles() throws IOException {
      String ndkFilesFileContent =
          ResourceFileLoader.loadResource(AndroidNdkCrosstoolsTest.class, ndkFilesFilename);
      ImmutableSet.Builder<String> ndkFiles = ImmutableSet.builder();
      for (String line : ndkFilesFileContent.split("\n")) {
        // The contents of the NDK are placed at "external/%repositoryName%/ndk".
        // The "external/%repositoryName%" part is removed using NdkPaths.stripRepositoryPrefix,
        // but to make it easier the "ndk/" part is added here.
        ndkFiles.add("ndk/" + line);
      }
      return ndkFiles.build();
    }

    // Reads the directory-listing fixture, prefixing entries the same way.
    ImmutableSet<String> getNdkDirectories() throws IOException {
      String ndkFilesFileContent =
          ResourceFileLoader.loadResource(AndroidNdkCrosstoolsTest.class, ndkDirectoriesFilename);
      ImmutableSet.Builder<String> ndkDirectories = ImmutableSet.builder();
      for (String line : ndkFilesFileContent.split("\n")) {
        ndkDirectories.add("ndk/" + line);
      }
      return ndkDirectories.build();
    }
  }

  /** Parameterizations: NDK r10e and NDK r12, each with its own fixture files. */
  @Parameters
  public static Collection<AndroidNdkCrosstoolsTestParams[]> data() {
    return ImmutableList.of(
        new AndroidNdkCrosstoolsTestParams[] {
          new AndroidNdkCrosstoolsTestParams(
              new NdkMajorRevisionR10()
                  .apiLevel(NullEventHandler.INSTANCE, REPOSITORY_NAME, "21"),
              NdkRelease.create("r10e (64-bit)"),
              "ndkfiles.txt",
              "ndkdirectories.txt"
          )
        },
        new AndroidNdkCrosstoolsTestParams[] {
          new AndroidNdkCrosstoolsTestParams(
              new NdkMajorRevisionR12()
                  .apiLevel(NullEventHandler.INSTANCE, REPOSITORY_NAME, "21"),
              NdkRelease.create("Pkg.Desc = Android NDK\nPkg.Revision = 12.1.297705\n"),
              "ndk12bfiles.txt",
              "ndk12bdirectories.txt"
          )
        });
  }

  // Fixture data and generated crosstools shared by all test methods below.
  private final ImmutableSet<String> ndkFiles;
  private final ImmutableSet<String> ndkDirectories;
  private final ImmutableList<CrosstoolRelease> crosstoolReleases;
  private final ImmutableMap<String, String> stlFilegroups;

  public AndroidNdkCrosstoolsTest(AndroidNdkCrosstoolsTestParams params) throws IOException {
    // NDK test data is based on the x86 64-bit Linux Android NDK.
    NdkPaths ndkPaths = new NdkPaths(REPOSITORY_NAME, HOST_PLATFORM, params.apiLevel);

    // Generate one crosstool release per STL implementation and collect the
    // STL filegroup name -> file glob pattern mappings along the way.
    ImmutableList.Builder<CrosstoolRelease> crosstools = ImmutableList.builder();
    ImmutableMap.Builder<String, String> stlFilegroupsBuilder = ImmutableMap.builder();
    for (StlImpl ndkStlImpl : StlImpls.get(ndkPaths)) {
      // Protos are immutable, so this can be shared between tests.
      CrosstoolRelease crosstool =
          params.getNdkMajorRevision().crosstoolRelease(ndkPaths, ndkStlImpl, HOST_PLATFORM);
      crosstools.add(crosstool);
      stlFilegroupsBuilder.putAll(ndkStlImpl.getFilegroupNamesAndFilegroupFileGlobPatterns());
    }

    crosstoolReleases = crosstools.build();
    stlFilegroups = stlFilegroupsBuilder.build();
    ndkFiles = params.getNdkFiles();
    ndkDirectories = params.getNdkDirectories();
  }

  /** Every path referenced by a generated toolchain must exist in the NDK fixture. */
  @Test
  public void testPathsExist() throws Exception {
    for (CrosstoolRelease crosstool : crosstoolReleases) {
      for (CToolchain toolchain : crosstool.getToolchainList()) {

        // Test that all tool paths exist.
        for (ToolPath toolpath : toolchain.getToolPathList()) {
          assertThat(ndkFiles).contains(toolpath.getPath());
        }

        // Test that all cxx_builtin_include_directory paths exist.
        for (String includeDirectory : toolchain.getCxxBuiltinIncludeDirectoryList()) {
          // Special case for builtin_sysroot.
          if (!includeDirectory.equals("%sysroot%/usr/include")) {
            String path = NdkPaths.stripRepositoryPrefix(includeDirectory);
            assertThat(ndkDirectories).contains(path);
          }
        }

        // Test that the builtin_sysroot path exists.
        {
          String builtinSysroot = NdkPaths.stripRepositoryPrefix(toolchain.getBuiltinSysroot());
          assertThat(ndkDirectories).contains(builtinSysroot);
        }

        // Test that all include directories added through unfiltered_cxx_flag exist.
        for (String flag : toolchain.getUnfilteredCxxFlagList()) {
          if (!flag.equals("-isystem")) {
            flag = NdkPaths.stripRepositoryPrefix(flag);
            assertThat(ndkDirectories).contains(flag);
          }
        }
      }
    }
  }

  /** Each STL filegroup glob must point at an existing directory and match some file. */
  @Test
  public void testStlFilegroupPathsExist() throws Exception {

    for (String fileglob : stlFilegroups.values()) {
      String fileglobNoWildcard = fileglob.substring(0, fileglob.lastIndexOf('/'));
      assertThat(ndkDirectories).contains(fileglobNoWildcard);
      assertThat(findFileByPattern(fileglob)).isTrue();
    }
  }

  // Crude glob check: matches by prefix (text before the first '*') and
  // suffix (text from the last '.') against the file-listing fixture.
  private boolean findFileByPattern(String globPattern) {

    String start = globPattern.substring(0, globPattern.indexOf('*'));
    String end = globPattern.substring(globPattern.lastIndexOf('.'));

    for (String f : ndkFiles) {
      if (f.startsWith(start) && f.endsWith(end)) {
        return true;
      }
    }
    return false;
  }

  /** Every toolchain must declare both dynamic and static runtime filegroups. */
  @Test
  public void testAllToolchainsHaveRuntimesFilegroup() {
    for (CrosstoolRelease crosstool : crosstoolReleases) {
      for (CToolchain toolchain : crosstool.getToolchainList()) {
        assertThat(toolchain.getDynamicRuntimesFilegroup()).isNotEmpty();
        assertThat(toolchain.getStaticRuntimesFilegroup()).isNotEmpty();
      }
    }
  }

  /** Every default toolchain identifier must refer to a declared toolchain. */
  @Test
  public void testDefaultToolchainsExist() {
    for (CrosstoolRelease crosstool : crosstoolReleases) {

      Set<String> toolchainNames = new HashSet<>();
      for (CToolchain toolchain : crosstool.getToolchainList()) {
        toolchainNames.add(toolchain.getToolchainIdentifier());
      }

      for (DefaultCpuToolchain defaultCpuToolchain : crosstool.getDefaultToolchainList()) {
        assertThat(toolchainNames).contains(defaultCpuToolchain.getToolchainIdentifier());
      }
    }
  }

  /**
   * Tests that each (cpu, compiler, glibc) triple in each crosstool is unique in that crosstool.
   */
  @Test
  public void testCrosstoolTriples() {
    StringBuilder errorBuilder = new StringBuilder();
    for (CrosstoolRelease crosstool : crosstoolReleases) {

      // Create a map of (cpu, compiler, glibc) triples -> toolchain.
      ImmutableMultimap.Builder<String, CToolchain> triples = ImmutableMultimap.builder();
      for (CToolchain toolchain : crosstool.getToolchainList()) {
        String triple = "(" + Joiner.on(", ").join(
            toolchain.getTargetCpu(),
            toolchain.getCompiler(),
            toolchain.getTargetLibc()) + ")";
        triples.put(triple, toolchain);
      }

      // Collect all the duplicate triples.
      for (Entry<String, Collection<CToolchain>> entry : triples.build().asMap().entrySet()) {
        if (entry.getValue().size() > 1) {
          errorBuilder.append(entry.getKey() + ": " + Joiner.on(", ").join(
              Collections2.transform(entry.getValue(), new Function<CToolchain, String>() {
                @Override public String apply(CToolchain toolchain) {
                  return toolchain.getToolchainIdentifier();
                }
              })));
          errorBuilder.append("\n");
        }
      }
      errorBuilder.append("\n");
    }

    // This is a rather awkward condition to test on, but collecting all the duplicates first is
    // the only way to make a useful error message rather than finding the errors one by one.
    String error = errorBuilder.toString().trim();
    if (!error.isEmpty()) {
      fail("Toolchains contain duplicate (cpu, compiler, glibc) triples:\n" + error);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.protocol.datatransfer; import static org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil.toProto; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.util.Arrays; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.StripedBlockInfo; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferTraceInfoProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpBlockChecksumProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpBlockGroupChecksumProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpCopyBlockProto; import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpReadBlockProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpReplaceBlockProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpRequestShortCircuitAccessProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpTransferBlockProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReleaseShortCircuitAccessRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ShortCircuitShmRequestProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.server.datanode.CachingStrategy;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.DataChecksum;
import org.apache.htrace.core.SpanId;
import org.apache.htrace.core.Tracer;

import com.google.protobuf.Message;

/**
 * Sender: the client side of {@link DataTransferProtocol}. Each method
 * serializes one operation onto the wire as: protocol version (short),
 * opcode (byte), then the delimited protobuf request.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class Sender implements DataTransferProtocol {
  private final DataOutputStream out;

  /** Create a sender for DataTransferProtocol with a output stream. */
  public Sender(final DataOutputStream out) {
    this.out = out;
  }

  /** Initialize an operation: write the protocol version followed by the opcode. */
  private static void op(final DataOutput out, final Op op
      ) throws IOException {
    out.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);
    op.write(out);
  }

  // Writes version + opcode, then the length-delimited proto, and flushes so
  // the datanode sees the complete request immediately.
  // NOTE(review): LOG is presumably the constant declared on the
  // DataTransferProtocol interface — confirm, it is not declared in this class.
  private static void send(final DataOutputStream out, final Op opcode,
      final Message proto) throws IOException {
    LOG.trace("Sending DataTransferOp {}: {}",
        proto.getClass().getSimpleName(), proto);
    op(out, opcode);
    proto.writeDelimitedTo(out);
    out.flush();
  }

  // Converts a CachingStrategy into its proto form; readahead/dropBehind are
  // optional and only set when non-null.
  static private CachingStrategyProto getCachingStrategy(
      CachingStrategy cachingStrategy) {
    CachingStrategyProto.Builder builder = CachingStrategyProto.newBuilder();
    if (cachingStrategy.getReadahead() != null) {
      builder.setReadahead(cachingStrategy.getReadahead());
    }
    if (cachingStrategy.getDropBehind() != null) {
      builder.setDropBehind(cachingStrategy.getDropBehind());
    }
    return builder.build();
  }

  @Override
  public void readBlock(final ExtendedBlock blk,
      final Token<BlockTokenIdentifier> blockToken,
      final String clientName,
      final long blockOffset,
      final long length,
      final boolean sendChecksum,
      final CachingStrategy cachingStrategy) throws IOException {

    OpReadBlockProto proto = OpReadBlockProto.newBuilder()
        .setHeader(DataTransferProtoUtil.buildClientHeader(blk, clientName,
            blockToken))
        .setOffset(blockOffset)
        .setLen(length)
        .setSendChecksums(sendChecksum)
        .setCachingStrategy(getCachingStrategy(cachingStrategy))
        .build();

    send(out, Op.READ_BLOCK, proto);
  }

  @Override
  public void writeBlock(final ExtendedBlock blk,
      final StorageType storageType,
      final Token<BlockTokenIdentifier> blockToken,
      final String clientName,
      final DatanodeInfo[] targets,
      final StorageType[] targetStorageTypes,
      final DatanodeInfo source,
      final BlockConstructionStage stage,
      final int pipelineSize,
      final long minBytesRcvd,
      final long maxBytesRcvd,
      final long latestGenerationStamp,
      DataChecksum requestedChecksum,
      final CachingStrategy cachingStrategy,
      final boolean allowLazyPersist,
      final boolean pinning,
      final boolean[] targetPinnings,
      final String storageId,
      final String[] targetStorageIds) throws IOException {
    ClientOperationHeaderProto header = DataTransferProtoUtil.buildClientHeader(
        blk, clientName, blockToken);

    ChecksumProto checksumProto =
        DataTransferProtoUtil.toProto(requestedChecksum);

    // Targets are converted with an offset of 1: the first pipeline entry is
    // this datanode itself and is not repeated in the proto.
    OpWriteBlockProto.Builder proto = OpWriteBlockProto.newBuilder()
        .setHeader(header)
        .setStorageType(PBHelperClient.convertStorageType(storageType))
        .addAllTargets(PBHelperClient.convert(targets, 1))
        .addAllTargetStorageTypes(
            PBHelperClient.convertStorageTypes(targetStorageTypes, 1))
        .setStage(toProto(stage))
        .setPipelineSize(pipelineSize)
        .setMinBytesRcvd(minBytesRcvd)
        .setMaxBytesRcvd(maxBytesRcvd)
        .setLatestGenerationStamp(latestGenerationStamp)
        .setRequestedChecksum(checksumProto)
        .setCachingStrategy(getCachingStrategy(cachingStrategy))
        .setAllowLazyPersist(allowLazyPersist)
        .setPinning(pinning)
        .addAllTargetPinnings(PBHelperClient.convert(targetPinnings, 1))
        .addAllTargetStorageIds(PBHelperClient.convert(targetStorageIds, 1));

    // source is only present for pipeline-recovery / replacement writes
    if (source != null) {
      proto.setSource(PBHelperClient.convertDatanodeInfo(source));
    }
    if (storageId != null) {
      proto.setStorageId(storageId);
    }

    send(out, Op.WRITE_BLOCK, proto.build());
  }

  @Override
  public void transferBlock(final ExtendedBlock blk,
      final Token<BlockTokenIdentifier> blockToken,
      final String clientName,
      final DatanodeInfo[] targets,
      final StorageType[] targetStorageTypes,
      final String[] targetStorageIds) throws IOException {

    OpTransferBlockProto proto = OpTransferBlockProto.newBuilder()
        .setHeader(DataTransferProtoUtil.buildClientHeader(
            blk, clientName, blockToken))
        .addAllTargets(PBHelperClient.convert(targets))
        .addAllTargetStorageTypes(
            PBHelperClient.convertStorageTypes(targetStorageTypes))
        .addAllTargetStorageIds(Arrays.asList(targetStorageIds))
        .build();

    send(out, Op.TRANSFER_BLOCK, proto);
  }

  @Override
  public void requestShortCircuitFds(final ExtendedBlock blk,
      final Token<BlockTokenIdentifier> blockToken,
      SlotId slotId, int maxVersion, boolean supportsReceiptVerification)
        throws IOException {
    // slotId is optional: only set when shared-memory slot tracking is in use
    OpRequestShortCircuitAccessProto.Builder builder =
        OpRequestShortCircuitAccessProto.newBuilder()
          .setHeader(DataTransferProtoUtil.buildBaseHeader(
            blk, blockToken)).setMaxVersion(maxVersion);
    if (slotId != null) {
      builder.setSlotId(PBHelperClient.convert(slotId));
    }
    builder.setSupportsReceiptVerification(supportsReceiptVerification);
    OpRequestShortCircuitAccessProto proto = builder.build();
    send(out, Op.REQUEST_SHORT_CIRCUIT_FDS, proto);
  }

  @Override
  public void releaseShortCircuitFds(SlotId slotId) throws IOException {
    ReleaseShortCircuitAccessRequestProto.Builder builder =
        ReleaseShortCircuitAccessRequestProto.newBuilder().
        setSlotId(PBHelperClient.convert(slotId));
    // Propagate the active trace span (if any) to the datanode
    SpanId spanId = Tracer.getCurrentSpanId();
    if (spanId.isValid()) {
      builder.setTraceInfo(DataTransferTraceInfoProto.newBuilder().
          setTraceId(spanId.getHigh()).
          setParentId(spanId.getLow()));
    }
    ReleaseShortCircuitAccessRequestProto proto = builder.build();
    send(out, Op.RELEASE_SHORT_CIRCUIT_FDS, proto);
  }

  @Override
  public void requestShortCircuitShm(String clientName) throws IOException {
    ShortCircuitShmRequestProto.Builder builder =
        ShortCircuitShmRequestProto.newBuilder().
        setClientName(clientName);
    // Propagate the active trace span (if any) to the datanode
    SpanId spanId = Tracer.getCurrentSpanId();
    if (spanId.isValid()) {
      builder.setTraceInfo(DataTransferTraceInfoProto.newBuilder().
          setTraceId(spanId.getHigh()).
          setParentId(spanId.getLow()));
    }
    ShortCircuitShmRequestProto proto = builder.build();
    send(out, Op.REQUEST_SHORT_CIRCUIT_SHM, proto);
  }

  @Override
  public void replaceBlock(final ExtendedBlock blk,
      final StorageType storageType,
      final Token<BlockTokenIdentifier> blockToken,
      final String delHint,
      final DatanodeInfo source,
      final String storageId) throws IOException {
    OpReplaceBlockProto.Builder proto = OpReplaceBlockProto.newBuilder()
        .setHeader(DataTransferProtoUtil.buildBaseHeader(blk, blockToken))
        .setStorageType(PBHelperClient.convertStorageType(storageType))
        .setDelHint(delHint)
        .setSource(PBHelperClient.convertDatanodeInfo(source));
    if (storageId != null) {
      proto.setStorageId(storageId);
    }

    send(out, Op.REPLACE_BLOCK, proto.build());
  }

  @Override
  public void copyBlock(final ExtendedBlock blk,
      final Token<BlockTokenIdentifier> blockToken) throws IOException {
    OpCopyBlockProto proto = OpCopyBlockProto.newBuilder()
        .setHeader(DataTransferProtoUtil.buildBaseHeader(blk, blockToken))
        .build();

    send(out, Op.COPY_BLOCK, proto);
  }

  @Override
  public void blockChecksum(final ExtendedBlock blk,
      final Token<BlockTokenIdentifier> blockToken) throws IOException {
    OpBlockChecksumProto proto = OpBlockChecksumProto.newBuilder()
        .setHeader(DataTransferProtoUtil.buildBaseHeader(blk, blockToken))
        .build();

    send(out, Op.BLOCK_CHECKSUM, proto);
  }

  @Override
  public void blockGroupChecksum(StripedBlockInfo stripedBlockInfo,
      Token<BlockTokenIdentifier> blockToken,
      long requestedNumBytes) throws IOException {
    OpBlockGroupChecksumProto proto = OpBlockGroupChecksumProto.newBuilder()
        .setHeader(DataTransferProtoUtil.buildBaseHeader(
            stripedBlockInfo.getBlock(), blockToken))
        .setDatanodes(PBHelperClient.convertToProto(
            stripedBlockInfo.getDatanodes()))
        .addAllBlockTokens(PBHelperClient.convert(
            stripedBlockInfo.getBlockTokens()))
        .addAllBlockIndices(PBHelperClient
            .convertBlockIndices(stripedBlockInfo.getBlockIndices()))
        .setEcPolicy(PBHelperClient.convertErasureCodingPolicy(
            stripedBlockInfo.getErasureCodingPolicy()))
        .setRequestedNumBytes(requestedNumBytes)
        .build();

    send(out, Op.BLOCK_GROUP_CHECKSUM, proto);
  }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.typeCook.deductive.resolver;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.refactoring.typeCook.Settings;
import com.intellij.refactoring.typeCook.Util;
import com.intellij.refactoring.typeCook.deductive.PsiExtendedTypeVisitor;
import com.intellij.refactoring.typeCook.deductive.builder.Constraint;
import com.intellij.refactoring.typeCook.deductive.builder.ReductionSystem;
import com.intellij.refactoring.typeCook.deductive.builder.Subtype;
import com.intellij.util.containers.EmptyIterator;
import com.intellij.util.graph.DFSTBuilder;
import com.intellij.util.graph.Graph;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntProcedure;
import gnu.trove.TObjectIntHashMap;

import java.util.*;

/**
 * Search tree for solving a {@link ReductionSystem} of subtype {@link Constraint}s over
 * {@link PsiTypeVariable}s (the "generify" / type-cook refactoring).
 *
 * Each node holds the binding accumulated so far ({@code myCurrentBinding}) and the
 * constraints still to be discharged ({@code myConstraints}); {@link #reduce()} picks one
 * reduction rule, which removes constraint(s) and spawns child trees ({@code mySons}),
 * one per alternative binding. Leaves with an empty constraint set are solutions and are
 * recorded in the shared {@link SolutionHolder}.
 *
 * @author db
 */
public class ResolverTree {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.typeCook.deductive.resolver.ResolverTree");

  // Child nodes produced by the last reduce() step; empty array means "leaf so far".
  private ResolverTree[] mySons = new ResolverTree[0];
  private final BindingFactory myBindingFactory;
  private Binding myCurrentBinding;
  // Shared across the whole tree: collects solutions found at the leaves.
  private final SolutionHolder mySolutions;
  private final Project myProject;
  private final TObjectIntHashMap<PsiTypeVariable> myBindingDegree; //How many times this type variable is bound in the system
  private final Settings mySettings;
  private boolean mySolutionFound;
  private Set<Constraint> myConstraints;

  /** Root-node constructor: seeds the tree from the reduction system and collapses variable cycles eagerly. */
  public ResolverTree(final ReductionSystem system) {
    myBindingFactory = new BindingFactory(system);
    mySolutions = new SolutionHolder();
    myCurrentBinding = myBindingFactory.create();
    myConstraints = system.getConstraints();
    myProject = system.getProject();
    myBindingDegree = calculateDegree();
    mySettings = system.getSettings();

    reduceCyclicVariables();
  }

  /** Child-node constructor: shares the factory/solution holder, takes the reduced constraints and new binding. */
  private ResolverTree(final ResolverTree parent, final Set<Constraint> constraints, final Binding binding) {
    myBindingFactory = parent.myBindingFactory;
    myCurrentBinding = binding;
    mySolutions = parent.mySolutions;
    myConstraints = constraints;
    myProject = parent.myProject;
    myBindingDegree = calculateDegree();
    mySettings = parent.mySettings;
  }

  /** Visitor that accumulates every {@link PsiTypeVariable} occurring in a type. */
  private static class PsiTypeVarCollector extends PsiExtendedTypeVisitor {
    final Set<PsiTypeVariable> mySet = new HashSet<>();

    @Override
    public Object visitTypeVariable(final PsiTypeVariable var) {
      mySet.add(var);

      return null;
    }

    public Set<PsiTypeVariable> getSet(final PsiType type) {
      type.accept(this);
      return mySet;
    }
  }

  /** True when {@code var} occurs on the right-hand side of more than one constraint. */
  private boolean isBoundElseWhere(final PsiTypeVariable var) {
    return myBindingDegree.get(var) != 1;
  }

  /**
   * A binding may be pruned (its alternative branch dropped) when, for every variable it
   * binds to a concrete type, that variable is not constrained anywhere else.
   * Never prunes in exhaustive mode.
   */
  private boolean canBePruned(final Binding b) {
    if (mySettings.exhaustive()) return false;

    for (final PsiTypeVariable var : b.getBoundVariables()) {
      final PsiType type = b.apply(var);

      if (!(type instanceof PsiTypeVariable) && isBoundElseWhere(var)) {
        return false;
      }
    }

    return true;
  }

  /** Counts, per type variable, how many constraint right-hand sides mention it. */
  private TObjectIntHashMap<PsiTypeVariable> calculateDegree() {
    final TObjectIntHashMap<PsiTypeVariable> result = new TObjectIntHashMap<>();

    for (final Constraint constr : myConstraints) {
      final PsiTypeVarCollector collector = new PsiTypeVarCollector();

      setDegree(collector.getSet(constr.getRight()), result);
    }

    return result;
  }

  /** Increments the occurrence count for every variable in {@code set}. */
  private void setDegree(final Set<PsiTypeVariable> set, TObjectIntHashMap<PsiTypeVariable> result) {
    for (final PsiTypeVariable var : set) {
      result.increment(var);
    }
  }

  /** Applies binding {@code b} to every current constraint, producing a fresh constraint set. */
  private Set<Constraint> apply(final Binding b) {
    final Set<Constraint> result = new HashSet<>();

    for (final Constraint constr : myConstraints) {
      result.add(constr.apply(b));
    }

    return result;
  }

  /** Same as {@link #apply(Binding)} but also folds in the {@code additional} constraints produced by a rule. */
  private Set<Constraint> apply(final Binding b, final Set<Constraint> additional) {
    final Set<Constraint> result = new HashSet<>();

    for (final Constraint constr : myConstraints) {
      result.add(constr.apply(b));
    }

    for (final Constraint constr : additional) {
      result.add(constr.apply(b));
    }

    return result;
  }

  /** Builds the child node for rule-binding {@code b}; null when the composed binding is inconsistent. */
  private ResolverTree applyRule(final Binding b) {
    final Binding newBinding = b != null ? myCurrentBinding.compose(b) : null;

    return newBinding == null ? null : new ResolverTree(this, apply(b), newBinding);
  }

  /** As {@link #applyRule(Binding)}, with extra constraints contributed by the rule. */
  private ResolverTree applyRule(final Binding b, final Set<Constraint> additional) {
    final Binding newBinding = b != null ? myCurrentBinding.compose(b) : null;

    return newBinding == null ? null : new ResolverTree(this, apply(b, additional), newBinding);
  }

  /**
   * Collapses cycles among var <: var constraints: builds the directed graph of such
   * constraints, finds strongly connected components, drops intra-component constraints
   * (they are trivially satisfiable once the variables are identified) and rebinds every
   * variable of a component to the component's representative.
   */
  private void reduceCyclicVariables() {
    final Set<PsiTypeVariable> nodes = new HashSet<>();
    final Set<Constraint> candidates = new HashSet<>();

    final Map<PsiTypeVariable, Set<PsiTypeVariable>> ins = new HashMap<>();
    final Map<PsiTypeVariable, Set<PsiTypeVariable>> outs = new HashMap<>();

    for (final Constraint constraint : myConstraints) {
      final PsiType left = constraint.getLeft();
      final PsiType right = constraint.getRight();

      if (left instanceof PsiTypeVariable && right instanceof PsiTypeVariable) {
        final PsiTypeVariable leftVar = (PsiTypeVariable)left;
        final PsiTypeVariable rightVar = (PsiTypeVariable)right;

        candidates.add(constraint);

        nodes.add(leftVar);
        nodes.add(rightVar);

        Set<PsiTypeVariable> in = ins.get(leftVar);
        Set<PsiTypeVariable> out = outs.get(rightVar);

        if (in == null) {
          final Set<PsiTypeVariable> newIn = new HashSet<>();

          newIn.add(rightVar);
          ins.put(leftVar, newIn);
        }
        else {
          in.add(rightVar);
        }

        if (out == null) {
          final Set<PsiTypeVariable> newOut = new HashSet<>();

          newOut.add(leftVar);
          outs.put(rightVar, newOut);
        }
        else {
          out.add(leftVar);
        }
      }
    }

    final DFSTBuilder<PsiTypeVariable> dfstBuilder = new DFSTBuilder<>(new Graph<PsiTypeVariable>() {
      @Override
      public Collection<PsiTypeVariable> getNodes() {
        return nodes;
      }

      @Override
      public Iterator<PsiTypeVariable> getIn(final PsiTypeVariable n) {
        final Set<PsiTypeVariable> in = ins.get(n);

        if (in == null) {
          return EmptyIterator.getInstance();
        }

        return in.iterator();
      }

      @Override
      public Iterator<PsiTypeVariable> getOut(final PsiTypeVariable n) {
        final Set<PsiTypeVariable> out = outs.get(n);

        if (out == null) {
          return EmptyIterator.getInstance();
        }

        return out.iterator();
      }
    });

    final TIntArrayList sccs = dfstBuilder.getSCCs();
    final Map<PsiTypeVariable, Integer> index = new HashMap<>();

    // Assign each variable the T-number of the first node of its SCC (the component id).
    sccs.forEach(new TIntProcedure() {
      int myTNumber;

      @Override
      public boolean execute(int size) {
        for (int j = 0; j < size; j++) {
          index.put(dfstBuilder.getNodeByTNumber(myTNumber + j), myTNumber);
        }
        myTNumber += size;
        return true;
      }
    });

    // Constraints whose two sides fall in the same SCC become tautologies after merging.
    for (final Constraint constraint : candidates) {
      if (index.get(constraint.getLeft()).equals(index.get(constraint.getRight()))) {
        myConstraints.remove(constraint);
      }
    }

    // Rebind every variable to its component representative; a null compose means conflict.
    Binding binding = myBindingFactory.create();

    for (final PsiTypeVariable fromVar : index.keySet()) {
      final PsiTypeVariable toVar = dfstBuilder.getNodeByNNumber(index.get(fromVar).intValue());

      if (!fromVar.equals(toVar)) {
        binding = binding.compose(myBindingFactory.create(fromVar, toVar));

        if (binding == null) {
          break;
        }
      }
    }

    if (binding != null && binding.nonEmpty()) {
      myCurrentBinding = myCurrentBinding.compose(binding);
      myConstraints = apply(binding);
    }
  }

  /**
   * Reduces a type <: type constraint. Tries up to four alternative bindings — rise, sink,
   * wildcard-rise, and (exhaustive mode only) "omit" — and creates one child per surviving
   * alternative, after removing duplicates and prunable branches.
   */
  private void reduceTypeType(final Constraint constr) {
    final PsiType left = constr.getLeft();
    final PsiType right = constr.getRight();
    final Set<Constraint> addendumRise = new HashSet<>();
    final Set<Constraint> addendumSink = new HashSet<>();
    final Set<Constraint> addendumWcrd = new HashSet<>();

    int numSons = 0;
    Binding riseBinding = myBindingFactory.rise(left, right, addendumRise);
    if (riseBinding != null) numSons++;
    Binding sinkBinding = myBindingFactory.sink(left, right, addendumSink);
    if (sinkBinding != null) numSons++;
    Binding wcrdBinding = mySettings.cookToWildcards() ? myBindingFactory.riseWithWildcard(left, right, addendumWcrd) : null;
    if (wcrdBinding != null) numSons++;
    Binding omitBinding = null;

    if (mySettings.exhaustive()) {
      final PsiClassType.ClassResolveResult rightResult = Util.resolveType(right);
      final PsiClassType.ClassResolveResult leftResult = Util.resolveType(left);

      final PsiClass rightClass = rightResult.getElement();
      final PsiClass leftClass = leftResult.getElement();

      if (rightClass != null && leftClass != null && rightClass.getManager().areElementsEquivalent(rightClass, leftClass)) {
        if (PsiUtil.typeParametersIterator(rightClass).hasNext()) {
          // "Omit" = leave the raw/Bottom parameterization alone; only valid if every
          // substitution value on the right is still Bottom.
          omitBinding = myBindingFactory.create();
          numSons++;
          for (PsiType type : rightResult.getSubstitutor().getSubstitutionMap().values()) {
            if (! (type instanceof Bottom)) {
              numSons--;
              omitBinding = null;
              break;
            }
          }
        }
      }
    }

    if (numSons == 0) return;

    // NOTE(review): if riseBinding is null but sink/wcrd produced sons, canBePruned(null)
    // would NPE on getBoundVariables() in non-exhaustive mode — confirm this is unreachable.
    if ((riseBinding != null && sinkBinding != null && riseBinding.equals(sinkBinding)) || canBePruned(riseBinding)) {
      numSons--;
      sinkBinding = null;
    }

    if (riseBinding != null && wcrdBinding != null && riseBinding.equals(wcrdBinding)) {
      numSons--;
      wcrdBinding = null;
    }

    myConstraints.remove(constr);

    mySons = new ResolverTree[numSons];

    int n = 0;

    if (riseBinding != null) {
      mySons[n++] = applyRule(riseBinding, addendumRise);
    }

    if (wcrdBinding != null) {
      mySons[n++] = applyRule(wcrdBinding, addendumWcrd);
    }

    if (omitBinding != null) {
      // NOTE(review): the omit branch reuses addendumWcrd, not its own addendum — verify intentional.
      mySons[n++] = applyRule(omitBinding, addendumWcrd);
    }

    if (sinkBinding != null) {
      mySons[n++] = applyRule(sinkBinding, addendumSink);
    }
  }

  /**
   * Recursively collects all types between {@code lowerBound} and {@code upperBound} along
   * the supertype chains (component-wise for arrays) into {@code holder}.
   */
  private void fillTypeRange(final PsiType lowerBound, final PsiType upperBound, final Set<PsiType> holder) {
    if (lowerBound instanceof PsiClassType && upperBound instanceof PsiClassType) {
      final PsiClassType.ClassResolveResult resultLower = ((PsiClassType)lowerBound).resolveGenerics();
      final PsiClassType.ClassResolveResult resultUpper = ((PsiClassType)upperBound).resolveGenerics();

      final PsiClass lowerClass = resultLower.getElement();
      final PsiClass upperClass = resultUpper.getElement();

      if (lowerClass != null && upperClass != null && !lowerClass.equals(upperClass)) {
        final PsiSubstitutor upperSubst = resultUpper.getSubstitutor();
        final PsiClass[] parents = upperClass.getSupers();
        final PsiElementFactory factory = JavaPsiFacade.getInstance(myProject).getElementFactory();

        for (final PsiClass parent : parents) {
          final PsiSubstitutor superSubstitutor = TypeConversionUtil.getClassSubstitutor(parent, upperClass, upperSubst);
          if (superSubstitutor != null) {
            final PsiClassType type = factory.createType(parent, superSubstitutor);
            holder.add(type);
            fillTypeRange(lowerBound, type, holder);
          }
        }
      }
    }
    else if (lowerBound instanceof PsiArrayType && upperBound instanceof PsiArrayType) {
      fillTypeRange(((PsiArrayType)lowerBound).getComponentType(), ((PsiArrayType)upperBound).getComponentType(), holder);
    }
  }

  /** Returns {lowerBound, upperBound} plus everything {@link #fillTypeRange} finds in between. */
  private PsiType[] getTypeRange(final PsiType lowerBound, final PsiType upperBound) {
    Set<PsiType> range = new HashSet<>();

    range.add(lowerBound);
    range.add(upperBound);

    fillTypeRange(lowerBound, upperBound, range);

    return range.toArray(PsiType.createArray(range.size()));
  }

  /**
   * Reduces an interval leftType <: var <: rightType: either binds var directly when the
   * bounds coincide, or enumerates every type in the rise/sink ranges and creates one
   * child per candidate binding.
   */
  private void reduceInterval(final Constraint left, final Constraint right) {
    final PsiType leftType = left.getLeft();
    final PsiType rightType = right.getRight();
    final PsiTypeVariable var = (PsiTypeVariable)left.getRight();

    if (leftType.equals(rightType)) {
      final Binding binding = myBindingFactory.create(var, leftType);

      myConstraints.remove(left);
      myConstraints.remove(right);

      mySons = new ResolverTree[]{applyRule(binding)};

      return;
    }

    Binding riseBinding = myBindingFactory.rise(leftType, rightType, null);
    Binding sinkBinding = myBindingFactory.sink(leftType, rightType, null);

    int indicator = (riseBinding == null ? 0 : 1) + (sinkBinding == null ? 0 : 1);

    if (indicator == 0) {
      return;
    }
    // NOTE(review): when only sinkBinding exists, canBePruned(riseBinding) receives null —
    // potential NPE in non-exhaustive mode; confirm whether this path can occur.
    else if ((indicator == 2 && riseBinding.equals(sinkBinding)) || canBePruned(riseBinding)) {
      indicator = 1;
      sinkBinding = null;
    }

    PsiType[] riseRange = PsiType.EMPTY_ARRAY;
    PsiType[] sinkRange = PsiType.EMPTY_ARRAY;

    if (riseBinding != null) {
      riseRange = getTypeRange(riseBinding.apply(rightType), riseBinding.apply(leftType));
    }

    if (sinkBinding != null) {
      sinkRange = getTypeRange(sinkBinding.apply(rightType), sinkBinding.apply(leftType));
    }

    if (riseRange.length + sinkRange.length > 0) {
      myConstraints.remove(left);
      myConstraints.remove(right);
    }

    mySons = new ResolverTree[riseRange.length + sinkRange.length];

    for (int i = 0; i < riseRange.length; i++) {
      final PsiType type = riseRange[i];

      mySons[i] = applyRule(riseBinding.compose(myBindingFactory.create(var, type)));
    }

    for (int i = 0; i < sinkRange.length; i++) {
      final PsiType type = sinkRange[i];

      mySons[i + riseRange.length] = applyRule(sinkBinding.compose(myBindingFactory.create(var, type)));
    }
  }

  /**
   * Picks and applies exactly one reduction rule for the current constraint set.
   * Pass 1 classifies constraints by which side is a variable and fires the first
   * interval / var-var / type-type reduction it finds; pass 2 handles type <: var chains;
   * pass 3 binds remaining var <: type chains (or defaults free variables to Bottom).
   */
  private void reduce() {
    if (myConstraints.isEmpty()) {
      return;
    }

    if (myCurrentBinding.isCyclic()) {
      reduceCyclicVariables();
    }

    final Map<PsiTypeVariable, Constraint> myTypeVarConstraints = new HashMap<>();
    final Map<PsiTypeVariable, Constraint> myVarTypeConstraints = new HashMap<>();

    for (final Constraint constr : myConstraints) {
      final PsiType left = constr.getLeft();
      final PsiType right = constr.getRight();

      // 0 = var<:var, 1 = type<:var, 2 = var<:type, 3 = type<:type.
      switch ((left instanceof PsiTypeVariable ? 0 : 1) + (right instanceof PsiTypeVariable ? 0 : 2)) {
        case 0:
          continue;

        case 1: {
          final Constraint c = myTypeVarConstraints.get(right);

          if (c == null) {
            final Constraint d = myVarTypeConstraints.get(right);

            if (d != null) {
              reduceInterval(constr, d);
              return;
            }

            myTypeVarConstraints.put((PsiTypeVariable)right, constr);
          }
          else {
            reduceTypeVar(constr, c);
            return;
          }
        }
        break;

        case 2: {
          final Constraint c = myVarTypeConstraints.get(left);

          if (c == null) {
            final Constraint d = myTypeVarConstraints.get(left);

            if (d != null) {
              reduceInterval(d, constr);
              return;
            }

            myVarTypeConstraints.put((PsiTypeVariable)left, constr);
          }
          else {
            reduceVarType(constr, c);
            return;
          }
          break;
        }

        case 3:
          reduceTypeType(constr);
          return;
      }
    }

    //T1 < a < b ... < T2
    {
      for (final Constraint constr : myConstraints) {
        final PsiType left = constr.getLeft();
        final PsiType right = constr.getRight();

        if (!(left instanceof PsiTypeVariable) && right instanceof PsiTypeVariable) {
          // Occurs check: the variable appears inside its own lower bound -> bind to Bottom.
          Set<PsiTypeVariable> bound = new PsiTypeVarCollector().getSet(left);

          if (bound.contains(right)) {
            myConstraints.remove(constr);
            mySons = new ResolverTree[]{applyRule(myBindingFactory.create(((PsiTypeVariable)right), Bottom.BOTTOM))};

            return;
          }

          final PsiManager manager = PsiManager.getInstance(myProject);
          final PsiType leftType = left instanceof PsiWildcardType ? ((PsiWildcardType)left).getBound() : left;
          final PsiType[] types = getTypeRange(PsiType.getJavaLangObject(manager, GlobalSearchScope.allScope(myProject)), leftType);

          mySons = new ResolverTree[types.length];

          if (types.length > 0) {
            myConstraints.remove(constr);
          }

          for (int i = 0; i < types.length; i++) {
            final PsiType type = types[i];

            mySons[i] = applyRule(myBindingFactory.create(((PsiTypeVariable)right), type));
          }

          return;
        }
      }
    }

    //T1 < a < b < ...
    {
      Set<PsiTypeVariable> haveLeftBound = new HashSet<>();

      Constraint target = null;
      Set<PsiTypeVariable> boundVariables = new HashSet<>();

      for (final Constraint constr : myConstraints) {
        final PsiType leftType = constr.getLeft();
        final PsiType rightType = constr.getRight();

        if (leftType instanceof PsiTypeVariable) {
          boundVariables.add((PsiTypeVariable)leftType);

          if (rightType instanceof PsiTypeVariable) {
            boundVariables.add((PsiTypeVariable)rightType);
            haveLeftBound.add(((PsiTypeVariable)rightType));
          }
          else if (!Util.bindsTypeVariables(rightType)) {
            target = constr;
          }
        }
      }

      if (target == null) {
        if (mySettings.exhaustive()) {
          for (final Constraint constr : myConstraints) {
            final PsiType left = constr.getLeft();
            final PsiType right = constr.getRight();

            PsiType[] range = null;
            PsiTypeVariable var = null;

            if (left instanceof PsiTypeVariable && !(right instanceof PsiTypeVariable)) {
              range = getTypeRange(PsiType.getJavaLangObject(PsiManager.getInstance(myProject), GlobalSearchScope.allScope(myProject)), right);
              var = (PsiTypeVariable)left;
            }

            if (range == null && right instanceof PsiTypeVariable && !(left instanceof PsiTypeVariable)) {
              range = new PsiType[]{right};
              var = (PsiTypeVariable)right;
            }

            if (range != null) {
              mySons = new ResolverTree[range.length];

              for (int i = 0; i < range.length; i++) {
                mySons[i] = applyRule(myBindingFactory.create(var, range[i]));
              }

              return;
            }
          }
        }

        // No eliminable constraint: default every still-free variable to Bottom.
        Binding binding = myBindingFactory.create();

        for (final PsiTypeVariable var : myBindingFactory.getBoundVariables()) {
          if (!myCurrentBinding.binds(var) && !boundVariables.contains(var)) {
            binding = binding.compose(myBindingFactory.create(var, Bottom.BOTTOM));
          }
        }

        if (!binding.nonEmpty()) {
          myConstraints.clear();
        }

        mySons = new ResolverTree[]{applyRule(binding)};
      }
      else {
        final PsiType type = target.getRight();
        final PsiTypeVariable var = (PsiTypeVariable)target.getLeft();

        // Optionally widen to "? extends type" when cooking to wildcards and var has no left bound.
        final Binding binding =
          (haveLeftBound.contains(var) || type instanceof PsiWildcardType) || !mySettings.cookToWildcards()
          ? myBindingFactory.create(var, type)
          : myBindingFactory.create(var, PsiWildcardType.createExtends(PsiManager.getInstance(myProject), type));

        myConstraints.remove(target);

        mySons = new ResolverTree[]{applyRule(binding)};
      }
    }
  }

  /** Debug-logs the residual constraint system and the binding of a solved leaf. */
  private void logSolution() {
    LOG.debug("Reduced system:");

    for (final Constraint constr : myConstraints) {
      LOG.debug(constr.toString());
    }

    LOG.debug("End of Reduced system.");
    LOG.debug("Reduced binding:");
    LOG.debug(myCurrentBinding.toString());
    LOG.debug("End of Reduced binding.");
  }

  /** Strategy for the symmetric var-side reductions ({@link #reduceTypeVar}/{@link #reduceVarType}). */
  private interface Reducer {
    LinkedList<Pair<PsiType, Binding>> unify(PsiType x, PsiType y);

    Constraint create(PsiTypeVariable var, PsiType type);

    PsiType getType(Constraint c);

    PsiTypeVariable getVar(Constraint c);
  }

  /** Two lower bounds on the same variable: intersect them. */
  private void reduceTypeVar(final Constraint x, final Constraint y) {
    reduceSideVar(x, y, new Reducer() {
      @Override
      public LinkedList<Pair<PsiType, Binding>> unify(final PsiType x, final PsiType y) {
        return myBindingFactory.intersect(x, y);
      }

      @Override
      public Constraint create(final PsiTypeVariable var, final PsiType type) {
        return new Subtype(type, var);
      }

      @Override
      public PsiType getType(final Constraint c) {
        return c.getLeft();
      }

      @Override
      public PsiTypeVariable getVar(final Constraint c) {
        return (PsiTypeVariable)c.getRight();
      }
    });
  }

  /** Two upper bounds on the same variable: union them. */
  private void reduceVarType(final Constraint x, final Constraint y) {
    reduceSideVar(x, y, new Reducer() {
      @Override
      public LinkedList<Pair<PsiType, Binding>> unify(final PsiType x, final PsiType y) {
        return myBindingFactory.union(x, y);
      }

      @Override
      public Constraint create(final PsiTypeVariable var, final PsiType type) {
        return new Subtype(var, type);
      }

      @Override
      public PsiType getType(final Constraint c) {
        return c.getRight();
      }

      @Override
      public PsiTypeVariable getVar(final Constraint c) {
        return (PsiTypeVariable)c.getLeft();
      }
    });
  }

  /**
   * Shared body of the two reductions above: replaces constraints {@code x} and {@code y}
   * by one candidate constraint per unification result, spawning a child for each.
   * Note: each iteration removes the previous candidate, so only the last one stays in
   * this node's set; the children receive their own binding-adjusted copies.
   */
  private void reduceSideVar(final Constraint x, final Constraint y, final Reducer reducer) {
    final PsiTypeVariable var = reducer.getVar(x);

    final PsiType xType = reducer.getType(x);
    final PsiType yType = reducer.getType(y);

    final LinkedList<Pair<PsiType, Binding>> union = reducer.unify(xType, yType);

    if (union.isEmpty()) {
      return;
    }

    myConstraints.remove(x);
    myConstraints.remove(y);

    mySons = new ResolverTree[union.size()];

    int i = 0;

    Constraint prev = null;

    for (final Pair<PsiType, Binding> pair : union) {
      if (prev != null) {
        myConstraints.remove(prev);
      }

      prev = reducer.create(var, pair.getFirst());

      myConstraints.add(prev);

      mySons[i++] = applyRule(pair.getSecond());
    }
  }

  /**
   * Depth-first search over the tree: reduce, recurse into children (freeing each as it is
   * exhausted), record a solution at empty-constraint leaves. In cook-to-wildcards,
   * non-exhaustive mode the search stops at the first solved child.
   */
  public void resolve() {
    reduce();

    if (mySons.length > 0) {
      for (int i = 0; i < mySons.length; i++) {

        if (mySons[i] != null) {
          mySons[i].resolve();
          if (!mySettings.exhaustive() && mySettings.cookToWildcards() && mySons[i].mySolutionFound) break;
          mySons[i] = null; // release the subtree eagerly to bound memory use
        }
      }
    }
    else {
      if (myConstraints.isEmpty()) {
        logSolution();

        mySolutions.putSolution(myCurrentBinding);
        mySolutionFound = true;
      }
    }
  }

  /** Delegates to the shared holder for the best binding found anywhere in the tree. */
  public Binding getBestSolution() {
    return mySolutions.getBestSolution();
  }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.09.05 at 06:41:40 PM IST // package com.mozu.qbintegration.model.qbmodel.productquery; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for ItemPaymentRetType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="ItemPaymentRetType"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="ListID" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="TimeCreated" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="TimeModified" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="EditSequence" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="Name" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="BarCodeValue" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="IsActive" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ClassRef" type="{}ClassRefType"/> * &lt;element name="ItemDesc" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="DepositToAccountRef" type="{}DepositToAccountRefType"/> * &lt;element name="PaymentMethodRef" type="{}PaymentMethodRefType"/> * &lt;element name="ExternalGUID" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="DataExtRet" type="{}DataExtRetType"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ 
@XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "ItemPaymentRetType", propOrder = { "listID", "timeCreated", "timeModified", "editSequence", "name", "barCodeValue", "isActive", "classRef", "itemDesc", "depositToAccountRef", "paymentMethodRef", "externalGUID", "dataExtRet" }) public class ItemPaymentRetType { @XmlElement(name = "ListID", required = true) protected String listID; @XmlElement(name = "TimeCreated", required = true) protected String timeCreated; @XmlElement(name = "TimeModified", required = true) protected String timeModified; @XmlElement(name = "EditSequence", required = true) protected String editSequence; @XmlElement(name = "Name", required = true) protected String name; @XmlElement(name = "BarCodeValue", required = true) protected String barCodeValue; @XmlElement(name = "IsActive", required = true) protected String isActive; @XmlElement(name = "ClassRef", required = true) protected ClassRefType classRef; @XmlElement(name = "ItemDesc", required = true) protected String itemDesc; @XmlElement(name = "DepositToAccountRef", required = true) protected DepositToAccountRefType depositToAccountRef; @XmlElement(name = "PaymentMethodRef", required = true) protected PaymentMethodRefType paymentMethodRef; @XmlElement(name = "ExternalGUID", required = true) protected String externalGUID; @XmlElement(name = "DataExtRet", required = true) protected DataExtRetType dataExtRet; /** * Gets the value of the listID property. * * @return * possible object is * {@link String } * */ public String getListID() { return listID; } /** * Sets the value of the listID property. * * @param value * allowed object is * {@link String } * */ public void setListID(String value) { this.listID = value; } /** * Gets the value of the timeCreated property. * * @return * possible object is * {@link String } * */ public String getTimeCreated() { return timeCreated; } /** * Sets the value of the timeCreated property. 
* * @param value * allowed object is * {@link String } * */ public void setTimeCreated(String value) { this.timeCreated = value; } /** * Gets the value of the timeModified property. * * @return * possible object is * {@link String } * */ public String getTimeModified() { return timeModified; } /** * Sets the value of the timeModified property. * * @param value * allowed object is * {@link String } * */ public void setTimeModified(String value) { this.timeModified = value; } /** * Gets the value of the editSequence property. * * @return * possible object is * {@link String } * */ public String getEditSequence() { return editSequence; } /** * Sets the value of the editSequence property. * * @param value * allowed object is * {@link String } * */ public void setEditSequence(String value) { this.editSequence = value; } /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets the value of the barCodeValue property. * * @return * possible object is * {@link String } * */ public String getBarCodeValue() { return barCodeValue; } /** * Sets the value of the barCodeValue property. * * @param value * allowed object is * {@link String } * */ public void setBarCodeValue(String value) { this.barCodeValue = value; } /** * Gets the value of the isActive property. * * @return * possible object is * {@link String } * */ public String getIsActive() { return isActive; } /** * Sets the value of the isActive property. * * @param value * allowed object is * {@link String } * */ public void setIsActive(String value) { this.isActive = value; } /** * Gets the value of the classRef property. 
* * @return * possible object is * {@link ClassRefType } * */ public ClassRefType getClassRef() { return classRef; } /** * Sets the value of the classRef property. * * @param value * allowed object is * {@link ClassRefType } * */ public void setClassRef(ClassRefType value) { this.classRef = value; } /** * Gets the value of the itemDesc property. * * @return * possible object is * {@link String } * */ public String getItemDesc() { return itemDesc; } /** * Sets the value of the itemDesc property. * * @param value * allowed object is * {@link String } * */ public void setItemDesc(String value) { this.itemDesc = value; } /** * Gets the value of the depositToAccountRef property. * * @return * possible object is * {@link DepositToAccountRefType } * */ public DepositToAccountRefType getDepositToAccountRef() { return depositToAccountRef; } /** * Sets the value of the depositToAccountRef property. * * @param value * allowed object is * {@link DepositToAccountRefType } * */ public void setDepositToAccountRef(DepositToAccountRefType value) { this.depositToAccountRef = value; } /** * Gets the value of the paymentMethodRef property. * * @return * possible object is * {@link PaymentMethodRefType } * */ public PaymentMethodRefType getPaymentMethodRef() { return paymentMethodRef; } /** * Sets the value of the paymentMethodRef property. * * @param value * allowed object is * {@link PaymentMethodRefType } * */ public void setPaymentMethodRef(PaymentMethodRefType value) { this.paymentMethodRef = value; } /** * Gets the value of the externalGUID property. * * @return * possible object is * {@link String } * */ public String getExternalGUID() { return externalGUID; } /** * Sets the value of the externalGUID property. * * @param value * allowed object is * {@link String } * */ public void setExternalGUID(String value) { this.externalGUID = value; } /** * Gets the value of the dataExtRet property. 
* * @return * possible object is * {@link DataExtRetType } * */ public DataExtRetType getDataExtRet() { return dataExtRet; } /** * Sets the value of the dataExtRet property. * * @param value * allowed object is * {@link DataExtRetType } * */ public void setDataExtRet(DataExtRetType value) { this.dataExtRet = value; } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.simplesystemsmanagement.model; import java.io.Serializable; import javax.annotation.Generated; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/DescribeAssociationExecutions" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DescribeAssociationExecutionsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * A list of the executions for the specified association ID. * </p> */ private com.amazonaws.internal.SdkInternalList<AssociationExecution> associationExecutions; /** * <p> * The token for the next set of items to return. Use this token to get the next set of results. * </p> */ private String nextToken; /** * <p> * A list of the executions for the specified association ID. * </p> * * @return A list of the executions for the specified association ID. */ public java.util.List<AssociationExecution> getAssociationExecutions() { if (associationExecutions == null) { associationExecutions = new com.amazonaws.internal.SdkInternalList<AssociationExecution>(); } return associationExecutions; } /** * <p> * A list of the executions for the specified association ID. * </p> * * @param associationExecutions * A list of the executions for the specified association ID. 
*/ public void setAssociationExecutions(java.util.Collection<AssociationExecution> associationExecutions) { if (associationExecutions == null) { this.associationExecutions = null; return; } this.associationExecutions = new com.amazonaws.internal.SdkInternalList<AssociationExecution>(associationExecutions); } /** * <p> * A list of the executions for the specified association ID. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setAssociationExecutions(java.util.Collection)} or * {@link #withAssociationExecutions(java.util.Collection)} if you want to override the existing values. * </p> * * @param associationExecutions * A list of the executions for the specified association ID. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeAssociationExecutionsResult withAssociationExecutions(AssociationExecution... associationExecutions) { if (this.associationExecutions == null) { setAssociationExecutions(new com.amazonaws.internal.SdkInternalList<AssociationExecution>(associationExecutions.length)); } for (AssociationExecution ele : associationExecutions) { this.associationExecutions.add(ele); } return this; } /** * <p> * A list of the executions for the specified association ID. * </p> * * @param associationExecutions * A list of the executions for the specified association ID. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeAssociationExecutionsResult withAssociationExecutions(java.util.Collection<AssociationExecution> associationExecutions) { setAssociationExecutions(associationExecutions); return this; } /** * <p> * The token for the next set of items to return. Use this token to get the next set of results. * </p> * * @param nextToken * The token for the next set of items to return. Use this token to get the next set of results. 
*/ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * The token for the next set of items to return. Use this token to get the next set of results. * </p> * * @return The token for the next set of items to return. Use this token to get the next set of results. */ public String getNextToken() { return this.nextToken; } /** * <p> * The token for the next set of items to return. Use this token to get the next set of results. * </p> * * @param nextToken * The token for the next set of items to return. Use this token to get the next set of results. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeAssociationExecutionsResult withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getAssociationExecutions() != null) sb.append("AssociationExecutions: ").append(getAssociationExecutions()).append(","); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DescribeAssociationExecutionsResult == false) return false; DescribeAssociationExecutionsResult other = (DescribeAssociationExecutionsResult) obj; if (other.getAssociationExecutions() == null ^ this.getAssociationExecutions() == null) return false; if (other.getAssociationExecutions() != null && other.getAssociationExecutions().equals(this.getAssociationExecutions()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getAssociationExecutions() == null) ? 0 : getAssociationExecutions().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); return hashCode; } @Override public DescribeAssociationExecutionsResult clone() { try { return (DescribeAssociationExecutionsResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
package View;

import java.util.ArrayList;
import java.util.List;
import SuperAwesomeDemo.CollisionEngineAwesome;
import SuperAwesomeDemo.RangeEngineAwesome;
import javafx.application.Application;
import javafx.stage.Stage;
import xml.DataManager;
import engine.events.ConcreteQueue;
import engine.events.ConstantSpacingWave;
import engine.events.GameObjectQueue;
import engine.events.RandomSpanWave;
import engine.events.TimedEvent;
import engine.game.ConcreteGame;
import engine.game.ConcreteLevel;
import engine.game.ConcreteLevelBoard;
import engine.game.Game;
import engine.game.Level;
import engine.game.Player;
import engine.game.PlayerUnit;
import engine.game.StoryBoard;
import engine.game.Timer;
import engine.game.TimerConcrete;
import engine.gameobject.GameObject;
import engine.gameobject.GameObjectSimple;
import engine.gameobject.GameObjectSimpleTest;
import engine.gameobject.MoverUser;
import engine.gameobject.PointSimple;
import engine.gameobject.behaviors.PlayerChangeBehavior;
import engine.gameobject.test.TestTower;
import engine.gameobject.test.bloons.BlueBloon;
import engine.gameobject.weapon.firingstrategy.UserStrategy;
import engine.goals.Goal;
import engine.goals.HealthGoal;
import engine.goals.NoCurrentEventGoal;
import engine.goals.ScoreGoal;
import engine.goals.TimerGoal;
import engine.pathfinding.PathFixed;
import engine.shop.ShopModel;
import engine.shop.ShopModelSimple;
import engine.shop.wallet.ConcreteWallet;
import engine.shop.wallet.Wallet;
import gameworld.FixedWorld;
import gameworld.GameWorld;
import gameworld.StructurePlacementException;

/**
 * Standalone writer that assembles a complete demo {@link Game} in code
 * (player, world, levels, shop, wave story board) and serializes it to XML
 * via {@link DataManager#writeToXML}. Run through {@link #main(String[])};
 * the JavaFX {@link #start(Stage)} hook is unused (empty stub).
 */
public class GameWriter extends Application {

    static GameWriter myWriter;

    // Output path for the serialized demo game.
    private static final String FILE_DESTINATION = "src/SuperAwesomeDemo/SuperAwesomeGame.xml";

    /**
     * Builds the demo story board: one randomized wave of 10 {@link BlueBloon}s
     * (wired with money/score and health behaviors targeting {@code player})
     * followed by one constant-spacing wave of 10 test objects.
     *
     * @param world  world the waves spawn into
     * @param player player whose score/health the wave behaviors mutate
     * @return story board containing both waves
     */
    private StoryBoard makeStoryBoard (GameWorld world, Player player) {
        List<GameObject> waveObjects = new ArrayList<>();
        PlayerChangeBehavior pointBehavior = new PlayerChangeBehavior();
        PlayerChangeBehavior healthBehavior = new PlayerChangeBehavior();
        // healthBehavior is attached as the end-of-path penalty; pointBehavior
        // as the on-death reward (money + points). Both reference the same player.
        healthBehavior.addPlayer(player);
        healthBehavior.setHealth(10);
        pointBehavior.addPlayer(player);
        pointBehavior.setMoney(10);
        pointBehavior.setPoint(10);
        for (int i = 0; i < 10; i++) {
            BlueBloon toAdd = new BlueBloon();
            toAdd.addOnDeathBehavior(pointBehavior);
            toAdd.addEndOfPathBehavior(healthBehavior);
            waveObjects.add(toAdd);
        }
        GameObjectQueue q = new ConcreteQueue(waveObjects);
        // Wave 1: random spawn spacing, params (2, 20) — NOTE(review): the
        // meaning of the two numeric arguments is defined by RandomSpanWave;
        // confirm units (seconds? ticks?) against that class.
        TimedEvent wave = new RandomSpanWave(2, 20, q, world);
        // Award a fixed score bonus when the wave completes.
        wave.setEndingAction(e -> player.changeScore(57));
        List<GameObject> waveObjects2 = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            waveObjects2.add(new GameObjectSimpleTest());
        }
        GameObjectQueue q2 = new ConcreteQueue(waveObjects2);
        // Wave 2: constant spacing of 2 between spawns.
        TimedEvent wave2 = new ConstantSpacingWave(2, q2, world);
        StoryBoard story = makeStoryBoard(wave);
        story.addEvent(wave2);
        return story;
    }

    /**
     * Builds a two-level board. Level one uses the supplied world/story with a
     * 200-point win goal and lose goals of zero health or timer expiry; level
     * two is a fresh 10x10 world with a 300-point win goal and the same lose
     * goals.
     *
     * @param world    world for the first level
     * @param story    story board for the first level
     * @param myPlayer player the goals observe
     * @return populated level board
     */
    private ConcreteLevelBoard makeLevelBoard (GameWorld world, StoryBoard story, Player myPlayer) {
        ConcreteLevelBoard board = new ConcreteLevelBoard();
        HealthGoal healthy = new HealthGoal(myPlayer, 0);
        Timer t = new TimerConcrete(3, 5, "time");
        // "list" = lose conditions shared by both levels (health <= 0, timer <= 0).
        List<Goal> list = new ArrayList<Goal>();
        list.add(healthy);
        list.add(new TimerGoal(t, 0));
        // "list2"/"list3" = win conditions for level one / level two.
        ScoreGoal score = new ScoreGoal(myPlayer, 200);
        List<Goal> list2 = new ArrayList<Goal>();
        list2.add(score);
        List<Goal> list3 = new ArrayList<Goal>();
        ScoreGoal score2 = new ScoreGoal(myPlayer, 300);
        list3.add(score2);
        Level levelOne = new ConcreteLevel("images/Park_Path.png", list2, list, world, story);
        levelOne.addTimer(t);
        board.addLevel(levelOne);
        board.addLevel(new ConcreteLevel("images/example_path.jpeg", list3, list, new FixedWorld(10,10), new StoryBoard()));
        return board;
    }

    /**
     * Creates the demo player with 100 health, 100 starting score, and a
     * wallet backed by the score unit.
     *
     * @return freshly constructed player
     */
    public Player makePlayer () {
        PlayerUnit health = new PlayerUnit(100, "Health");
        PlayerUnit scoreUnit = new PlayerUnit(100, "Score");
        Wallet wallet = new ConcreteWallet(scoreUnit);
        Player myPlayer = new Player("PlayerName", health, scoreUnit, wallet);
        return myPlayer;
    }

    /**
     * Wraps a single wave in a new story board.
     *
     * @param wave first event of the new story board
     * @return story board seeded with {@code wave}
     */
    private StoryBoard makeStoryBoard (TimedEvent wave) {
        StoryBoard story = new StoryBoard(wave);
        return story;
    }

    /**
     * Builds the 10x10 demo world: awesome collision/range engines, one
     * user-controlled test tower at (300,300), and a path loaded from XML.
     *
     * @return configured world
     */
    public GameWorld makeWorld () {
        FixedWorld world = new FixedWorld(10,10);
        world.setCollisionEngine(new CollisionEngineAwesome());
        world.setRangeEngine(new RangeEngineAwesome());
        // world.addObject(new TestTower(2, 330, 130));
        //world.addObject(new TestTower(5, 270, 270));
        // world.addObject(new TestTower(3, 355, 455));
        GameObjectSimple g = new TestTower(2, 330, 330);
        // Wire the tower for direct user control: the mover and the firing
        // strategy both share the tower's graphic.
        MoverUser m = new MoverUser();
        m.setGraphic(g.getGraphic());
        g.setMover(m);
        UserStrategy pewpew = new UserStrategy();
        pewpew.setGraphic(g.getGraphic());
        g.getWeapon().setFiringStrategy(pewpew);
        g.getWeapon().setFiringRate(100);
        try {
            world.addObject(g, new PointSimple(300,300));
        }
        catch (StructurePlacementException e) {
            // Best-effort: placement failure is only reported, the world is
            // still returned without the tower.
            e.printStackTrace();
        }
        // NOTE(review): path is loaded from a hard-coded source-tree location;
        // this breaks when run outside the project root — confirm intended.
        world.setPath(DataManager.readFromXML(PathFixed.class, "src/xml/Path.xml"));
        return world;
    }

    /** Entry point: builds the demo game and writes it to {@link #FILE_DESTINATION}. */
    public static void main (String[] args) {
        myWriter = new GameWriter();
        myWriter.writeGame();
    }

    /**
     * Assembles player, world, shop and game, serializes the game to XML,
     * prints a confirmation, and terminates the JVM.
     */
    private void writeGame () {
        Player myPlayer = makePlayer();
        GameWorld myWorld = makeWorld();
        ShopModel myShop = new ShopModelSimple(myWorld, myPlayer, 1);
        myShop.addPurchasable(new TestTower(0,0,0));
        myShop.addPurchasable(new TestTower(1,0,0));
        Game myGame = makeGame(myPlayer, myWorld, myShop);
        DataManager.writeToXML(myGame, FILE_DESTINATION);
        System.out.println("Written");
        // Hard exit: prevents the (never-started) JavaFX runtime from keeping
        // the process alive.
        System.exit(0);
    }

    /**
     * Creates a simple shop for the given player and world (markup factor 1).
     *
     * @param player purchasing player
     * @param world  world purchases are placed into
     * @return new shop model
     */
    public ShopModel makeShop (Player player, GameWorld world) {
        return new ShopModelSimple(world, player, 1);
    }

    /**
     * Assembles the final game: story board, level board, shop, and a single
     * "wave" button that starts the next story event while no event is active.
     *
     * @param myPlayer player of the game
     * @param myWorld  world of the first level
     * @param myShop   shop attached to the game
     * @return fully wired game instance
     */
    public Game makeGame (Player myPlayer, GameWorld myWorld, ShopModel myShop) {
        StoryBoard myStory = makeStoryBoard(myWorld, myPlayer);
        Game myGame = new ConcreteGame(myShop, myPlayer, makeLevelBoard(myWorld, myStory, myPlayer), new ArrayList<ButtonWrapper>());
        ButtonWrapper wrap = new ButtonWrapper("wave", e -> myStory.startNextEvent(), new NoCurrentEventGoal(myStory));
        myGame.addButton(wrap);
        return myGame;
    }

    /** Unused JavaFX lifecycle hook — this class is only run via {@link #main}. */
    @Override
    public void start (Stage primaryStage) throws Exception {
        // Intentionally empty: GameWriter never shows a UI.
    }
}
package de.tudarmstadt.lt.structuredtopics;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import de.tudarmstadt.lt.structuredtopics.Main.InputMode;
import de.tudarmstadt.lt.structuredtopics.ddts.ClusterWord;
import de.tudarmstadt.lt.structuredtopics.ddts.Sense;
import de.tudarmstadt.lt.structuredtopics.ddts.SenseCluster;
import de.tudarmstadt.lt.structuredtopics.ddts.SingleWord;

/**
 * I/O and filtering helpers for sense-cluster (DDT) files: gzip-aware
 * readers/writers, POS-tag and regex based cluster filtering, and
 * serialization of clusters back to the tab-separated cluster format.
 */
public class Utils {

	private static final Logger LOG = LoggerFactory.getLogger(Utils.class);

	/**
	 * Decides whether a word group should be removed from a cluster.
	 */
	private static interface Filter {
		/**
		 * @param words the (possibly multi-token) word to test
		 * @return {@code true} if the word should be REMOVED
		 */
		boolean filter(List<SingleWord> words);
	}

	/**
	 * Removes word groups containing a token whose POS tag is not in the
	 * allowed set (comparison is case-insensitive).
	 */
	protected static class PosTagFilter implements Filter {

		private Set<String> tags;

		public PosTagFilter(Set<String> tags) {
			this.tags = tags;
		}

		@Override
		public boolean filter(List<SingleWord> words) {
			// FIX: the previous implementation returned true as soon as ANY
			// allowed tag differed from ANY token's POS, which removed nearly
			// everything whenever more than one tag was configured. A word
			// group is kept iff every token's POS matches at least one of the
			// allowed tags.
			for (SingleWord w : words) {
				boolean allowed = false;
				for (String tag : tags) {
					if (tag.equalsIgnoreCase(w.getPos())) {
						allowed = true;
						break;
					}
				}
				if (!allowed) {
					return true;
				}
			}
			return false;
		}
	}

	/**
	 * Removes word groups containing a token that does NOT match the given
	 * regex. A trailing {@code #POS} suffix is stripped before matching.
	 * NOTE: the shared {@link Matcher} makes instances non-thread-safe.
	 */
	protected static class RegexFilter implements Filter {

		private Matcher matcher;

		public RegexFilter(String regex) {
			// Compile once; reuse via Matcher#reset for every token.
			this.matcher = Pattern.compile(regex).matcher("");
		}

		@Override
		public boolean filter(List<SingleWord> words) {
			for (SingleWord cw : words) {
				String word = cw.getText();
				int indexOfFirstHash = word.indexOf("#");
				if (indexOfFirstHash != -1) {
					// Match only the surface form, not the "#POS" suffix.
					String withoutPosTag = word.substring(0, indexOfFirstHash);
					matcher.reset(withoutPosTag);
				} else {
					matcher.reset(word);
				}
				if (!matcher.matches()) {
					return true;
				}
			}
			return false;
		}
	}

	/**
	 * Opens a buffered UTF-8 reader; transparently gunzips files ending in
	 * {@code .gz}.
	 *
	 * @throws IOException if the file cannot be opened
	 */
	public static BufferedReader openReader(File file) throws IOException {
		InputStream in = new FileInputStream(file);
		if (file.getName().endsWith(".gz")) {
			in = new GZIPInputStream(in);
		}
		Reader reader = new InputStreamReader(in, "UTF-8");
		return new BufferedReader(reader);
	}

	/**
	 * Opens a buffered UTF-8 writer; transparently gzips files ending in
	 * {@code .gz}.
	 *
	 * @param append whether to append to an existing file
	 * @throws IOException if the file cannot be opened
	 */
	public static BufferedWriter openWriter(File file, boolean append) throws IOException {
		OutputStream out = new FileOutputStream(file, append);
		if (file.getName().endsWith(".gz")) {
			out = new GZIPOutputStream(out);
		}
		Writer writer = new OutputStreamWriter(out, "UTF-8");
		return new BufferedWriter(writer);
	}

	/**
	 * @deprecated use {@link #openReader(File)}, which detects gzip by file
	 *             extension instead of an explicit mode.
	 */
	@Deprecated
	public static BufferedReader openReader(File input, InputMode mode) throws IOException {
		InputStream in = new FileInputStream(input);
		if (mode == InputMode.GZ) {
			in = new GZIPInputStream(in);
		}
		Reader reader = new InputStreamReader(in, "UTF-8");
		return new BufferedReader(reader);
	}

	/**
	 * @deprecated use {@link #openWriter(File, boolean)} with a {@code .gz}
	 *             file name instead.
	 */
	@Deprecated
	public static BufferedWriter openGzipWriter(File output) throws IOException {
		OutputStream out = new FileOutputStream(output);
		out = new GZIPOutputStream(out);
		Writer writer = new OutputStreamWriter(out, "UTF-8");
		return new BufferedWriter(writer);
	}

	/**
	 * Removes (in place) all senses and cluster words whose POS tags are not
	 * in {@code tags}.
	 */
	public static void filterClustersByPosTag(List<SenseCluster> clusters, Set<String> tags) {
		LOG.info("Filtering by POS-Tag {}", StringUtils.join(tags));
		filterClusters(clusters, new PosTagFilter(tags));
	}

	/**
	 * Removes (in place) all senses and cluster words not matching
	 * {@code regex}.
	 */
	public static void filterClustersByRegEx(List<SenseCluster> clusters, String regex) {
		LOG.info("Filtering by regex {}", regex);
		filterClusters(clusters, new RegexFilter(regex));
	}

	/**
	 * Applies {@code filter} to every sense and cluster word, removing
	 * filtered entries in place. Iterates backwards so removal by index stays
	 * valid. A filter that throws is treated as "remove this sense".
	 */
	private static void filterClusters(List<SenseCluster> clusters, Filter filter) {
		int removedClusterWords = 0;
		int removedSenses = 0;
		for (int i = clusters.size() - 1; i >= 0; i--) {
			if (i % 1000 == 0) {
				LOG.info("Filtering cluster {}/{}", clusters.size() - 1 - i, clusters.size());
			}
			SenseCluster cluster = clusters.get(i);
			Sense sense = cluster.getSense();
			boolean keepSenseWord = false;
			try {
				if (!filter.filter(sense.getWords())) {
					keepSenseWord = true;
				}
			} catch (Exception e) {
				LOG.error("Filter {} threw an exeption while filtering word {}. Word will be removed",
						filter.getClass(), sense.getFullWord(), e);
			}
			if (keepSenseWord) {
				// filter cluster words
				List<ClusterWord> clusterWords = cluster.getClusterWords();
				for (int j = clusterWords.size() - 1; j >= 0; j--) {
					ClusterWord clusterWord = clusterWords.get(j);
					if (filter.filter(clusterWord.getWords())) {
						clusterWords.remove(j);
						removedClusterWords++;
					}
				}
				// if no words left -> remove sense
				if (clusterWords.isEmpty()) {
					clusters.remove(i);
					removedSenses++;
				}
			} else {
				clusters.remove(i);
				removedSenses++;
			}
		}
		LOG.info("Filtered {} cluster words and {} entire senses", removedClusterWords, removedSenses);
	}

	/**
	 * Writes clusters in the legacy map representation to a gzipped file, one
	 * line per (sense word, sense id) with comma-separated weighted features.
	 *
	 * @throws IOException on write failure
	 */
	public static void writeClustersToFile(Map<String, Map<Integer, List<Feature>>> clusters, File out)
			throws IOException {
		try (BufferedWriter writer = openGzipWriter(out)) {
			for (Entry<String, Map<Integer, List<Feature>>> senseClusters : clusters.entrySet()) {
				String senseWord = senseClusters.getKey();
				for (Entry<Integer, List<Feature>> senseCluster : senseClusters.getValue().entrySet()) {
					Integer senseId = senseCluster.getKey();
					writer.write(senseWord);
					writer.write("\t");
					writer.write(senseId.toString());
					writer.write("\t");
					for (Feature f : senseCluster.getValue()) {
						if (f.getSenseId() != null) {
							writer.write(f.getWord() + "#" + f.getSenseId() + ":" + f.getWeight());
						} else {
							writer.write(f.getWord());
						}
						writer.write(", ");
					}
					writer.write("\n");
				}
			}
		}
	}

	/**
	 * Writes {@link SenseCluster}s to a file in the tab-separated DDT format:
	 * {@code words<TAB>senseId<TAB>clusterWord[#senseId][:weight], ...}.
	 *
	 * @throws IOException on write failure
	 */
	public static void writeClustersToFile(List<SenseCluster> clusters, File file) throws IOException {
		try (BufferedWriter out = openWriter(file, false)) {
			int count = 0;
			for (SenseCluster cluster : clusters) {
				count++;
				if (count % 1000 == 0) {
					LOG.info("Writing cluster {}/{}", count, clusters.size());
				}
				StringBuilder b = new StringBuilder();
				Sense sense = cluster.getSense();
				List<SingleWord> words = sense.getWords();
				appendSingleWords(b, words);
				b.append("\t").append(sense.getSenseId()).append("\t");
				for (int i = 0; i < cluster.getClusterWords().size(); i++) {
					ClusterWord clusterWord = cluster.getClusterWords().get(i);
					appendSingleWords(b, clusterWord.getWords());
					Integer relatedSenseId = clusterWord.getRelatedSenseId();
					if (relatedSenseId != null) {
						b.append("#").append(relatedSenseId);
					}
					Double weight = clusterWord.getWeight();
					if (weight != null) {
						b.append(":").append(weight);
					}
					if (i < cluster.getClusterWords().size() - 1) {
						b.append(", ");
					}
				}
				b.append("\n");
				out.write(b.toString());
			}
		}
	}

	/**
	 * Appends the tokens of a word group as {@code text[#pos]}, separated by
	 * single spaces.
	 */
	private static void appendSingleWords(StringBuilder b, List<SingleWord> words) {
		for (int i = 0; i < words.size(); i++) {
			SingleWord word = words.get(i);
			b.append(word.getText());
			String pos = word.getPos();
			if (pos != null) {
				b.append("#").append(pos);
			}
			if (i < words.size() - 1) {
				b.append(" ");
			}
		}
	}

	/**
	 * Counts the lines of a (possibly gzipped) file.
	 *
	 * @return the line count, or {@code -1} on I/O error
	 */
	public static int countLines(File file) {
		int count = 0;
		try (BufferedReader in = Utils.openReader(file)) {
			while (in.readLine() != null) {
				count++;
			}
		} catch (IOException e) {
			return -1;
		}
		return count;
	}

	/**
	 * Loads the unique lines of a file preserving first-seen order. Returns an
	 * empty set (best-effort, logged) if the file is missing or unreadable.
	 */
	public static LinkedHashSet<String> loadUniqueLines(File file) {
		LinkedHashSet<String> set = new LinkedHashSet<>();
		if (file.exists()) {
			try (BufferedReader in = Utils.openReader(file)) {
				String line = null;
				while ((line = in.readLine()) != null) {
					set.add(line);
				}
			} catch (Exception e) {
				LOG.error("Error while reading {}", file, e);
			}
			LOG.info("Loaded {} lines from {}", set.size(), file.getAbsolutePath());
		} else {
			LOG.info("{} does not exist, using empty set", file.getAbsolutePath());
		}
		return set;
	}
}
package fsb.explore;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.Stack;

import net.sf.javabdd.BDD;

//import org.jgrapht.IntegerNameProvider;
import org.jgrapht.graph.DirectedWeightedMultigraph;

import ags.constraints.BDDFormula;
import ags.constraints.Formula;
import ags.graph.EdgeDecorator;
import ags.graph.EdgeEmptyNameProvider;
import ags.graph.StateDecorator;
import ags.graph.StateLabelProvider;
import ags.graph.StyledDotExporter;
import fsb.ast.Statement.StatType;
import fsb.utils.Debug;
import fsb.utils.Options;
import gnu.trove.map.TMap;
import gnu.trove.map.hash.THashMap;
import gnu.trove.set.hash.THashSet;

/**
 * A transition system over program states: a directed weighted multigraph of
 * {@code Vertex} (wrapping {@code State}) connected by {@code Transition}
 * edges. Supports fixed-point avoid-formula inference over error states
 * ({@link #analyzeFixedPoint}), DFS error-trace printing, and (disabled) DOT
 * export.
 */
public class TransitionSystem {

	private static final String DOT_EXTENSION = ".dt";

	// Lazily-opened shared writer for error traces (see getWriteBuffOrNull).
	static private FileWriter m_writeBuff = null;

	// Graph
	// TODO: Make private again
	protected DirectedWeightedMultigraph<Vertex, Transition> graph = new DirectedWeightedMultigraph<Vertex, Transition>(
			Transition.class);

	// private Map<State, String> stateLab = new HashMap<State, String>();

	// Designated initial vertex, set via setInitial().
	private Vertex initial;
	// Computed once on first getErrorStates() call; never invalidated, so it
	// can go stale if vertices are added/removed afterwards.
	private Set<Vertex> cachedErrorStates;
	// Reverse index from State to its Vertex, kept in sync by the mutators.
	private TMap<State, Vertex> stateToVertex = new THashMap<State, Vertex>();
	// NOTE(review): appears unused within this class (m_writeBuff is used
	// instead) — candidate for removal, confirm no external access.
	static FileWriter writeBuff = null;

	/** Writes the whole graph to {@code fileName + ".dt"} with no highlights. */
	@SuppressWarnings("unchecked")
	protected void write(String fileName) {
		write(fileName, new THashSet<Vertex>(), Collections.EMPTY_SET, null);
	}

	/**
	 * Opens {@code fileName + ".dt"} and delegates to the Writer overload.
	 * I/O errors are reported to stderr and otherwise ignored.
	 */
	private void write(String fileName, Set<Vertex> doomedStates,
			Set<Transition> cutEdges, Transition stepEdge) {
		FileWriter fw;
		fileName = fileName + DOT_EXTENSION;
		System.out.println("Writing: " + fileName);
		try {
			fw = new FileWriter(fileName);
			write(fw, doomedStates, cutEdges, stepEdge);
			fw.close();
		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}

	/**
	 * DOT export — currently disabled (body commented out), so calling this
	 * produces an empty file.
	 */
	private void write(Writer w, Set<Vertex> doomedStates,
			Set<Transition> cutEdges, Transition stepEdge) {
		// StyledDotExporter<Vertex, Transition> de = new StyledDotExporter<Vertex, Transition>(
		// new IntegerNameProvider<Vertex>(), new StateLabelProvider(),
		// new EdgeEmptyNameProvider<Transition>(), new StateDecorator(
		// initial), new EdgeDecorator(cutEdges, stepEdge));
		//
		// TODO: FIX!
		// de.export(w, graph);
	}

	/** Adds a vertex to the graph and to the State->Vertex index. */
	protected void addState(Vertex s) {
		if (Debug.DEBUG_LEVEL > 3) {
			System.out.println("TS Adding " + s);
		}
		graph.addVertex(s);
		stateToVertex.put(s.getState(), s);
		// stateLab.put(s, s.toString());
	}

	/** Removes the given vertices from both the graph and the index. */
	protected void removeAllVerices(Collection<? extends Vertex> s) {
		if (Debug.DEBUG_LEVEL > 3) {
			System.out.println("TS Removing " + s);
		}
		graph.removeAllVertices(s);
		for(Vertex v : s){
			stateToVertex.remove(v.getState());
		}
	}

	/**
	 * Replaces a set of states by a single vertex: every incoming/outgoing
	 * edge of each removed vertex is re-created against {@code replaced},
	 * then the old vertex is removed.
	 * (Name typo "replacellVerices" kept — callers may depend on it.)
	 *
	 * @throws RuntimeException if {@code replaced} is already in the graph
	 */
	protected void replacellVerices(Collection<? extends State> stateRemovedSet , Vertex replaced) {
		if (Debug.DEBUG_LEVEL > 3) {
			System.out.println("TS Replacing " + stateRemovedSet + " with " + replaced);
		}
		if(graph.containsVertex(replaced)){
			throw new RuntimeException("replacing with a vertex already in graph");
		}
		graph.addVertex(replaced);
		stateToVertex.put(replaced.getState(), replaced);
		for(State state : stateRemovedSet){
			Vertex vertex = stateToVertex.get(state);
			// NOTE(review): edges are added while iterating the edge views of
			// `vertex`; this relies on the new edges (attached to `replaced`)
			// not appearing in those views — confirm against the JGraphT
			// version in use.
			for(Transition e : graph.incomingEdgesOf(vertex)){
				Transition se = new Transition(e.getSrc(), replaced, e.getAction());
				graph.addEdge(e.getSrc(), replaced, se);
			}
			for(Transition e : graph.outgoingEdgesOf(vertex)){
				Transition se = new Transition(replaced, e.getDest(), e.getAction());
				graph.addEdge(replaced, e.getDest(), se);
			}
			graph.removeVertex(vertex);
			stateToVertex.remove(vertex.getState());
		}
	}

	/** Removes the vertices corresponding to the given states. */
	protected void removeAllStates(Collection<? extends State> stateSet) {
		if (Debug.DEBUG_LEVEL > 3) {
			System.out.println("TS Removing " + stateSet);
		}
		for(State s : stateSet){
			graph.removeVertex(stateToVertex.get(s));
			stateToVertex.remove(s);
		}
	}

	/**
	 * Adds a transition src->dest labeled with {@code act}; silently ignored
	 * if an edge between the pair already exists.
	 */
	protected void addTransition(Vertex src, Vertex dest, Action act) {
		Transition se = new Transition(src, dest, act);
		if (graph.containsEdge(src, dest))
			return;
		// This is completely redundant, addEdge will throw an exception
		if (!graph.containsVertex(src)) {
			System.out.println("Does not contain src " + src);
		}
		if (!graph.containsVertex(dest)) {
			System.out.println("Does not contain dest " + dest);
		}
		graph.addEdge(src, dest, se);
		// graph.setEdgeWeight(se, computeWeight(src.getState(),
		// dest.getState(), inst));
	}

	/**
	 * Returns (and caches) the set of vertices whose state is an error state.
	 * The cache is never invalidated after first use.
	 */
	public Set<Vertex> getErrorStates() {
		if (cachedErrorStates == null) {
			cachedErrorStates = new HashSet<Vertex>();
			for (Vertex s : graph.vertexSet()) {
				if (s.getState().isErr()) {
					cachedErrorStates.add(s);
				}
			}
		}
		return cachedErrorStates;
	}

	/** Sets the initial vertex used by analysis and trace printing. */
	public void setInitial(Vertex initialState) {
		initial = initialState;
	}

	/** @return the initial vertex */
	public Vertex getInitial() {
		return initial;
	}

	/**
	 * Looks up the vertex for a state.
	 * May return null if the state is unknown.
	 */
	protected Vertex getVertexForState(State s) {
		if(null == stateToVertex.get(s)){
			// Dead code kept as a breakpoint anchor for the miss case.
			int i=0;
			i++;
		}
		return stateToVertex.get(s);
	}

	/** Edge-weight hook; currently always 0. */
	public int computeWeight(State current, State next, Action t) {
		return 0;
	}

	/* IBM! */
	/**
	 * Fixed-point inference of the "avoid" formula over all error paths.
	 * Phase 1: push the reachable graph onto a deque in post-order DFS order
	 * (error states and final states are leaves; final states are marked
	 * safe). Phase 2: delegate to
	 * {@link #synthesisOfCodeCorrectionSuggestion} to propagate formulas to a
	 * fixed point.
	 *
	 * @param initialConstraint constraint to conjoin the result with
	 * @return the accumulated avoid formula (unchanged if no error states)
	 */
	@SuppressWarnings("unchecked")
	Formula analyzeFixedPoint(Formula initialConstraint) {
		System.out.println("Total states : " + graph.vertexSet().size());
		System.out.println("Error states : " + getErrorStates().size());
		// Mark all unavoidable states with false formulas...
		/* if (!markUnavoidable()) return initialConstraint; */
		Deque<Vertex> deq = new LinkedList<Vertex>();
		// Purely an optimization... hopefully actually does some good.
		HashSet<Vertex> onDeq = new HashSet<Vertex>();
		HashSet<Vertex> onStack = new HashSet<Vertex>();
		Formula allErrPaths = initialConstraint;
		for (Vertex v : graph.vertexSet())
			v.setVisited(false);
		// Put the whole transition system on the dequeue in post-order DFS
		// order.
		// Nothing interesting here, except possible bugs.
		Stack<Vertex> stack = new Stack<Vertex>();
		stack.push(initial);
		onDeq.add(initial);
		onStack.add(initial);
		for (Transition t : graph.outgoingEdgesOf(initial)) {
			stack.push(t.getDest());
			onStack.add(t.getDest());
		}
		initial.setVisited(true);
		while (!stack.isEmpty()) {
			Vertex curr = stack.peek();
			State currState = curr.getState();
			if (currState.isErr()) {
				// Error states are leaves of the traversal.
				stack.pop();
				deq.push(curr);
				onDeq.add(curr);
				onStack.remove(curr);
				curr.setVisited(true);
			} else if (currState.isFinal()) {
				// Final states are leaves too, and marked safe so the
				// propagation phase never re-enqueues them.
				stack.pop();
				deq.push(curr);
				onDeq.add(curr);
				onStack.remove(curr);
				curr.setVisited(true);
				curr.setSafe(true);
			} else if (curr.isVisited()) {
				// Second visit: all children processed -> emit in post-order.
				stack.pop();
				deq.push(curr);
				onDeq.add(curr);
				onStack.remove(curr);
			} else {
				// First visit: push unvisited children, keep curr on stack.
				for (Transition t : graph.outgoingEdgesOf(curr)) {
					Vertex dest = t.getDest();
					if (!onStack.contains(dest) && !dest.isVisited()) {
						stack.push(dest);
						onStack.add(dest);
					}
				}
				curr.setVisited(true);
			}
		}
		if (getErrorStates().isEmpty()) {
			System.out.println("No error states, nothing to infer!");
			System.out.println("Avoid formula: " + allErrPaths);
			return allErrPaths;
		}
		allErrPaths = synthesisOfCodeCorrectionSuggestion(deq, onDeq, allErrPaths);
		return allErrPaths;
	}

	/**
	 * Worklist fixed-point: for each dequeued vertex, its avoid formula is
	 * the conjunction over incoming edges of (parent formula OR edge-avoid
	 * formula). Changed vertices re-enqueue their non-safe children. Finally
	 * conjoins the formulas of all error states into {@code allErrPaths}.
	 *
	 * @param deq         worklist pre-loaded in post-order DFS order
	 * @param onDeq       membership set mirroring {@code deq}
	 * @param allErrPaths accumulator for the result
	 * @return the conjoined avoid formula; also prints its BDD solutions
	 */
	private Formula synthesisOfCodeCorrectionSuggestion(Deque<Vertex> deq,
			HashSet<Vertex> onDeq, Formula allErrPaths) {
		System.out.println("Start generation");
		// And now, the actual formula generation
		int min = deq.size();
		int maxChanged = 0;
		while (!deq.isEmpty()) {
			Vertex v = deq.pop();
			onDeq.remove(v);
			// Avoiding a state is a conjunction of disjunctions:
			// For every parent you need to either the parent or the
			// parent->child egde, and you must avoid ALL (parent | edge) pairs.
			Formula conj = Formula.trueValue();
			// So, for each incoming edge, add a clause to the conjunction
			for (Transition t : graph.incomingEdgesOf(v)) {
				Formula disj = Formula.falseValue();
				// DisjunctionFormula disj = new DisjunctionFormula();
				Formula parentFormula = t.getSrc().getFormula();
				State srcState = t.getSrc().getState();
				if (srcState.disjPredicates()) {
					// Avoiding an edge is actually also a disjunction - since
					// an edge may create (and usually does create)
					// several violations.
					Set<Formula> badPredicates = srcState.getPredicates(t
							.getAction());
					// TODO: This is debug only, shouldn't happen with any sane
					// semantics.
					if (badPredicates == null)
						continue;
					for (Formula bad : badPredicates)
						disj = disj.or(bad);
					//System.out.println("Bad Predicates: " + disj);
					if (!badPredicates.isEmpty()) {
						// Formula parentPlusEdge = parentFormula.deepCopy();
						Formula parentPlusEdge = parentFormula.or(disj);
						conj = conj.and(parentPlusEdge);
					} else {
						conj = conj.and(parentFormula);
					}
				} else {
					Formula edge = srcState.getAvoidFormula(t.getAction());
					// System.out.println("Edge:" + edge);
					// System.out.println("Parent:" + parentFormula);
					// System.out.println(parentFormula + " OR " + edge);
					Formula parentPlusEdge = parentFormula.or(edge);
					//System.out.println("P+E = " + parentPlusEdge);
					//System.out.println("conj = " + conj);
					conj = conj.and(parentPlusEdge);
					//System.out.println("conj&PE = " + conj);
				}
			}
			// If the formula changed, this means we need to propagate the
			// change to the children,
			// so add them back to the stack (if they're not already there)
			conj = conj.and(v.getFormula());
			//System.out.println("conj & v-formula:" + conj);
			if (!conj.equals(v.getFormula())) {
				v.setFormula(conj);
				if (v.formulaChanged > maxChanged) {
					maxChanged = v.formulaChanged;
					System.out.println("Max formula changes per vertex: " + maxChanged);
					//System.out.println(v.getFormula());
					// NOTE(review): prints the BDD form when useBDD() is
					// FALSE — looks inverted, confirm intent.
					if (!Formula.useBDD()) {
						BDD bdd = BDDFormula.getBDD(v.getFormula());
						System.out.println(bdd);
						System.out.println("from BDD: " + BDDFormula.getFormula(bdd));
					}
				}
				for (Transition out : graph.outgoingEdgesOf(v)) {
					Vertex dest = out.getDest();
					if (!onDeq.contains(dest)) {
						if (!dest.isSafe()) {
							deq.push(dest);
							onDeq.add(dest);
						}
					}
				}
			}
			if (deq.size() < min) {
				min = deq.size();
				if (min % 1000 == 0) {
					System.out.println("Minimum size of dequeue: " + min);
				}
			}
		}
		for (Vertex v : getErrorStates())
			allErrPaths = allErrPaths.and(v.getFormula());
		System.out.println("Avoid formula: " + allErrPaths);
		// Assumes the formula implementation is BDD-backed here.
		List<Set> l = ((BDDFormula)allErrPaths).getSolutions();
		for(Set s : l) {
			System.out.println("Solution: " + s);
		}
		return allErrPaths;
	}

	/**
	 * Marks states reachable via unavoidable transitions. Returns false (and
	 * reports) if an unavoidable error state is found. Currently only invoked
	 * from commented-out code in {@link #analyzeFixedPoint}.
	 */
	private boolean markUnavoidable() {
		System.out.println("Marking unavoidable states");
		for (Vertex v : graph.vertexSet())
			v.setVisited(false);
		// Put the whole transition system on the dequeue in post-order DFS
		// order.
		Stack<Vertex> stack = new Stack<Vertex>();
		stack.push(initial);
		initial.setVisited(true);
		int count = 0;
		// DFS
		while (!stack.isEmpty()) {
			Vertex curr = stack.pop();
			for (Transition t : graph.outgoingEdgesOf(curr)) {
				if (!t.getSrc().getState().isAvoidable(t.getAction())
						&& !t.getDest().isVisited()) {
					count++;
					stack.push(t.getDest());
					t.getDest().setVisited(true);
					t.getDest().setFormula(Vertex.initStateFormula);
					if (t.getDest().getState().isErr()) {
						System.out.println("Found unavoidable error state!");
						return false;
					}
				}
			}
		}
		System.out.println("Marked " + count + " unavoidable states");
		return true;
	}

	/**
	 * Quick and dirty recursive version. Write an iterative one if this one causes problems.
	 * (Despite the comment, this delegates to the iterative helper2.)
	 * @param source
	 * @param target
	 */
	public void printTrace(Vertex source, Vertex target) {
		for (Vertex v : graph.vertexSet())
			v.setVisited(false);
		Deque<Transition> path = new LinkedList<Transition>();
		printTraceHelper2(source, target, path);
	}

	/**
	 *
	 * searches the first trace from source to target in a DFS manner.
	 * Recursive variant; prints (and optionally writes to file) the trace.
	 * @param source
	 * @param target
	 * @param path transitions accumulated so far
	 * @return true iff a trace was found
	 */
	private boolean printTraceHelper(Vertex source, Vertex target, Deque<Transition> path) {
		String fileToWriteTraceTo = Options.fileNameForErrorTrace;
		FileWriter writeBuff = getWriteBuffOrNull(fileToWriteTraceTo);
		if (source.equals(target)) {
			printFoundErrorTrace(path, fileToWriteTraceTo, writeBuff);
			System.out.println(target.getState());
			writeToFile(target.getState().toString(), fileToWriteTraceTo, writeBuff);
			return true;
		} else {
			source.setVisited(true);
			for (Transition t: graph.outgoingEdgesOf(source)) {
				Vertex nextSource = t.getDest();
				if (!nextSource.isVisited()) {
					path.addLast(t);
					boolean found = printTraceHelper(nextSource, target, path);
					if (found)
						return true;
					path.removeLast();
				}
			}
			return false;
		}
	}

	/**
	 * Iterative DFS variant of {@link #printTraceHelper}: an explicit stack
	 * of edge iterators replaces the call stack.
	 *
	 * @return true iff a trace from source to target was found and printed
	 */
	private boolean printTraceHelper2(Vertex source, Vertex target, Deque<Transition> path) {
		String fileToWriteTraceTo = Options.fileNameForErrorTrace;
		FileWriter writeBuff = getWriteBuffOrNull(fileToWriteTraceTo);
		Stack<Iterator<Transition>> stack = new Stack<Iterator<Transition>>();
		Iterator<Transition> it = graph.outgoingEdgesOf(source).iterator();
		stack.push(it);
		while(!stack.isEmpty()) {
			if (source.equals(target)) {
				printFoundErrorTrace(path, fileToWriteTraceTo, writeBuff);
				System.out.println(target.getState());
				writeToFile(target.getState().toString(), fileToWriteTraceTo, writeBuff);
				return true;
			}
			//if reached here source is not equal to target updating trace
			source.setVisited(true);
			Iterator<Transition> top = stack.peek();
			if(!top.hasNext()){
				// Exhausted this vertex's edges: backtrack.
				path.removeLast();
				stack.pop();
				continue;
			}
			Transition current_transition = top.next();
			Vertex nextSource = current_transition.getDest();
			if (!nextSource.isVisited()){
				path.addLast(current_transition);
				source=nextSource;
				Iterator<Transition> it2 = graph.outgoingEdgesOf(source).iterator();
				stack.push(it2);
			}
		}
		//reached here the stack was empty all possible paths exhausted, error trace not found.
		return false;
	}

	/**
	 * Prints each transition of the found trace to stdout and, if configured,
	 * to the trace file, honoring the comment-only / succinct options.
	 */
	private void printFoundErrorTrace(Deque<Transition> path, String fileToWriteTraceTo, FileWriter writeBuff) {
		for (Transition t : path) {
			if(Options.PRINT_ONLY_COMMENTS_IN_ERROR_TRACE && ! (t.getAction().getStatement().getType() == StatType.C_COMMENT) ){
				if(!Options.SUCCINT_ERROR_TRACE){
					System.out.println("\n");
					System.out.println("");
					writeToFile("\n", fileToWriteTraceTo, writeBuff);
					writeToFile("", fileToWriteTraceTo, writeBuff);
				}
			}else{
				if(!Options.SUCCINT_ERROR_TRACE){
					System.out.println(t.getAction().getSource());
					writeToFile(t.getAction().getSource().toString(), fileToWriteTraceTo, writeBuff);
				}
				if(!Options.SUCCINT_ERROR_TRACE || t.getAction().toString().contains("Statement")){
					System.out.println(t.getAction());
					writeToFile(t.getAction().toString(), fileToWriteTraceTo, writeBuff);
				}
			}
		}
	}

	/**
	 * Appends a line to the trace file if a destination is configured;
	 * I/O errors are reported and swallowed (best-effort logging).
	 */
	private void writeToFile(String s, String fileToWriteTraceTo, FileWriter writeBuff) {
		if(null != fileToWriteTraceTo){
			try {
				writeBuff.write(s);
				writeBuff.write("\n");
				writeBuff.flush();
			} catch (IOException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
		}
	}

	/**
	 * Lazily opens the shared trace-file writer; returns null if no file is
	 * configured or the open failed. The writer is never closed here.
	 */
	private FileWriter getWriteBuffOrNull(String fileToWriteTraceTo) {
		if(null == m_writeBuff){
			if(fileToWriteTraceTo != null){
				try {
					m_writeBuff = new FileWriter(fileToWriteTraceTo);
				} catch (IOException e) {
					m_writeBuff = null;
					e.printStackTrace();
				}
			}
		}
		return m_writeBuff;
	}

	/*
	 * first step towards symbolic execution I want to run the error trace in the concrete program,
	 * my approach is use the comments:
	 * 1. get the predicates from a file
	 * 2. collect all the assert statements
	 * 3. parse from the comment assignment and load statements.
	 */
	/**
	 * Prints the first source->target trace using the recursive helper.
	 * (Visited flags are reset first, same as printTrace.)
	 */
	public void printPathEquasion(Vertex source, Vertex target) {
		for (Vertex v : graph.vertexSet())
			v.setVisited(false);
		Deque<Transition> path = new LinkedList<Transition>();
		printTraceHelper(source, target, path);
	}
}
package com.hazelcast.simulator.coordinator;

import com.hazelcast.simulator.cluster.ClusterLayout;
import com.hazelcast.simulator.cluster.WorkerConfigurationConverter;
import com.hazelcast.simulator.common.JavaProfiler;
import com.hazelcast.simulator.common.SimulatorProperties;
import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.test.TestException;
import com.hazelcast.simulator.utils.Bash;
import com.hazelcast.simulator.utils.CommandLineExitException;
import com.hazelcast.simulator.utils.jars.HazelcastJARs;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.util.HashSet;

import static com.hazelcast.simulator.common.JavaProfiler.NONE;
import static com.hazelcast.simulator.common.JavaProfiler.YOURKIT;
import static com.hazelcast.simulator.utils.FileUtils.deleteQuiet;
import static com.hazelcast.simulator.utils.FileUtils.ensureExistingDirectory;
import static com.hazelcast.simulator.utils.FormatUtils.NEW_LINE;
import static com.hazelcast.simulator.utils.jars.HazelcastJARs.OUT_OF_THE_BOX;
import static java.util.Collections.singleton;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.contains;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

/**
 * Tests for {@code CoordinatorUploader}: verifies which remote commands and
 * uploads are triggered per agent for Hazelcast JARs, the upload directory,
 * the worker classpath and the YourKit profiler. The {@code Bash} and
 * {@code HazelcastJARs} collaborators are mocked, so no real SSH/uploads run.
 */
public class CoordinatorUploaderTest {

    private ComponentRegistry componentRegistry = new ComponentRegistry();

    private ClusterLayout clusterLayout;

    private Bash bash = mock(Bash.class);
    private HazelcastJARs hazelcastJARs = mock(HazelcastJARs.class);
    private WorkerParameters workerParameters = mock(WorkerParameters.class);

    private String testSuiteId = "testSuiteId";

    private File notExists = new File("/notExists");

    // real directories created in setUp() and removed in tearDown()
    private File uploadDirectory = new File("upload");
    private File workerClassPathFile = new File("workerClassPath");
    private String workerClassPath = workerClassPathFile.getAbsolutePath();

    private CoordinatorUploader coordinatorUploader;

    /**
     * Registers two agents, stubs the worker parameters, builds a default
     * two-member cluster layout and creates the uploader under test.
     */
    @Before
    public void setUp() {
        componentRegistry.addAgent("192.168.0.1", "192.168.0.1");
        componentRegistry.addAgent("192.168.0.2", "192.168.0.2");

        when(workerParameters.getHazelcastVersionSpec()).thenReturn(OUT_OF_THE_BOX);
        when(workerParameters.getProfiler()).thenReturn(JavaProfiler.NONE);

        ClusterLayoutParameters clusterLayoutParameters = new ClusterLayoutParameters(null, null, 2, 0, 0, 2);
        clusterLayout = new ClusterLayout(componentRegistry, workerParameters, clusterLayoutParameters);

        ensureExistingDirectory(uploadDirectory);
        ensureExistingDirectory(workerClassPathFile);

        coordinatorUploader = new CoordinatorUploader(bash, componentRegistry, clusterLayout, hazelcastJARs, true, false,
                workerClassPath, YOURKIT, testSuiteId);
    }

    /** Removes the directories created for the test. */
    @After
    public void tearDown() {
        deleteQuiet(uploadDirectory);
        deleteQuiet(workerClassPathFile);
    }

    /** Smoke test: the full upload run completes without throwing. */
    @Test
    public void testRun() {
        coordinatorUploader.run();
    }

    /** JARs are prepared once and uploaded to each of the two agents. */
    @Test
    public void testUploadHazelcastJARs() {
        coordinatorUploader.uploadHazelcastJARs();

        verify(hazelcastJARs, times(1)).prepare(false);
        verify(hazelcastJARs, times(2)).upload(contains("192.168.0."), anyString(), eq(singleton(OUT_OF_THE_BOX)));
        verifyNoMoreInteractions(hazelcastJARs);
        verifyNoMoreInteractions(bash);
    }

    /**
     * With a cluster XML that assigns different Hazelcast versions per node,
     * each agent receives exactly the version specs its workers need.
     */
    @Test
    public void testUploadHazelcastJARs_withClusterXml() {
        String xml = "<clusterConfiguration>" + NEW_LINE
                + "\t<workerConfiguration name=\"hz351\" type=\"MEMBER\" hzVersion=\"maven=3.5.1\"/>" + NEW_LINE
                + "\t<workerConfiguration name=\"hz352\" type=\"MEMBER\" hzVersion=\"maven=3.5.2\"/>" + NEW_LINE
                + "\t<nodeConfiguration>" + NEW_LINE
                + "\t\t<workerGroup configuration=\"hz351\" count=\"1\"/>" + NEW_LINE
                + "\t\t<workerGroup configuration=\"hz352\" count=\"1\"/>" + NEW_LINE
                + "\t</nodeConfiguration>" + NEW_LINE
                + "\t<nodeConfiguration>" + NEW_LINE
                + "\t\t<workerGroup configuration=\"hz352\" count=\"1\"/>" + NEW_LINE
                + "\t</nodeConfiguration>" + NEW_LINE
                + "</clusterConfiguration>";

        SimulatorProperties simulatorProperties = mock(SimulatorProperties.class);
        WorkerConfigurationConverter converter = new WorkerConfigurationConverter(5701, null, workerParameters,
                simulatorProperties, componentRegistry);

        ClusterLayoutParameters clusterLayoutParameters = new ClusterLayoutParameters(xml, converter, 0, 0, 0, 2);
        clusterLayout = new ClusterLayout(componentRegistry, workerParameters, clusterLayoutParameters);

        coordinatorUploader = new CoordinatorUploader(bash, componentRegistry, clusterLayout, hazelcastJARs, true, false,
                workerClassPath, YOURKIT, testSuiteId);

        coordinatorUploader.uploadHazelcastJARs();

        // first agent hosts both versions, second agent only 3.5.2
        HashSet<String> versionSpecs = new HashSet<String>(2);
        versionSpecs.add("maven=3.5.1");
        versionSpecs.add("maven=3.5.2");

        verify(hazelcastJARs, times(1)).prepare(false);
        verify(hazelcastJARs, times(1)).upload(contains("192.168.0.1"), anyString(), eq(versionSpecs));
        verify(hazelcastJARs, times(1)).upload(contains("192.168.0.2"), anyString(), eq(singleton("maven=3.5.2")));
        verifyNoMoreInteractions(hazelcastJARs);
        verifyNoMoreInteractions(bash);
    }

    /** A null HazelcastJARs means nothing is uploaded at all. */
    @Test
    public void testUploadHazelcastJARs_isNull() {
        coordinatorUploader = new CoordinatorUploader(bash, componentRegistry, clusterLayout, null, true, false,
                workerClassPath, YOURKIT, testSuiteId);

        coordinatorUploader.uploadHazelcastJARs();

        verifyNoMoreInteractions(bash);
    }

    /** The upload directory is pushed to both agents via ssh + upload. */
    @Test
    public void testUploadUploadDirectory() {
        coordinatorUploader.uploadUploadDirectory();

        verify(bash, times(2)).ssh(contains("192.168.0."), anyString());
        verify(bash, times(2)).uploadToRemoteSimulatorDir(contains("192.168.0."), anyString(), anyString());
        verifyNoMoreInteractions(bash);
    }

    /** A missing upload directory is silently skipped. */
    @Test
    public void testUploadUploadDirectory_uploadDirectoryNotExists() {
        deleteQuiet(uploadDirectory);

        coordinatorUploader.uploadUploadDirectory();

        verifyNoMoreInteractions(bash);
    }

    /** An upload failure is surfaced as a CommandLineExitException. */
    @Test(expected = CommandLineExitException.class)
    public void testUploadUploadDirectory_withException() {
        TestException exception = new TestException("expected");
        doThrow(exception).when(bash).uploadToRemoteSimulatorDir(contains("192.168.0."), anyString(), anyString());

        coordinatorUploader.uploadUploadDirectory();
    }

    /** The worker classpath is pushed to both agents via ssh + upload. */
    @Test
    public void testUploadWorkerClassPath() {
        coordinatorUploader.uploadWorkerClassPath();

        verify(bash, times(2)).ssh(contains("192.168.0."), anyString());
        verify(bash, times(2)).uploadToRemoteSimulatorDir(contains("192.168.0."), anyString(), anyString());
        verifyNoMoreInteractions(bash);
    }

    /** A null worker classpath means nothing is uploaded. */
    @Test
    public void testUploadWorkerClassPath_workerClassPathIsNull() {
        coordinatorUploader = new CoordinatorUploader(bash, componentRegistry, clusterLayout, hazelcastJARs, true, false,
                null, YOURKIT, testSuiteId);

        coordinatorUploader.uploadWorkerClassPath();

        verifyNoMoreInteractions(bash);
    }

    /** A non-existing worker classpath is a hard error. */
    @Test(expected = CommandLineExitException.class)
    public void testUploadWorkerClassPath_workerClassPathNotExists() {
        coordinatorUploader = new CoordinatorUploader(bash, componentRegistry, clusterLayout, hazelcastJARs, true, false,
                notExists.getAbsolutePath(), YOURKIT, testSuiteId);

        coordinatorUploader.uploadWorkerClassPath();
    }

    /** With the YourKit profiler selected, its files go to both agents. */
    @Test
    public void testUploadYourKit() {
        coordinatorUploader.uploadYourKit();

        verify(bash, times(2)).ssh(contains("192.168.0."), anyString());
        verify(bash, times(2)).uploadToRemoteSimulatorDir(contains("192.168.0."), anyString(), anyString());
        verifyNoMoreInteractions(bash);
    }

    /** With no YourKit profiler selected, nothing is uploaded. */
    @Test
    public void testUploadYourKit_noYourKitProfiler() {
        coordinatorUploader = new CoordinatorUploader(bash, componentRegistry, clusterLayout, hazelcastJARs, true, false,
                workerClassPath, NONE, testSuiteId);

        coordinatorUploader.uploadYourKit();

        verifyNoMoreInteractions(bash);
    }
}
package org.scm4j.releaser;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.scm4j.commons.Version;
import org.scm4j.releaser.actions.IAction;
import org.scm4j.releaser.branch.DevelopBranch;
import org.scm4j.releaser.branch.ReleaseBranchCurrent;
import org.scm4j.releaser.branch.ReleaseBranchFactory;
import org.scm4j.releaser.branch.ReleaseBranchPatch;
import org.scm4j.releaser.cli.CLICommand;
import org.scm4j.releaser.cli.Option;
import org.scm4j.releaser.conf.*;
import org.scm4j.releaser.exceptions.EDelayingDelayed;
import org.scm4j.releaser.exceptions.ENoDelayedTags;
import org.scm4j.vcs.api.IVCS;
import org.scm4j.vcs.api.VCSCommit;
import org.scm4j.vcs.api.VCSTag;
import org.scm4j.vcs.api.WalkDirection;

import java.util.List;
import java.util.Map;

import static org.junit.Assert.*;

/**
 * Workflow tests for the "delayed tag" feature: building with a delayed tag
 * records the pending tag in the delayed-tags file instead of tagging the VCS
 * immediately; a later tag action applies (or skips) the recorded tags.
 */
public class WorkflowDelayedTagTest extends WorkflowTestBase {

    // file that records the tags whose creation was delayed
    private final DelayedTagsFile dtf = new DelayedTagsFile();

    /** Start and end each test with an empty delayed-tags file. */
    @Before
    @After
    public void setUpTearDown() {
        dtf.delete();
    }

    /** Building a patch with a delayed tag records it; a later tag action applies it. */
    @Test
    public void testDelayedTagOnPatch() throws Exception {
        forkAndBuild(compUnTill);

        // add feature to unTill
        Component compUnTillVersioned = compUnTill.clone(env.getUnTillVer().toReleaseZeroPatch());
        ReleaseBranchPatch rb = ReleaseBranchFactory.getReleaseBranchPatch(compUnTillVersioned.getVersion(), repoUnTillDb);
        env.generateFeatureCommit(env.getUnTillVCS(), rb.getName(), "patch feature merged");

        // build all patches, delayed tag
        IAction action = execAndGetActionBuildDelayedTag(compUnTillVersioned);
        assertActionDoesBuildDelayedTag(action, compUnTillVersioned);

        // check root component patch tag is delayed
        Assert.assertEquals(1, env.getUblVCS().getTags().size());
        Assert.assertEquals(1, env.getUnTillDbVCS().getTags().size());
        Assert.assertEquals(1, env.getUnTillVCS().getTags().size());

        // check component with delayed tag is considered as tagged (DONE) on build
        action = execAndGetActionBuild(compUnTillVersioned);
        assertActionDoesNothing(action);

        // check no exceptions on status command with --delayed-tag option
        execAndGetNode(null, CLICommand.STATUS.getCmdLineStr(), compUnTillVersioned.getCoords().toString(),
                Option.DELAYED_TAG.getCmdLineStr());

        // check Delayed Tags file
        DelayedTag delayedTag = dtf.getDelayedTagByUrl(repoUnTill.getUrl());
        assertEquals(env.getUnTillVer().toReleaseZeroPatch().toNextPatch(), delayedTag.getVersion());
        ReleaseBranchPatch patchRB = ReleaseBranchFactory.getReleaseBranchPatch(compUnTillVersioned.getVersion(), repoUnTill);
        VCSCommit commitToTag = env.getUnTillVCS().getHeadCommit(patchRB.getName());
        assertEquals(delayedTag.getRevision(), commitToTag.getRevision());

        // create tag which was delayed
        action = execAndGetActionTag(compUnTill, null);
        assertActionDoesTag(action, compUnTill);

        // check tags
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUBL, env.getUblVer()));
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUnTillDb, env.getUnTillDbVer()));
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUnTill, env.getUnTillVer()));

        // check Delayed Tags file is emptied once the tag is applied
        assertTrue(dtf.getContent().isEmpty());
    }

    /** Delayed tag on a minor build: only the root component's tag is delayed. */
    @Test
    public void testDelayedTagOnMinor() throws Exception {
        fork(compUnTill);
        IAction action = execAndGetActionBuildDelayedTag(compUnTill);
        assertActionDoesBuildAllDelayedTag(action);

        // check root component tag is delayed
        assertTrue(env.getUnTillVCS().getTags().isEmpty());
        assertTrue(env.getUnTillDbVCS().getTags().size() == 1);
        assertTrue(env.getUblVCS().getTags().size() == 1);

        // check component with delayed tag is considered as tagged (DONE) on build
        action = execAndGetActionBuild(compUnTill);
        assertActionDoesNothing(action);

        // expect no exceptions on status command with --delayed-tag option
        execAndGetNode(null, CLICommand.STATUS.getCmdLineStr(), compUnTill.getCoords().toString(),
                Option.DELAYED_TAG.getCmdLineStr());

        // check Delayed Tags file: only the root component (unTill) is recorded
        assertNull(dtf.getDelayedTagByUrl(repoUnTillDb.getUrl()));
        assertNotNull(dtf.getDelayedTagByUrl(repoUnTill.getUrl()));
        assertNull(dtf.getDelayedTagByUrl(repoUBL.getUrl()));

        // check delayed tag
        DelayedTag delayedTag = dtf.getDelayedTagByUrl(repoUnTill.getUrl());
        assertEquals(env.getUnTillVer().toReleaseZeroPatch(), delayedTag.getVersion());
        ReleaseBranchCurrent crb = ReleaseBranchFactory.getCRB(repoUnTill);
        VCSCommit commitToTag = env.getUnTillVCS().getHeadCommit(crb.getName());
        assertEquals(delayedTag.getRevision(), commitToTag.getRevision());

        // create tag which was delayed
        action = execAndGetActionTag(compUnTill, null);
        assertActionDoesTag(action, compUnTill);

        // check tags
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUBL, env.getUblVer()));
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUnTillDb, env.getUnTillDbVer()));
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUnTill, env.getUnTillVer()));

        // check Delayed Tags file is emptied once the tag is applied
        assertTrue(dtf.getContent().isEmpty());
    }

    /** Deleting the delayed-tags file before tagging raises ENoDelayedTags. */
    @Test
    public void testTagFileUnexpectedlyDeleted() throws Exception {
        // build all, root tag delayed
        fork(compUnTill);
        execAndGetActionBuildDelayedTag(compUnTill);

        // simulate delayed tags file is deleted right before action execution.
        try {
            execAndGetActionTag(compUnTill, () -> assertTrue(dtf.delete()));
            fail();
        } catch (ENoDelayedTags e) {
        }

        // check no tags were created for the root component
        assertTrue(env.getUnTillVCS().getTags().isEmpty());
        assertTrue(env.getUnTillDbVCS().getTags().size() == 1);
        assertTrue(env.getUblVCS().getTags().size() == 1);
    }

    /** If the tag already exists when the tag action executes, tagging is skipped silently. */
    @Test
    public void testTagExistsOnExecute() {
        // build all
        fork(compUnTill);
        execAndGetActionBuildDelayedTag(compUnTill);

        // all is going to tag
        IAction action = execAndGetActionTag(compUnTill, () -> {
            // simulate tag exists already
            // tagging should be skipped with no exceptions
            Map<String, DelayedTag> content = dtf.getContent();
            for (Map.Entry<String, DelayedTag> entry : content.entrySet()) {
                if (repoUnTill.getUrl().equals(entry.getKey())) {
                    DelayedTag dt = entry.getValue();
                    TagDesc tagDesc = Utils.getTagDesc(dt.getVersion().toString());
                    String branchName = Utils.getReleaseBranchName(repoUnTill, dt.getVersion());
                    env.getUnTillVCS().createTag(branchName, tagDesc.getName(), tagDesc.getMessage(), dt.getRevision());
                }
            }
            try {
                Thread.sleep(1000); // TODO: test fails without sleep
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        });
        assertActionDoesTag(action, compUnTill);

        // check tags
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUBL, env.getUblVer()));
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUnTillDb, env.getUnTillDbVer()));
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUnTill, env.getUnTillVer()));

        // check Delayed Tags file is emptied once the tag is applied
        assertTrue(dtf.getContent().isEmpty());
    }

    /** Tagging with no recorded delayed tags raises ENoDelayedTags and creates nothing. */
    @Test
    public void testExceptionIfNoDelayedTags() {
        try {
            execAndGetActionTag(compUnTill, null);
            fail();
        } catch (ENoDelayedTags e) {
        }

        // check no tags
        assertTrue(env.getUnTillVCS().getTags().isEmpty());
        assertTrue(env.getUnTillDbVCS().getTags().isEmpty());
        assertTrue(env.getUblVCS().getTags().isEmpty());
    }

    /** A pre-existing version tag is detected when building the action tree; tagging is skipped. */
    @Test
    public void testTagExistsOnGetActionTree() throws Exception {
        fork(compUnTillDb);
        IAction action = execAndGetActionBuildDelayedTag(compUnTillDb);
        assertActionDoesBuildDelayedTag(action, compUnTillDb);

        DelayedTag delayedTag = dtf.getDelayedTagByUrl(repoUnTillDb.getUrl());
        String branchName = Utils.getReleaseBranchName(repoUnTillDb, delayedTag.getVersion());
        env.getUnTillDbVCS().createTag(branchName, "other-tag", "other tag message", delayedTag.getRevision());

        // simulate tag exists
        TagDesc tagDesc = Utils.getTagDesc(delayedTag.getVersion().toString());
        env.getUnTillDbVCS().createTag(branchName, tagDesc.getName(), tagDesc.getMessage(), delayedTag.getRevision());
        Thread.sleep(1000); // TODO: test fails without sleep

        // check version tag is detected -> tagging skipped
        action = execAndGetActionTag(compUnTillDb, null);
        assertActionDoesTag(action, compUnTillDb);

        // check no new tags
        assertTrue(env.getUnTillDbVCS().getTags().size() == 2);
    }

    /** A mandatory dependency with a delayed tag does not block forking/building the parent. */
    @Test
    public void testMDepTagDelayed() {
        fork(compUnTillDb);
        IAction action = execAndGetActionBuildDelayedTag(compUnTillDb);
        assertActionDoesBuildDelayedTag(action, compUnTillDb);

        // fork unTill. All should be forked except of unTillDb
        action = execAndGetActionFork(compUnTill);
        checkUnTillForked(1);
        assertActionDoesFork(action, compUnTill, compUBL);
        assertActionDoesNothing(action, compUnTillDb);

        // build unTill. All should be built except of unTillDb
        action = execAndGetActionBuild(compUnTill);
        checkUnTillBuilt(1);
        assertActionDoesBuild(action, compUnTill, BuildStatus.BUILD_MDEPS);
        assertActionDoesBuild(action, compUBL, BuildStatus.BUILD);
        assertActionDoesNothing(action, compUnTillDb);

        // check nothing happens on next fork
        action = execAndGetActionFork(compUnTill);
        assertActionDoesNothing(action, compUnTill, compUnTillDb, compUBL);

        // set tag on unTillDb
        assertTrue(env.getUnTillDbVCS().getTags().isEmpty());
        action = execAndGetActionTag(compUnTillDb, null);
        assertActionDoesTag(action, compUnTillDb);
        assertFalse(env.getUnTillDbVCS().getTags().isEmpty());

        // check nothing happens on next fork
        action = execAndGetActionFork(compUnTill);
        assertActionDoesNothing(action, compUnTill, compUnTillDb, compUBL);
    }

    /** Delaying a tag while another delayed tag is still pending raises EDelayingDelayed. */
    @Test
    public void testDelayingDelayed() {
        forkAndBuild(compUnTillDb);

        ReleaseBranchCurrent crb = ReleaseBranchFactory.getCRB(repoUnTillDb);
        env.generateFeatureCommit(env.getUnTillDbVCS(), crb.getName(), "feature merged");
        Component compUnTillDbPatch = compUnTillDb.clone(env.getUnTillDbVer().toReleaseZeroPatch());
        IAction action = execAndGetActionBuildDelayedTag(compUnTillDbPatch);
        assertActionDoesBuildDelayedTag(action, compUnTillDbPatch);

        env.generateFeatureCommit(env.getUnTillDbVCS(), crb.getName(), "feature merged");

        // try to build next untillDb patch with delayed tag
        try {
            execAndGetActionBuildDelayedTag(compUnTillDbPatch);
            fail();
        } catch (EDelayingDelayed e) {
            assertEquals(repoUnTillDb.getUrl(), e.getUrl());
        }
    }

    /** The delayed version is still used for tagging even if trunk was bumped meanwhile. */
    @Test
    public void testDelayedTagVersionUsageIfTrunkBumped() {
        fork(compUnTillDb);
        IAction action = execAndGetActionBuildDelayedTag(compUnTillDb);
        assertActionDoesBuildDelayedTag(action, compUnTillDb);
        new DevelopBranch(compUnTillDb, repoUnTillDb).getVersion();

        // simulate version is raised already in trunk (e.g. built manually)
        IVCS vcs = repoUnTillDb.getVCS();
        vcs.setFileContent(repoUnTillDb.getDevelopBranch(), Constants.VER_FILE_NAME,
                new Version(vcs.getFileContent(repoUnTillDb.getDevelopBranch(), Constants.VER_FILE_NAME, null))
                        .toNextMinor().toString(),
                "minor bumped");

        // tag delayed
        action = execAndGetActionTag(compUnTillDb, null);
        assertActionDoesTag(action, compUnTillDb);

        // check right version is used in right release branch
        ReleaseBranchPatch patchBranch = ReleaseBranchFactory.getReleaseBranchPatch(env.getUnTillDbVer(), repoUnTillDb);
        assertEquals(env.getUnTillDbVer().toReleaseZeroPatch().toNextPatch(), patchBranch.getVersion());

        // check tags
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUnTillDb, env.getUnTillDbVer()));

        // check Delayed Tags file is emptied once the tag is applied
        assertTrue(dtf.getContent().isEmpty());
    }

    /** If the patch version was bumped manually, tagging does not bump it again. */
    @Test
    public void testDelayedTagVersionUsageIfPatchBumped() {
        fork(compUnTillDb);
        IAction action = execAndGetActionBuildDelayedTag(compUnTillDb);
        assertActionDoesBuildDelayedTag(action, compUnTillDb);
        new DevelopBranch(compUnTillDb, repoUnTillDb).getVersion();

        // simulate version is raised already in release branch (e.g. built manually)
        IVCS vcs = repoUnTillDb.getVCS();
        ReleaseBranchPatch patchBranch = ReleaseBranchFactory.getReleaseBranchPatch(env.getUnTillDbVer(), repoUnTillDb);
        VCSCommit patchBranchHeadCommit = vcs.setFileContent(patchBranch.getName(), Constants.VER_FILE_NAME,
                new Version(vcs.getFileContent(patchBranch.getName(), Constants.VER_FILE_NAME, null))
                        .toNextPatch().toNextPatch().toString(),
                "patch bumped");

        // tag delayed
        action = execAndGetActionTag(compUnTillDb, null);
        assertActionDoesTag(action, compUnTillDb);

        assertEquals(env.getUnTillDbVer().toReleaseZeroPatch().toString(), vcs.getTags().get(0).getTagName());

        // check version is not bumped because it is bumped already
        assertEquals(patchBranchHeadCommit, env.getUnTillDbVCS().getHeadCommit(patchBranch.getName()));
    }

    /** The delayed tag's version is used even when the current release branch has moved on. */
    @Test
    public void testDealyedTagVersionUsageOnDifferentCRB() {
        fork(compUnTillDb);
        IAction action = execAndGetActionBuildDelayedTag(compUnTillDb);
        assertActionDoesBuildDelayedTag(action, compUnTillDb);

        // make next build
        env.generateFeatureCommit(env.getUnTillDbVCS(), repoUnTillDb.getDevelopBranch(), "feature added to dev branch");
        fork(compUnTillDb, 2);
        execAndGetActionBuild(compUnTillDb);

        // generate next feature to make CRB differ
        env.generateFeatureCommit(env.getUnTillDbVCS(), repoUnTillDb.getDevelopBranch(), "feature added to dev branch");

        // make a tag for version which is not current CRB
        action = execAndGetActionTag(compUnTillDb, null);
        assertActionDoesTag(action, compUnTillDb);

        // ensure the version for delayed tag is used
        ReleaseBranchPatch patch = ReleaseBranchFactory.getReleaseBranchPatch(env.getUnTillDbVer(), repoUnTillDb);
        assertEquals(env.getUnTillDbVer().toReleaseZeroPatch().toNextPatch(), patch.getVersion());

        // check tags
        assertTrue(isPreHeadCommitTaggedWithVersion(repoUnTillDb, env.getUnTillDbVer()));

        // check Delayed Tags file is emptied once the tag is applied
        assertTrue(dtf.getContent().isEmpty());
    }

    /**
     * Returns true when the commit just before HEAD of the patch release branch
     * carries a tag matching the release string for {@code forVersion}.
     *
     * @param repo       repository to inspect
     * @param forVersion version whose release tag is expected on the pre-HEAD commit
     */
    private boolean isPreHeadCommitTaggedWithVersion(VCSRepository repo, Version forVersion) {
        ReleaseBranchPatch rb = ReleaseBranchFactory.getReleaseBranchPatch(forVersion, repo);
        // get(1) of a 2-commit DESC range == the commit just before HEAD
        List<VCSTag> tags = repo.getVCS().getTagsOnRevision(
                repo.getVCS().getCommitsRange(rb.getName(), null, WalkDirection.DESC, 2).get(1).getRevision());
        for (VCSTag tag : tags) {
            if (tag.getTagName().equals(rb.getVersion().toPreviousPatch().toReleaseString())) {
                return true;
            }
        }
        return false;
    }
}
package com.github.irshulx.Components;

import android.app.Activity;
import android.graphics.Color;
import android.text.TextUtils;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;

import com.github.irshulx.EditorCore;
import com.github.irshulx.EditorComponent;
import com.github.irshulx.R;
import com.github.irshulx.Utilities.Utilities;
import com.github.irshulx.models.EditorContent;
import com.github.irshulx.models.EditorControl;
import com.github.irshulx.models.EditorType;
import com.github.irshulx.models.Node;
import com.github.irshulx.models.RenderType;

import org.jsoup.nodes.Attribute;
import org.jsoup.nodes.Element;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Editor component that hosts "macro" blocks: arbitrary custom views keyed by a
 * macro name plus a settings map. Handles inserting the view into the editor,
 * serializing it to/from HTML ({@code data-*} attributes), and a tap-to-reveal
 * delete overlay.
 */
public class MacroExtensions extends EditorComponent {
    private final EditorCore editorCore;

    public MacroExtensions(EditorCore editorCore) {
        super(editorCore);
        this.editorCore = editorCore;
    }

    /**
     * Wraps {@code view} in a frame with a hidden delete overlay and inserts it
     * into the editor's parent view.
     *
     * @param name     macro name stored on the control tag
     * @param view     the custom view rendering the macro
     * @param settings macro settings stored on the control tag
     * @param index    insertion position, or -1 to let the editor determine it
     */
    public void insertMacro(String name, View view, Map<String, Object> settings, int index) {
        final FrameLayout frameLayout = new FrameLayout(editorCore.getContext());
        frameLayout.addView(view);

        // semi-transparent overlay with a close button; hidden until tapped
        final FrameLayout overlay = new FrameLayout(frameLayout.getContext());
        overlay.setVisibility(View.GONE);
        overlay.setPadding(0, 0, 20, 0);
        overlay.setBackgroundColor(Color.argb(50, 0, 0, 0));
        ImageView imageView = new ImageView(overlay.getContext());
        FrameLayout.LayoutParams params = new FrameLayout.LayoutParams(
                Utilities.dpToPx(frameLayout.getContext(), 40),
                Utilities.dpToPx(frameLayout.getContext(), 40));
        params.gravity = Gravity.RIGHT | Gravity.CENTER_VERTICAL;
        imageView.setLayoutParams(params);
        imageView.setImageResource(R.drawable.ic_close_white_36dp);
        overlay.addView(imageView);
        frameLayout.addView(overlay);

        // the close button removes the whole macro from the editor
        imageView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                editorCore.getParentView().removeView(frameLayout);
            }
        });

        EditorControl control = editorCore.createTag(EditorType.macro);
        control.macroSettings = settings;
        control.macroName = name;
        if (index == -1) {
            index = editorCore.determineIndex(EditorType.macro);
        }
        frameLayout.setTag(control);
        //        frameLayout.setOnClickListener(new View.OnClickListener() {
        //            @Override
        //            public void onClick(View view) {
        //                if(overlay.getVisibility()==View.VISIBLE){
        //                    overlay.setVisibility(View.GONE);
        //                }else{
        //                    overlay.setVisibility(View.VISIBLE);
        //                }
        //            }
        //        });
        editorCore.getParentView().addView(frameLayout, index);

        // in pure renderer mode the macro is not interactive
        if (editorCore.getRenderType() == RenderType.Renderer)
            return;

        view.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View view, MotionEvent event) {
                if (event.getAction() == MotionEvent.ACTION_UP) {
                    int paddingTop = view.getPaddingTop();
                    int paddingBottom = view.getPaddingBottom();
                    int height = view.getHeight();
                    // taps on the top/bottom padding move the cursor before/after
                    // the macro; taps in the middle toggle the delete overlay
                    if (event.getY() < paddingTop) {
                        editorCore.___onViewTouched(0, editorCore.getParentView().indexOfChild(frameLayout));
                    } else if (event.getY() > height - paddingBottom) {
                        editorCore.___onViewTouched(1, editorCore.getParentView().indexOfChild(frameLayout));
                    } else {
                        if (overlay.getVisibility() == View.VISIBLE) {
                            overlay.setVisibility(View.GONE);
                        } else {
                            overlay.setVisibility(View.VISIBLE);
                        }
                    }
                    return false;
                }
                // NOTE(review): returning true here consumes all non-UP events,
                // which appears intentional so only the release is handled — confirm
                return true;//hmmmm....
            }
        });
        // hide the delete overlay whenever the macro loses focus
        frameLayout.setOnFocusChangeListener(new View.OnFocusChangeListener() {
            @Override
            public void onFocusChange(View view, boolean b) {
                if (!b) {
                    overlay.setVisibility(View.GONE);
                }
            }
        });
    }

    /**
     * Serializes a macro view back into a {@link Node}: the macro name becomes
     * the node's single content entry and its settings are copied over.
     */
    @Override
    public Node getContent(View view) {
        Node node = this.getNodeInstance(view);
        EditorControl macroTag = (EditorControl) view.getTag();
        node.content.add(macroTag.macroName);
        node.macroSettings = macroTag.macroSettings;
        return node;
    }

    /** Renders the node as an HTML macro element. */
    @Override
    public String getContentAsHTML(Node node, EditorContent content) {
        return getAsHtml(node.content.get(0), node.macroSettings);
    }

    /**
     * Builds the HTML form of a macro: an element named after the macro with
     * {@code data-tag="macro"} and one {@code data-*} attribute per setting
     * (keys not already prefixed with "data-" get the prefix added).
     */
    private String getAsHtml(String name, Map<String, Object> macroSettings) {
        String template = "<{{$tag}} data-tag=\"macro\" {{$settings}}></{{$tag}}>";
        template = template.replace("{{$tag}}", name);
        StringBuilder dataTags = new StringBuilder();
        for (Map.Entry<String, Object> item : macroSettings.entrySet()) {
            // "data-tag" is fixed in the template; never emit it from settings
            if (item.getKey().equalsIgnoreCase("data-tag"))
                continue;
            dataTags.append(" ");
            if (item.getKey().contains("data-")) {
                dataTags.append(item.getKey());
            } else {
                dataTags.append("data-" + item.getKey());
            }
            dataTags.append("=\"").append(String.valueOf(item.getValue())).append("\"");
        }
        if (TextUtils.isEmpty(dataTags)) {
            template = template.replace("{{$settings}}", "");
        } else {
            template = template.replace("{{$settings}}", dataTags.toString());
        }
        return template;
    }

    /**
     * Re-creates the macro view from serialized editor state, asking the host's
     * listener to render it and falling back to a placeholder when it declines.
     */
    @Override
    public void renderEditorFromState(Node node, EditorContent content) {
        int index = editorCore.getChildCount();
        View view = editorCore.getEditorListener().onRenderMacro(node.content.get(0), node.macroSettings, editorCore.getChildCount());
        if (view == null)
            view = getEmptyMacro(node.content.get(0), node.macroSettings);
        insertMacro(node.content.get(0), view, node.macroSettings, index);
    }

    /** Inflates the default placeholder shown when no renderer handles the macro. */
    private View getEmptyMacro(String name, Map<String, Object> macroSettings) {
        final View layout = ((Activity) editorCore.getContext()).getLayoutInflater().inflate(R.layout.default_macro, null);
        TextView message = layout.findViewById(R.id.txtMessage);
        message.setText("Unhandled macro "+ "\""+getAsHtml(name,macroSettings)+"\"");
        return layout;
    }

    /**
     * Rebuilds a macro from its HTML element: tag name becomes the macro name,
     * all attributes become settings. Inserts the view directly and returns
     * null because the element is fully consumed here.
     */
    @Override
    public Node buildNodeFromHTML(Element element) {
        String tag = element.tagName().toLowerCase();
        Node node = getNodeInstance(EditorType.macro);
        node.content.add(tag);
        List<Attribute> attrs = element.attributes().asList();
        if (!attrs.isEmpty()) {
            node.macroSettings = new HashMap<>();
            for (Attribute attr : attrs) {
                node.macroSettings.put(attr.getKey(), attr.getValue());
            }
        }
        int index = editorCore.getChildCount();
        View view = editorCore.getEditorListener().onRenderMacro(tag, node.macroSettings, editorCore.getChildCount());
        if (view == null)
            view = getEmptyMacro(node.content.get(0), node.macroSettings);
        insertMacro(tag, view, node.macroSettings, index);
        return null;
    }

    @Override
    public void init(ComponentsWrapper componentsWrapper) {
        this.componentsWrapper = componentsWrapper;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.datatorrent.contrib.cassandra; import java.lang.reflect.Field; import java.math.BigDecimal; import java.util.*; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import com.datastax.driver.core.*; import com.datastax.driver.core.exceptions.DriverException; import com.datatorrent.api.AutoMetric; import com.datatorrent.api.Context; import com.datatorrent.api.DefaultInputPort; import com.datatorrent.api.DefaultOutputPort; import com.datatorrent.api.annotation.InputPortFieldAnnotation; import com.datatorrent.api.annotation.OutputPortFieldAnnotation; import com.datatorrent.lib.util.FieldInfo; import com.datatorrent.lib.util.PojoUtils; import com.datatorrent.lib.util.PojoUtils.*; /** * <p> * CassandraOutputOperator class.</p> * A Generic implementation of AbstractCassandraTransactionableOutputOperatorPS which takes in any POJO. 
* * @displayName Cassandra Output Operator * @category Output * @tags database, nosql, pojo, cassandra * @since 2.1.0 */ @Evolving public class CassandraPOJOOutputOperator extends AbstractCassandraTransactionableOutputOperator<Object> { private List<FieldInfo> fieldInfos; private String tablename; private String query; protected final transient ArrayList<DataType> columnDataTypes; protected final transient ArrayList<Object> getters; protected transient Class<?> pojoClass; @AutoMetric private long successfulRecords; @AutoMetric private long errorRecords; /** * The input port on which tuples are received for writing. */ @InputPortFieldAnnotation(optional = true) public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>() { @Override public void setup(Context.PortContext context) { pojoClass = context.getValue(Context.PortContext.TUPLE_CLASS); } @Override public void process(Object tuple) { CassandraPOJOOutputOperator.super.input.process(tuple); } }; @OutputPortFieldAnnotation(error = true) public final transient DefaultOutputPort<Object> error = new DefaultOutputPort<>(); public CassandraPOJOOutputOperator() { super(); columnDataTypes = new ArrayList<DataType>(); getters = new ArrayList<Object>(); } @Override public void beginWindow(long windowId) { super.beginWindow(windowId); successfulRecords = 0; errorRecords = 0; } @Override public void activate(Context.OperatorContext context) { com.datastax.driver.core.ResultSet rs = store.getSession().execute("select * from " + store.keyspace + "." + tablename); final ColumnDefinitions rsMetaData = rs.getColumnDefinitions(); if(fieldInfos == null) { populateFieldInfosFromPojo(rsMetaData); } for (FieldInfo fieldInfo : getFieldInfos()) { // get the designated column's data type. 
final DataType type = rsMetaData.getType(fieldInfo.getColumnName()); columnDataTypes.add(type); final Object getter; final String getterExpr = fieldInfo.getPojoFieldExpression(); switch (type.getName()) { case ASCII: case TEXT: case VARCHAR: getter = PojoUtils.createGetter(pojoClass, getterExpr, String.class); break; case BOOLEAN: getter = PojoUtils.createGetterBoolean(pojoClass, getterExpr); break; case INT: getter = PojoUtils.createGetterInt(pojoClass, getterExpr); break; case BIGINT: case COUNTER: getter = PojoUtils.createGetterLong(pojoClass, getterExpr); break; case FLOAT: getter = PojoUtils.createGetterFloat(pojoClass, getterExpr); break; case DOUBLE: getter = PojoUtils.createGetterDouble(pojoClass, getterExpr); break; case DECIMAL: getter = PojoUtils.createGetter(pojoClass, getterExpr, BigDecimal.class); break; case SET: getter = PojoUtils.createGetter(pojoClass, getterExpr, Set.class); break; case MAP: getter = PojoUtils.createGetter(pojoClass, getterExpr, Map.class); break; case LIST: getter = PojoUtils.createGetter(pojoClass, getterExpr, List.class); break; case TIMESTAMP: getter = PojoUtils.createGetter(pojoClass, getterExpr, Date.class); break; case UUID: getter = PojoUtils.createGetter(pojoClass, getterExpr, UUID.class); break; default: getter = PojoUtils.createGetter(pojoClass, getterExpr, Object.class); break; } getters.add(getter); } super.activate(context); } private void populateFieldInfosFromPojo(ColumnDefinitions rsMetaData) { fieldInfos = Lists.newArrayList(); Field[] fields = pojoClass.getDeclaredFields(); for (int i = 0; i < rsMetaData.size(); i++) { String columnName = rsMetaData.getName(i); String pojoField = getMatchingField(fields, columnName); if (pojoField != null && pojoField.length() != 0) { fieldInfos.add(new FieldInfo(columnName, pojoField, null)); } else { LOG.warn("Couldn't find corrosponding pojo field for column: " + columnName); } } } private String getMatchingField(Field[] fields, String columnName) { for (Field f : fields) { 
if (f.getName().equalsIgnoreCase(columnName)) { return f.getName(); } } return null; } /** * {@inheritDoc} <br/> * If statement/query is not specified by user, insert query is constructed from fileInfo object and table name. */ @Override protected PreparedStatement getUpdateCommand() { PreparedStatement statement; if (query == null) { statement = prepareStatementFromFieldsAndTableName(); } else { statement = store.getSession().prepare(query); } LOG.debug("Statement is: " + statement.getQueryString()); return statement; } private PreparedStatement prepareStatementFromFieldsAndTableName() { if (tablename == null || tablename.length() == 0) { throw new RuntimeException("Please sepcify query or table name."); } StringBuilder queryfields = new StringBuilder(); StringBuilder values = new StringBuilder(); for (FieldInfo fieldInfo: fieldInfos) { if (queryfields.length() == 0) { queryfields.append(fieldInfo.getColumnName()); values.append("?"); } else { queryfields.append(",").append(fieldInfo.getColumnName()); values.append(",").append("?"); } } String statement = "INSERT INTO " + store.keyspace + "." 
+ tablename + " (" + queryfields.toString() + ") " + "VALUES (" + values.toString() + ");"; LOG.debug("statement is {}", statement); return store.getSession().prepare(statement); } @Override @SuppressWarnings("unchecked") protected Statement setStatementParameters(PreparedStatement updateCommand, Object tuple) throws DriverException { final BoundStatement boundStmnt = new BoundStatement(updateCommand); final int size = columnDataTypes.size(); for (int i = 0; i < size; i++) { final DataType type = columnDataTypes.get(i); switch (type.getName()) { case UUID: final UUID id = ((Getter<Object, UUID>)getters.get(i)).get(tuple); boundStmnt.setUUID(i, id); break; case ASCII: case VARCHAR: case TEXT: final String ascii = ((Getter<Object, String>)getters.get(i)).get(tuple); boundStmnt.setString(i, ascii); break; case BOOLEAN: final boolean bool = ((GetterBoolean<Object>)getters.get(i)).get(tuple); boundStmnt.setBool(i, bool); break; case INT: final int intValue = ((GetterInt<Object>)getters.get(i)).get(tuple); boundStmnt.setInt(i, intValue); break; case BIGINT: case COUNTER: final long longValue = ((GetterLong<Object>)getters.get(i)).get(tuple); boundStmnt.setLong(i, longValue); break; case FLOAT: final float floatValue = ((GetterFloat<Object>)getters.get(i)).get(tuple); boundStmnt.setFloat(i, floatValue); break; case DOUBLE: final double doubleValue = ((GetterDouble<Object>)getters.get(i)).get(tuple); boundStmnt.setDouble(i, doubleValue); break; case DECIMAL: final BigDecimal decimal = ((Getter<Object, BigDecimal>)getters.get(i)).get(tuple); boundStmnt.setDecimal(i, decimal); break; case SET: Set<?> set = ((Getter<Object, Set<?>>)getters.get(i)).get(tuple); boundStmnt.setSet(i, set); break; case MAP: final Map<?,?> map = ((Getter<Object, Map<?,?>>)getters.get(i)).get(tuple); boundStmnt.setMap(i, map); break; case LIST: final List<?> list = ((Getter<Object, List<?>>)getters.get(i)).get(tuple); boundStmnt.setList(i, list); break; case TIMESTAMP: final Date date = 
((Getter<Object, Date>)getters.get(i)).get(tuple); boundStmnt.setDate(i, date); break; default: throw new RuntimeException("unsupported data type " + type.getName()); } } return boundStmnt; } @Override public void processTuple(Object tuple) { try { super.processTuple(tuple); successfulRecords++; } catch (RuntimeException e) { LOG.error(e.getMessage()); error.emit(tuple); errorRecords++; } } /** * A list of {@link FieldInfo}s where each item maps a column name to a pojo field name. */ public List<FieldInfo> getFieldInfos() { return fieldInfos; } /** * Sets the {@link FieldInfo}s. A {@link FieldInfo} maps a store column to a pojo field name.<br/> * The value from fieldInfo.column is assigned to fieldInfo.pojoFieldExpression. * * @description $[].columnName name of the database column name * @description $[].pojoFieldExpression pojo field name or expression * @useSchema $[].pojoFieldExpression input.fields[].name */ public void setFieldInfos(List<FieldInfo> fieldInfos) { this.fieldInfos = fieldInfos; } /** * Gets cassandra table name * @return tableName */ public String getTablename() { return tablename; } /** * Sets cassandra table name (optional if query is specified) * @param tablename */ public void setTablename(String tablename) { this.tablename = tablename; } /** * Gets cql Query * @return query */ public String getQuery() { return query; } /** * Sets cql Query * @param query */ public void setQuery(String query) { this.query = query; } private static final Logger LOG = LoggerFactory.getLogger(CassandraPOJOOutputOperator.class); }
/*******************************************************************************
 * Copyright 2013-2015 alladin-IT GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package at.alladin.rmbt.android.impl;

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import android.content.Context;
import android.graphics.Bitmap;
import android.net.TrafficStats;
import android.os.Handler;
import android.os.Process;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import at.alladin.rmbt.android.util.AsyncHtmlContentRetriever;
import at.alladin.rmbt.android.util.AsyncHtmlContentRetriever.ContentRetrieverListener;
import at.alladin.rmbt.client.v2.task.service.WebsiteTestService;

/**
 * {@link WebsiteTestService} implementation that loads a target URL in an
 * Android {@link WebView} and records render duration, HTTP status, resource
 * count and the rx/tx traffic consumed while loading.
 *
 * <p>All WebView interaction is posted onto the main looper via {@link Handler};
 * the timeout is enforced by a separate plain {@link Thread}. State flags are
 * {@link AtomicBoolean}s because they are read/written from both the main
 * thread (WebView callbacks) and the timeout thread.</p>
 *
 * @author lb
 */
public class WebsiteTestServiceImpl implements WebsiteTestService {

    /**
     * <p>
     * if set to true the traffic will be recorded using
     * {@link TrafficStats#getUidRxBytes(int)} and {@link TrafficStats#getUidTxPackets(int)}
     * </p>
     * <p>
     * otherwise this service will call:
     * {@link TrafficStats#getTotalRxBytes()} and {@link TrafficStats#getTotalTxPackets()}
     * </p>
     */
    private final static boolean USE_PROCESS_UID_FOR_TRAFFIC_MEASUREMENT = true;

    // Lazily created on first run(); reused (with cache cleared) afterwards.
    private WebView webView;

    private final Context context;

    // Test lifecycle flags, shared between main thread and timeout thread.
    private final AtomicBoolean isRunning = new AtomicBoolean(false);
    private final AtomicBoolean hasFinished = new AtomicBoolean(false);
    private final AtomicBoolean hasError = new AtomicBoolean(false);

    // Number of sub-resources the WebView requested (see onLoadResource).
    private final AtomicInteger resourceCount = new AtomicInteger(0);

    // HTTP status from the AsyncHtmlContentRetriever; -1 until known.
    private int statusCode = -1;

    // Page load duration in nanoseconds; -1 until a load finished/failed.
    private long duration = -1;

    private RenderingListener listener;

    // Traffic counters; all set to -1 when TrafficStats is unsupported.
    private long trafficRxStart;
    private long trafficTxStart;
    private long trafficRxEnd;
    private long trafficTxEnd;

    // Bound to the main looper so WebView is only touched on the UI thread.
    private final Handler handler;

    // UID of this app process, for per-UID traffic accounting.
    private int processUid;

    public WebsiteTestServiceImpl(final Context context) {
        this.context = context;
        this.handler = new Handler(context.getMainLooper());
    }

    /**
     * Factory method: returns a fresh instance sharing this one's Context.
     * (Note: despite the name, this does NOT return {@code this}.)
     *
     * @return a new {@link WebsiteTestServiceImpl}
     */
    public WebsiteTestServiceImpl getInstance() {
        return new WebsiteTestServiceImpl(context);
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#getHash()
     */
    @Override
    public String getHash() {
        // Not implemented: no content hash is computed by this implementation.
        return null;
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#getDownloadDuration()
     */
    @Override
    public long getDownloadDuration() {
        return duration;
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#run(java.lang.String, long)
     */
    @Override
    public void run(final String targetUrl, final long timeOut) {
        // Everything that touches the WebView must run on the main looper.
        handler.post(new Runnable() {
            @Override
            public void run() {
                WebsiteTestServiceImpl.this.processUid = Process.myUid();
                //webView.removeAllViews();
                if (webView == null) webView = new WebView(context);
                // Clear the cache so the measurement reflects real network traffic.
                webView.clearCache(true);
                final long start = System.nanoTime();
                System.out.println("Running WEBSITETASK " + targetUrl);

                // TrafficStats may be unsupported on some devices; probe first.
                boolean isTrafficServiceSupported = USE_PROCESS_UID_FOR_TRAFFIC_MEASUREMENT ?
                    TrafficStats.getUidRxBytes(processUid) != TrafficStats.UNSUPPORTED :
                    TrafficStats.getTotalRxBytes() != TrafficStats.UNSUPPORTED;

                if (!isTrafficServiceSupported) {
                    // Sentinel: -1 everywhere means "no traffic data available".
                    trafficRxStart = -1;
                    trafficTxStart = -1;
                    trafficRxEnd = -1;
                    trafficTxEnd = -1;
                } else {
                    if (USE_PROCESS_UID_FOR_TRAFFIC_MEASUREMENT) {
                        trafficTxStart = TrafficStats.getUidTxBytes(processUid);
                        trafficRxStart = TrafficStats.getUidRxBytes(processUid);
                    } else {
                        trafficTxStart = TrafficStats.getTotalTxBytes();
                        trafficRxStart = TrafficStats.getTotalRxBytes();
                    }
                }

                // Watchdog: sleeps for timeOut ms, then — if the page has not
                // finished — notifies the listener and (if the listener says so)
                // stops the WebView on the main thread.
                Thread timeoutThread = new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            System.out.println("WEBSITETASK STARTING TIMEOUT THREAD: " + timeOut + " ms");
                            Thread.sleep(timeOut);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                            Thread.currentThread().interrupt(); // restore interrupt state
                            return;
                        }
                        if (!WebsiteTestServiceImpl.this.hasFinished() && listener != null) {
                            setEndTrafficCounter();
                            if (listener.onTimeoutReached(WebsiteTestServiceImpl.this)) {
                                System.out.println("WEBSITETESTTASK TIMEOUT");
                                // stopLoading must happen on the UI thread.
                                WebsiteTestServiceImpl.this.handler.post(new Runnable() {
                                    @Override
                                    public void run() {
                                        WebsiteTestServiceImpl.this.webView.stopLoading();
                                    }
                                });
                            }
                        }
                    }
                });
                timeoutThread.start();

                webView.getSettings().setJavaScriptEnabled(true);
                webView.setWebViewClient(new WebViewClient() {
                    /*
                    @Override
                    public boolean shouldOverrideUrlLoading(WebView view, String url) {
                        view.loadUrl(url);
                        return true;
                    }
                    */

                    @Override
                    public void onLoadResource(WebView view, String url) {
                        // Counts every sub-resource request (images, scripts, ...).
                        System.out.println("getting resource: " + url + " progress: " + view.getProgress());
                        resourceCount.incrementAndGet();
                        super.onLoadResource(view, url);
                    }

                    @Override
                    public void onPageFinished(WebView view, String url) {
                        super.onPageFinished(view, url);
                        WebsiteTestServiceImpl.this.isRunning.set(false);
                        WebsiteTestServiceImpl.this.hasFinished.set(true);
                        WebsiteTestServiceImpl.this.hasError.set(false);
                        WebsiteTestServiceImpl.this.duration = System.nanoTime() - start;
                        // Only snapshot counters if traffic measurement is supported.
                        if (WebsiteTestServiceImpl.this.trafficRxStart != -1) {
                            setEndTrafficCounter();
                        }
                        System.out.println("PAGE FINISHED " + targetUrl + " progress: "
                            + view.getProgress() + "%, resources counter: " + resourceCount.get());
                        if (listener != null) {
                            listener.onRenderFinished(WebsiteTestServiceImpl.this);
                        }
                    }

                    @Override
                    public void onPageStarted(final WebView view, String url, Bitmap favicon) {
                        WebsiteTestServiceImpl.this.isRunning.set(true);
                        WebsiteTestServiceImpl.this.hasFinished.set(false);
                        WebsiteTestServiceImpl.this.hasError.set(false);
                        if (listener != null) {
                            listener.onDownloadStarted(WebsiteTestServiceImpl.this);
                        }
                        System.out.println("PAGE STARTED " + targetUrl);
                        super.onPageStarted(view, url, favicon);
                    }

                    @Override
                    public void onReceivedError(WebView view, int errorCode,
                            String description, String failingUrl) {
                        super.onReceivedError(view, errorCode, description, failingUrl);
                        WebsiteTestServiceImpl.this.isRunning.set(false);
                        WebsiteTestServiceImpl.this.hasFinished.set(true);
                        WebsiteTestServiceImpl.this.hasError.set(true);
                        WebsiteTestServiceImpl.this.duration = System.nanoTime() - start;
                        if (WebsiteTestServiceImpl.this.trafficRxStart != -1) {
                            setEndTrafficCounter();
                        }
                        if (listener != null) {
                            listener.onError(WebsiteTestServiceImpl.this);
                        }
                    }
                });

                // First fetch the URL via AsyncHtmlContentRetriever to obtain the
                // HTTP status code; only on success is the WebView asked to load
                // the page (which performs its own, second request).
                AsyncHtmlContentRetriever task = new AsyncHtmlContentRetriever();
                task.setContentRetrieverListener(new ContentRetrieverListener() {
                    @Override
                    public void onContentFinished(String htmlContent, int statusCode) {
                        WebsiteTestServiceImpl.this.statusCode = statusCode;
                        if (statusCode >= 0) {
                            //webView.loadDataWithBaseURL(targetUrl, htmlContent, "text/html", "utf-8", null);
                            webView.loadUrl(targetUrl);
                            //webView.loadData(htmlContent, "text/html", "utf-8");
                        } else {
                            // Retrieval failed: mark the test as finished with error.
                            WebsiteTestServiceImpl.this.isRunning.set(false);
                            WebsiteTestServiceImpl.this.hasFinished.set(true);
                            WebsiteTestServiceImpl.this.hasError.set(true);
                            WebsiteTestServiceImpl.this.duration = System.nanoTime() - start;
                            if (WebsiteTestServiceImpl.this.trafficRxStart != -1) {
                                setEndTrafficCounter();
                            }
                            if (listener != null) {
                                listener.onError(WebsiteTestServiceImpl.this);
                            }
                        }
                    }
                });
                task.execute(targetUrl);
                //webView.loadUrl(targetUrl);
            }
        });
    }

    /**
     * Snapshots the end rx/tx counters using the same source (per-UID vs. total)
     * that was used for the start counters.
     */
    private void setEndTrafficCounter() {
        if (USE_PROCESS_UID_FOR_TRAFFIC_MEASUREMENT) {
            this.trafficRxEnd = TrafficStats.getUidRxBytes(processUid);
            this.trafficTxEnd = TrafficStats.getUidTxBytes(processUid);
        } else {
            this.trafficRxEnd = TrafficStats.getTotalRxBytes();
            this.trafficTxEnd = TrafficStats.getTotalTxBytes();
        }
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#isRunning()
     */
    @Override
    public boolean isRunning() {
        final boolean isRunning = this.isRunning.get();
        return isRunning;
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#hasFinished()
     */
    @Override
    public boolean hasFinished() {
        final boolean hasFinished = this.hasFinished.get();
        return hasFinished;
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#setOnRenderingFinishedListener(at.alladin.rmbt.client.v2.task.WebsiteTest.RenderingFinishedListener)
     */
    @Override
    public void setOnRenderingFinishedListener(RenderingListener listener) {
        this.listener = listener;
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#hasError()
     */
    @Override
    public boolean hasError() {
        return this.hasError.get();
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#getStatusCode()
     */
    @Override
    public int getStatusCode() {
        return statusCode;
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#getTxBytes()
     */
    @Override
    public long getTxBytes() {
        // -1 start counter means traffic measurement was unsupported.
        return (trafficTxStart != -1 ? trafficTxEnd - trafficTxStart : -1);
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#getRxBytes()
     */
    @Override
    public long getRxBytes() {
        return (trafficRxStart != -1 ? trafficRxEnd - trafficRxStart : -1);
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.WebsiteTest#getTotalTrafficBytes()
     */
    @Override
    public long getTotalTrafficBytes() {
        // rx and tx counters are set together, so checking rx alone suffices.
        return (getRxBytes() != -1 ? getRxBytes() + getTxBytes() : -1);
    }

    /*
     * (non-Javadoc)
     * @see at.alladin.rmbt.client.v2.task.service.WebsiteTestService#getResourceCount()
     */
    @Override
    public int getResourceCount() {
        return resourceCount.get();
    }
}
/**
 *
 */
package gov.nih.nci.cagrid.portal.portlet.util;

import gov.nih.nci.cagrid.common.Utils;
import gov.nih.nci.cagrid.data.DataServiceConstants;
import gov.nih.nci.cagrid.fqp.common.DCQLConstants;
import gov.nih.nci.cagrid.portal.domain.PortalUser;
import gov.nih.nci.cagrid.portal.domain.catalog.CatalogEntry;
import gov.nih.nci.cagrid.portal.domain.catalog.CatalogEntryRelationshipInstance;
import gov.nih.nci.cagrid.portal.portlet.query.results.QueryResultToTableHandler;
import gov.nih.nci.cagrid.portal.portlet.query.results.QueryResultToWorkbookHandler;
import gov.nih.nci.cagrid.portal.util.PortalUtils;

import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.imageio.ImageIO;
import javax.portlet.PortletRequest;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.core.type.classreading.MetadataReader;
import org.springframework.core.type.classreading.MetadataReaderFactory;
import org.springframework.core.type.filter.TypeFilter;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

import com.liferay.portal.PortalException;
import com.liferay.portal.SystemException;
import com.liferay.portal.model.ResourceConstants;
import com.liferay.portal.service.ResourceLocalServiceUtil;

/**
 * Static utility methods used by the portal portlets: converting CQL query
 * results to tables/workbooks, inspecting CQL/DCQL query documents, class-path
 * scanning, and Liferay resource bookkeeping.
 *
 * @author <a href="mailto:joshua.phillips@semanticbits.com">Joshua Phillips</a>
 */
public class PortletUtils {

    private static final Log logger = LogFactory.getLog(PortletUtils.class);

    // public static final String EMPTY_RESULT_PATTERN =
    // "count(/CQLQueryResults/child::*) = 0";
    // public static final String OBJECT_RESULT_PATTERN =
    // "count(/CQLQueryResults/ObjectResult) > 0";
    // public static final String ATTRIBUTE_RESULT_PATTERN =
    // "count(/CQLQueryResults/AttributeResult) > 0";
    // public static final String COUNT_RESULT_PATTERN =
    // "count(/CQLQueryResults/CountResult) = 1";

    // Shared, lazily-initialized parser. SAXParser instances are not
    // thread-safe, so the two methods that initialize and use it are
    // synchronized (previously there was an unguarded init/use race).
    private static SAXParser parser;

    /**
     * Parses CQL query results from {@code in} into an HSSF workbook.
     *
     * @param colNames optional column names for the result sheet; may be null
     * @param in       stream containing the CQLQueryResults XML
     * @return the populated workbook
     * @throws Exception on parser configuration or parse failure
     */
    public static synchronized HSSFWorkbook buildWorkbookFromCQLResults(
            List<String> colNames, InputStream in) throws Exception {
        if (parser == null) {
            SAXParserFactory fact = SAXParserFactory.newInstance();
            fact.setNamespaceAware(true);
            parser = fact.newSAXParser();
        }
        QueryResultToWorkbookHandler handler = new QueryResultToWorkbookHandler();
        if (colNames != null) {
            handler.setColumnNames(colNames);
        }
        parser.parse(in, handler);
        return handler.getWorkbook();
    }

    /**
     * Parses CQL query results from {@code in} into a {@link Table}.
     *
     * @param colNames optional column names; may be null
     * @param in       stream containing the CQLQueryResults XML
     * @return the populated table
     * @throws Exception on parser configuration or parse failure
     */
    public static synchronized Table buildTableFromCQLResults(List<String> colNames,
            InputStream in) throws Exception {
        if (parser == null) {
            SAXParserFactory fact = SAXParserFactory.newInstance();
            fact.setNamespaceAware(true);
            parser = fact.newSAXParser();
        }
        QueryResultToTableHandler handler = new QueryResultToTableHandler();
        if (colNames != null) {
            handler.setColumnNames(colNames);
        }
        parser.parse(in, handler);
        return handler.getTable();
    }

    /**
     * Applies the "scrollOp" request parameter (first/previous/next/last)
     * to the given scroller; unknown operations are logged and ignored.
     */
    public static void doScrollOp(PortletRequest request, Scroller scroller) {
        String scrollOp = request.getParameter("scrollOp");
        logger.debug("scrollOp = '" + scrollOp + "'");
        if (!PortalUtils.isEmpty(scrollOp)) {
            if ("first".equals(scrollOp)) {
                scroller.first();
            } else if ("previous".equals(scrollOp)) {
                scroller.previous();
            } else if ("next".equals(scrollOp)) {
                scroller.next();
            } else if ("last".equals(scrollOp)) {
                scroller.last();
            } else {
                logger.warn("Invalid scroll operation: '" + scrollOp + "'");
            }
        }
    }

    /**
     * Splits {@code path} at the first '/' into {head, tail}.
     *
     * @return a 1-element array when there is no '/', otherwise
     *         {segment-before-first-slash, remainder-after-it}
     */
    public static String[] parsePath(String path) {
        String[] parts = null;
        int idx = path.indexOf("/");
        if (idx == -1) {
            parts = new String[] { path };
        } else {
            parts = new String[] { path.substring(0, idx),
                    path.substring(idx + 1) };
        }
        return parts;
    }

    /**
     * Returns the "name" attribute of the CQL Target (or DCQL TargetObject)
     * element, or null if it cannot be determined (errors are logged).
     */
    public static String getTargetUMLClassName(String cqlQuery) {
        String targetClassName = null;
        try {
            // NOTE: We don't need to worry about XML bomb here since,
            // CQL was already validated (i.e. parsed with Axis API which
            // disables DOCTYPE).
            Document doc = DocumentBuilderFactory.newInstance()
                    .newDocumentBuilder().parse(
                            // Explicit charset: getBytes() without one depends
                            // on the platform default encoding.
                            new ByteArrayInputStream(cqlQuery.getBytes(StandardCharsets.UTF_8)));
            XPathFactory xpFact = XPathFactory.newInstance();
            Element targetEl = (Element) xpFact.newXPath().compile(
                    "/CQLQuery/Target").evaluate(doc, XPathConstants.NODE);
            if (targetEl == null) {
                // Fall back to the DCQL form of the query.
                targetEl = (Element) xpFact.newXPath().compile(
                        "/DCQLQuery/TargetObject").evaluate(doc,
                        XPathConstants.NODE);
            }
            if (targetEl != null) {
                targetClassName = targetEl.getAttribute("name");
            }
        } catch (Exception ex) {
            logger.error("Error getting target class name: " + ex.getMessage(), ex);
        }
        return targetClassName;
    }

    /**
     * Returns true if the CQL query has QueryModifier/@countOnly = "true".
     *
     * @throws RuntimeException if the query cannot be parsed/evaluated
     */
    public static boolean isCountQuery(String cqlQuery) {
        boolean isCountQuery = false;
        try {
            Document doc = DocumentBuilderFactory.newInstance()
                    .newDocumentBuilder().parse(
                            new ByteArrayInputStream(cqlQuery.getBytes(StandardCharsets.UTF_8)));
            XPathFactory xpFact = XPathFactory.newInstance();
            String countOnly = (String) xpFact.newXPath().compile(
                    "/CQLQuery/QueryModifier/@countOnly").evaluate(doc,
                    XPathConstants.STRING);
            isCountQuery = "true".equals(countOnly);
        } catch (Exception ex) {
            String msg = "Error checking count query: " + ex.getMessage();
            logger.error(msg, ex);
            throw new RuntimeException(msg, ex);
        }
        return isCountQuery;
    }

    /**
     * Scans {@code packageName} (defaulting to the superclass's package) on the
     * classpath and returns every class assignable to {@code superclass}.
     *
     * @throws RuntimeException if scanning or class loading fails
     */
    public static List<Class> getSubclasses(String packageName,
            final Class superclass) {
        String pkgName = packageName;
        if (pkgName == null) {
            pkgName = superclass.getPackage().getName();
        }
        List<Class> subclasses = new ArrayList<Class>();
        ClassPathScanningCandidateComponentProvider provider =
                new ClassPathScanningCandidateComponentProvider(false);
        provider.addIncludeFilter(new TypeFilter() {
            public boolean match(MetadataReader reader,
                    MetadataReaderFactory factory) throws IOException {
                boolean match = false;
                try {
                    Class klass = Class.forName(reader.getClassMetadata()
                            .getClassName());
                    match = superclass.isAssignableFrom(klass);
                } catch (Exception ex) {
                    throw new RuntimeException("Error matching: "
                            + ex.getMessage(), ex);
                }
                return match;
            }
        });
        try {
            for (BeanDefinition def : provider.findCandidateComponents(pkgName)) {
                subclasses.add(Class.forName(def.getBeanClassName()));
            }
        } catch (Exception ex) {
            throw new RuntimeException("Error loading classes: "
                    + ex.getMessage(), ex);
        }
        return subclasses;
    }

    /**
     * Developer scratch entry point (thumbnail generation with hard-coded
     * local paths); not used by the portal at runtime.
     */
    public static void main(String[] args) throws Exception {
        // for(Class klass :
        // getSubclasses("gov.nih.nci.cagrid.portal.domain.catalog",
        // CatalogEntry.class)){
        // System.out.println(klass.getName());
        // }
        String inFilePath = "/Users/joshua/Desktop/person_placeholder_180px.png";
        String outFilePath = "/Users/joshua/Desktop/person_placeholder_50px.png";
        BufferedImage image = ImageIO.read(new FileInputStream(inFilePath));
        System.out.println("Height: " + image.getHeight() + ", Width: "
                + image.getWidth());
        // Height of -1 keeps the aspect ratio.
        Image thumb = image.getScaledInstance(50, -1, -1);
        ImageIO.write(ImageUtils.toBufferedImage(thumb), "png", new File(
                outFilePath));
    }

    /**
     * Extracts every /DCQLQuery/targetServiceURL text value from a DCQL query.
     *
     * @return the (possibly empty) set of distinct target service URLs
     * @throws Exception if the DCQL document cannot be parsed
     */
    public static Set<String> getTargetServiceUrls(String dcql)
            throws Exception {
        Set<String> urls = new HashSet<String>();
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder().parse(
                        new ByteArrayInputStream(dcql.getBytes(StandardCharsets.UTF_8)));
        XPathFactory xpFact = XPathFactory.newInstance();
        NodeList urlEls = (NodeList) xpFact.newXPath().compile(
                "/DCQLQuery/targetServiceURL").evaluate(doc,
                XPathConstants.NODESET);
        for (int i = 0; i < urlEls.getLength(); i++) {
            Element el = (Element) urlEls.item(i);
            urls.add(el.getTextContent());
        }
        return urls;
    }

    /**
     * Looks up a value keyed by class name, walking up the superclass chain of
     * {@code klass} until a mapping is found or Object's superclass (null) is
     * reached.
     *
     * @return the first matching value, or null if no ancestor's name is mapped
     */
    public static Object getMapValueForType(Class klass, Map map) {
        Object value = null;
        Class superclass = klass;
        while (true) {
            value = map.get(superclass.getName());
            if (value != null) {
                break;
            }
            superclass = superclass.getSuperclass();
            if (superclass == null) {
                break;
            }
        }
        return value;
    }

    /**
     * Registers a Liferay resource for the given entity on behalf of the
     * portal user (whose portalId is "companyId:userId").
     *
     * @throws RuntimeException wrapping any Liferay/parse failure
     */
    public static void addResource(PortalUser portalUser, Class klass,
            Integer id) {
        String[] portalId = portalUser.getPortalId().split(":");
        try {
            ResourceLocalServiceUtil.addResources(Long.parseLong(portalId[0]),
                    0, Long.parseLong(portalId[1]), klass.getName(), String
                            .valueOf(id), false, false, false);
        } catch (Exception ex) {
            throw new RuntimeException("Error creating resource: "
                    + ex.getMessage(), ex);
        }
    }

    /**
     * Deletes the Liferay resource registered for the given entity.
     *
     * @throws RuntimeException wrapping any Liferay/parse failure
     */
    public static void deleteResource(PortalUser portalUser, Class klass,
            Integer id) {
        String[] portalId = portalUser.getPortalId().split(":");
        try {
            ResourceLocalServiceUtil.deleteResource(
                    Long.parseLong(portalId[0]), klass.getName(),
                    ResourceConstants.SCOPE_INDIVIDUAL, String.valueOf(id));
        } catch (Exception ex) {
            // Fixed copy-paste bug: this previously said "Error creating
            // resource", which misattributed delete failures to creation.
            throw new RuntimeException("Error deleting resource: "
                    + ex.getMessage(), ex);
        }
    }

    /**
     * Deserializes CQL XML into a CQLQuery bean.
     *
     * @throws RuntimeException if deserialization fails
     */
    public static gov.nih.nci.cagrid.cqlquery.CQLQuery parseCQL(String queryXML) {
        try {
            return (gov.nih.nci.cagrid.cqlquery.CQLQuery) Utils
                    .deserializeObject(new StringReader(queryXML),
                            gov.nih.nci.cagrid.cqlquery.CQLQuery.class);
        } catch (Exception ex) {
            throw new RuntimeException("Error parsing CQL: " + ex.getMessage(),
                    ex);
        }
    }

    /**
     * Round-trips CQL XML (parse then re-serialize) to a canonical form.
     */
    public static String normalizeCQL(String queryXML) {
        StringWriter w = new StringWriter();
        try {
            Utils.serializeObject(parseCQL(queryXML),
                    DataServiceConstants.CQL_QUERY_QNAME, w);
        } catch (Exception ex) {
            throw new RuntimeException("Error normalizing CQL: "
                    + ex.getMessage(), ex);
        }
        return w.toString();
    }

    /**
     * Deserializes DCQL XML into a DCQLQuery bean.
     *
     * @throws RuntimeException if deserialization fails
     */
    public static gov.nih.nci.cagrid.dcql.DCQLQuery parseDCQL(String queryXML) {
        try {
            return (gov.nih.nci.cagrid.dcql.DCQLQuery) Utils.deserializeObject(
                    new StringReader(queryXML),
                    gov.nih.nci.cagrid.dcql.DCQLQuery.class);
        } catch (Exception ex) {
            throw new RuntimeException(
                    "Error parsing DCQL: " + ex.getMessage(), ex);
        }
    }

    /**
     * Round-trips DCQL XML (parse then re-serialize) to a canonical form.
     */
    public static String normalizeDCQL(String queryXML) {
        StringWriter w = new StringWriter();
        try {
            Utils.serializeObject(parseDCQL(queryXML),
                    DCQLConstants.DCQL_QUERY_QNAME, w);
        } catch (Exception ex) {
            throw new RuntimeException("Error normalizing DCQL: "
                    + ex.getMessage(), ex);
        }
        return w.toString();
    }

    /**
     * Probe: returns true iff {@code queryXML} parses as DCQL.
     * Parse failures are intentionally swallowed — "does not parse" is the
     * negative answer, not an error.
     */
    public static boolean isDCQL(String queryXML) {
        boolean isDCQL = false;
        try {
            parseDCQL(queryXML);
            isDCQL = true;
        } catch (Exception ignored) {
            // best-effort format probe; failure simply means "not DCQL"
        }
        return isDCQL;
    }

    /**
     * Probe: returns true iff {@code queryXML} parses as CQL.
     * Parse failures are intentionally swallowed — "does not parse" is the
     * negative answer, not an error.
     */
    public static boolean isCQL(String queryXML) {
        boolean isCQL = false;
        try {
            parseCQL(queryXML);
            isCQL = true;
        } catch (Exception ignored) {
            // best-effort format probe; failure simply means "not CQL"
        }
        return isCQL;
    }
}
/* * Copyright 2015-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertThat; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargetFactory; import com.facebook.buck.rules.BuildContext; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.CellPathResolver; import com.facebook.buck.rules.CommandTool; import com.facebook.buck.rules.DefaultSourcePathResolver; import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer; import com.facebook.buck.rules.FakeBuildContext; import com.facebook.buck.rules.FakeSourcePath; import com.facebook.buck.rules.HashedFileTool; import com.facebook.buck.rules.PathSourcePath; import com.facebook.buck.rules.RuleKey; import com.facebook.buck.rules.RuleKeyObjectSink; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.TargetGraph; import com.facebook.buck.rules.TestBuildRuleParams; import com.facebook.buck.rules.TestCellPathResolver; import com.facebook.buck.rules.Tool; import 
com.facebook.buck.rules.args.RuleKeyAppendableFunction; import com.facebook.buck.rules.args.StringArg; import com.facebook.buck.rules.coercer.FrameworkPath; import com.facebook.buck.rules.keys.DefaultRuleKeyFactory; import com.facebook.buck.testutil.FakeFileHashCache; import com.facebook.buck.testutil.FakeProjectFilesystem; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Optional; import org.junit.Test; public class CxxPreprocessAndCompileTest { private static class PreprocessorWithColorSupport extends GccPreprocessor { static final String COLOR_FLAG = "-use-color-in-preprocessor"; public PreprocessorWithColorSupport(Tool tool) { super(tool); } @Override public Optional<ImmutableList<String>> getFlagsForColorDiagnostics() { return Optional.of(ImmutableList.of(COLOR_FLAG)); } } private static class CompilerWithColorSupport extends DefaultCompiler { static final String COLOR_FLAG = "-use-color-in-compiler"; public CompilerWithColorSupport(Tool tool) { super(tool); } @Override public Optional<ImmutableList<String>> getFlagsForColorDiagnostics() { return Optional.of(ImmutableList.of(COLOR_FLAG)); } } private static final Preprocessor DEFAULT_PREPROCESSOR = new GccPreprocessor(new HashedFileTool(Paths.get("preprocessor"))); private static final Compiler DEFAULT_COMPILER = new GccCompiler(new HashedFileTool(Paths.get("compiler"))); private static final Preprocessor PREPROCESSOR_WITH_COLOR_SUPPORT = new PreprocessorWithColorSupport(new HashedFileTool(Paths.get("preprocessor"))); private static final Compiler COMPILER_WITH_COLOR_SUPPORT = new CompilerWithColorSupport(new HashedFileTool(Paths.get("compiler"))); private static final CxxToolFlags DEFAULT_TOOL_FLAGS = CxxToolFlags.explicitBuilder() .addPlatformFlags(StringArg.of("-fsanitize=address")) .addRuleFlags(StringArg.of("-O3")) .build(); private static final Path 
// NOTE(review): this chunk begins mid-declaration — the "private static final Path"
// introducing DEFAULT_OUTPUT sits above this view. Only comments/formatting changed here.
DEFAULT_OUTPUT = Paths.get("test.o");

// Shared fixture values used by every test below.
private static final SourcePath DEFAULT_INPUT = new FakeSourcePath("test.cpp");
private static final CxxSource.Type DEFAULT_INPUT_TYPE = CxxSource.Type.CXX;
private static final Path DEFAULT_WORKING_DIR = Paths.get(System.getProperty("user.dir"));

// Framework-path search function that deliberately contributes nothing to the rule key,
// so only the inputs under test can change the computed keys.
private static final RuleKeyAppendableFunction<FrameworkPath, Path>
    DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION =
        new RuleKeyAppendableFunction<FrameworkPath, Path>() {
          @Override
          public void appendToRuleKey(RuleKeyObjectSink sink) {
            // Do nothing.
          }

          @Override
          public Path apply(FrameworkPath input) {
            return Paths.get("test", "framework", "path", input.toString());
          }
        };

/**
 * Verifies that each individually varied compile input (compiler, operation, platform flags,
 * rule flags, source input) produces a rule key different from the all-defaults rule key.
 */
@Test
public void inputChangesCauseRuleKeyChangesForCompilation() throws Exception {
  SourcePathRuleFinder ruleFinder =
      new SourcePathRuleFinder(
          new BuildRuleResolver(
              TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer()));
  SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
  BuildRuleParams params = TestBuildRuleParams.create();
  // Fixed content hashes: rule-key differences can then only come from the varied field.
  FakeFileHashCache hashCache =
      FakeFileHashCache.createFromStrings(
          ImmutableMap.<String, String>builder()
              .put("preprocessor", Strings.repeat("a", 40))
              .put("compiler", Strings.repeat("a", 40))
              .put("test.o", Strings.repeat("b", 40))
              .put("test.cpp", Strings.repeat("c", 40))
              .put("different", Strings.repeat("d", 40))
              .put("foo/test.h", Strings.repeat("e", 40))
              .put("path/to/a/plugin.so", Strings.repeat("f", 40))
              .put("path/to/a/different/plugin.so", Strings.repeat("a0", 40))
              .build());

  // Generate a rule key for the defaults.
  RuleKey defaultRuleKey =
      new DefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder)
          .build(
              CxxPreprocessAndCompile.compile(
                  target,
                  projectFilesystem,
                  params,
                  new CompilerDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      DEFAULT_COMPILER,
                      DEFAULT_TOOL_FLAGS),
                  DEFAULT_OUTPUT,
                  DEFAULT_INPUT,
                  DEFAULT_INPUT_TYPE,
                  CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                  Optional.empty()));

  // Verify that changing the compiler causes a rulekey change.
  RuleKey compilerChange =
      new DefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder)
          .build(
              CxxPreprocessAndCompile.compile(
                  target,
                  projectFilesystem,
                  params,
                  new CompilerDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      new GccCompiler(new HashedFileTool(Paths.get("different"))),
                      DEFAULT_TOOL_FLAGS),
                  DEFAULT_OUTPUT,
                  DEFAULT_INPUT,
                  DEFAULT_INPUT_TYPE,
                  CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                  Optional.empty()));
  assertNotEquals(defaultRuleKey, compilerChange);

  // Verify that changing the operation causes a rulekey change.
  RuleKey operationChange =
      new DefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder)
          .build(
              CxxPreprocessAndCompile.preprocessAndCompile(
                  target,
                  projectFilesystem,
                  params,
                  new PreprocessorDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      CxxPlatformUtils.DEFAULT_PLATFORM.getHeaderVerification(),
                      DEFAULT_WORKING_DIR,
                      DEFAULT_PREPROCESSOR,
                      PreprocessorFlags.builder().build(),
                      DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
                      Optional.empty(),
                      /* leadingIncludePaths */ Optional.empty()),
                  new CompilerDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      DEFAULT_COMPILER,
                      DEFAULT_TOOL_FLAGS),
                  DEFAULT_OUTPUT,
                  DEFAULT_INPUT,
                  DEFAULT_INPUT_TYPE,
                  Optional.empty(),
                  CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                  Optional.empty()));
  assertNotEquals(defaultRuleKey, operationChange);

  // Verify that changing the platform flags causes a rulekey change.
  RuleKey platformFlagsChange =
      new DefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder)
          .build(
              CxxPreprocessAndCompile.compile(
                  target,
                  projectFilesystem,
                  params,
                  new CompilerDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      DEFAULT_COMPILER,
                      CxxToolFlags.explicitBuilder()
                          .addPlatformFlags(StringArg.of("-different"))
                          .setRuleFlags(DEFAULT_TOOL_FLAGS.getRuleFlags())
                          .build()),
                  DEFAULT_OUTPUT,
                  DEFAULT_INPUT,
                  DEFAULT_INPUT_TYPE,
                  CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                  Optional.empty()));
  assertNotEquals(defaultRuleKey, platformFlagsChange);

  // Verify that changing the rule flags causes a rulekey change.
  RuleKey ruleFlagsChange =
      new DefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder)
          .build(
              CxxPreprocessAndCompile.compile(
                  target,
                  projectFilesystem,
                  params,
                  new CompilerDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      DEFAULT_COMPILER,
                      CxxToolFlags.explicitBuilder()
                          .setPlatformFlags(DEFAULT_TOOL_FLAGS.getPlatformFlags())
                          .addAllRuleFlags(StringArg.from("-other", "flags"))
                          .build()),
                  DEFAULT_OUTPUT,
                  DEFAULT_INPUT,
                  DEFAULT_INPUT_TYPE,
                  CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                  Optional.empty()));
  assertNotEquals(defaultRuleKey, ruleFlagsChange);

  // Verify that changing the input causes a rulekey change.
  RuleKey inputChange =
      new DefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder)
          .build(
              CxxPreprocessAndCompile.compile(
                  target,
                  projectFilesystem,
                  params,
                  new CompilerDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      DEFAULT_COMPILER,
                      DEFAULT_TOOL_FLAGS),
                  DEFAULT_OUTPUT,
                  new FakeSourcePath("different"),
                  DEFAULT_INPUT_TYPE,
                  CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                  Optional.empty()));
  assertNotEquals(defaultRuleKey, inputChange);
}

/**
 * Verifies that a change to the {@code PreprocessorFlags} (here: an added framework path)
 * flows through the {@code PreprocessorDelegate} into a different rule key.
 */
@Test
public void preprocessorFlagsRuleKeyChangesCauseRuleKeyChangesForPreprocessing()
    throws Exception {
  SourcePathRuleFinder ruleFinder =
      new SourcePathRuleFinder(
          new BuildRuleResolver(
              TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer()));
  final SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
  final BuildRuleParams params = TestBuildRuleParams.create();
  final FakeFileHashCache hashCache =
      FakeFileHashCache.createFromStrings(
          ImmutableMap.<String, String>builder()
              .put("preprocessor", Strings.repeat("a", 40))
              .put("compiler", Strings.repeat("a", 40))
              .put("test.o", Strings.repeat("b", 40))
              .put("test.cpp", Strings.repeat("c", 40))
              .put("different", Strings.repeat("d", 40))
              .put("foo/test.h", Strings.repeat("e", 40))
              .put("path/to/a/plugin.so", Strings.repeat("f", 40))
              .put("path/to/a/different/plugin.so", Strings.repeat("a0", 40))
              .build());

  // Local helper: builds a rule key for a preprocess-and-compile rule that differs
  // only in the supplied PreprocessorFlags.
  class TestData {
    public RuleKey generate(PreprocessorFlags flags) throws Exception {
      return new DefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder)
          .build(
              CxxPreprocessAndCompile.preprocessAndCompile(
                  target,
                  projectFilesystem,
                  params,
                  new PreprocessorDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      CxxPlatformUtils.DEFAULT_PLATFORM.getHeaderVerification(),
                      DEFAULT_WORKING_DIR,
                      DEFAULT_PREPROCESSOR,
                      flags,
                      DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
                      Optional.empty(),
                      /* leadingIncludePaths */ Optional.empty()),
                  new CompilerDelegate(
                      pathResolver,
                      CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                      DEFAULT_COMPILER,
                      CxxToolFlags.of()),
                  DEFAULT_OUTPUT,
                  DEFAULT_INPUT,
                  DEFAULT_INPUT_TYPE,
                  Optional.empty(),
                  CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
                  Optional.empty()));
    }
  }
  TestData testData = new TestData();

  PreprocessorFlags defaultFlags = PreprocessorFlags.builder().build();
  PreprocessorFlags alteredFlags =
      defaultFlags.withFrameworkPaths(
          FrameworkPath.ofSourcePath(new FakeSourcePath("different")));
  assertNotEquals(testData.generate(defaultFlags), testData.generate(alteredFlags));
}

/** Verifies the exact argv emitted by the main compile step for a plain compile rule. */
@Test
public void usesCorrectCommandForCompile() {
  // Setup some dummy values for inputs to the CxxPreprocessAndCompile.
  SourcePathResolver pathResolver =
      DefaultSourcePathResolver.from(
          new SourcePathRuleFinder(
              new BuildRuleResolver(
                  TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
  BuildRuleParams params = TestBuildRuleParams.create();
  CxxToolFlags flags =
      CxxToolFlags.explicitBuilder()
          .addPlatformFlags(StringArg.of("-ffunction-sections"))
          .addRuleFlags(StringArg.of("-O3"))
          .build();
  Path output = Paths.get("test.o");
  Path input = Paths.get("test.ii");
  Path scratchDir = Paths.get("scratch");

  CxxPreprocessAndCompile buildRule =
      CxxPreprocessAndCompile.compile(
          target,
          projectFilesystem,
          params,
          new CompilerDelegate(
              pathResolver,
              CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
              DEFAULT_COMPILER,
              flags),
          output,
          new FakeSourcePath(input.toString()),
          DEFAULT_INPUT_TYPE,
          CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
          Optional.empty());

  // Platform flags must precede rule flags, then the language/-c/input/output switches.
  ImmutableList<String> expectedCompileCommand =
      ImmutableList.<String>builder()
          .add("compiler")
          .add("-ffunction-sections")
          .add("-O3")
          .add("-x", "c++")
          .add("-c")
          .add(input.toString())
          .add("-o", output.toString())
          .build();
  ImmutableList<String> actualCompileCommand =
      buildRule.makeMainStep(pathResolver, scratchDir, false).getCommand();
  assertEquals(expectedCompileCommand, actualCompileCommand);
}

/**
 * Verifies that the preprocessor/compiler tool inputs are reported by
 * getInputsAfterBuildingLocally even when they do not appear in the dep file.
 */
@Test
public void compilerAndPreprocessorAreAlwaysReturnedFromGetInputsAfterBuildingLocally()
    throws Exception {
  ProjectFilesystem filesystem = FakeProjectFilesystem.createJavaOnlyFilesystem();
  CellPathResolver cellPathResolver = TestCellPathResolver.get(filesystem);
  SourcePath preprocessor = new PathSourcePath(filesystem, filesystem.getPath("preprocessor"));
  Tool preprocessorTool = new CommandTool.Builder().addInput(preprocessor).build();
  SourcePath compiler = new PathSourcePath(filesystem, filesystem.getPath("compiler"));
  Tool compilerTool = new CommandTool.Builder().addInput(compiler).build();

  SourcePathResolver pathResolver =
      DefaultSourcePathResolver.from(
          new SourcePathRuleFinder(
              new BuildRuleResolver(
                  TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  BuildRuleParams params = TestBuildRuleParams.create();
  BuildContext context = FakeBuildContext.withSourcePathResolver(pathResolver);

  // Pre-seed the dep file the rule would have written during a build.
  filesystem.writeContentsToPath(
      "test.o: " + pathResolver.getRelativePath(DEFAULT_INPUT) + " ",
      filesystem.getPath("test.o.dep"));
  FakeSourcePath fakeInput = new FakeSourcePath(filesystem, "test.cpp");

  CxxPreprocessAndCompile cxxPreprocess =
      CxxPreprocessAndCompile.preprocessAndCompile(
          target,
          filesystem,
          params,
          new PreprocessorDelegate(
              pathResolver,
              CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
              CxxPlatformUtils.DEFAULT_PLATFORM.getHeaderVerification(),
              DEFAULT_WORKING_DIR,
              new GccPreprocessor(preprocessorTool),
              PreprocessorFlags.builder().build(),
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.empty(),
              /* leadingIncludePaths */ Optional.empty()),
          new CompilerDelegate(
              pathResolver,
              CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          fakeInput,
          DEFAULT_INPUT_TYPE,
          Optional.empty(),
          CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
          Optional.empty());
  assertThat(
      cxxPreprocess.getInputsAfterBuildingLocally(context, cellPathResolver),
      hasItem(preprocessor));

  CxxPreprocessAndCompile cxxCompile =
      CxxPreprocessAndCompile.compile(
          target,
          filesystem,
          params,
          new CompilerDelegate(
              pathResolver,
              CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
              new GccCompiler(compilerTool),
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          fakeInput,
          DEFAULT_INPUT_TYPE,
          CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
          Optional.empty());
  assertThat(
      cxxCompile.getInputsAfterBuildingLocally(context, cellPathResolver), hasItem(compiler));
}

/** Verifies the color-diagnostics flag is emitted only when colors are allowed. */
@Test
public void usesColorFlagForCompilationWhenRequested() {
  SourcePathResolver pathResolver =
      DefaultSourcePathResolver.from(
          new SourcePathRuleFinder(
              new BuildRuleResolver(
                  TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
  BuildRuleParams params = TestBuildRuleParams.create();
  Path output = Paths.get("test.o");
  Path input = Paths.get("test.ii");
  Path scratchDir = Paths.get("scratch");

  CompilerDelegate compilerDelegate =
      new CompilerDelegate(
          pathResolver,
          CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
          COMPILER_WITH_COLOR_SUPPORT,
          CxxToolFlags.of());
  CxxPreprocessAndCompile buildRule =
      CxxPreprocessAndCompile.compile(
          target,
          projectFilesystem,
          params,
          compilerDelegate,
          output,
          new FakeSourcePath(input.toString()),
          DEFAULT_INPUT_TYPE,
          CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
          Optional.empty());

  // NOTE(review): the first step uses the filesystem root as the scratch dir while the
  // second uses scratchDir — presumably intentional (scratch dir should not affect the
  // flag), but worth confirming.
  ImmutableList<String> command =
      buildRule
          .makeMainStep(pathResolver, buildRule.getProjectFilesystem().getRootPath(), false)
          .getArguments(/* allowColorsInDiagnostics */ false);
  assertThat(command, not(hasItem(CompilerWithColorSupport.COLOR_FLAG)));

  command =
      buildRule
          .makeMainStep(pathResolver, scratchDir, false)
          .getArguments(/* allowColorsInDiagnostics */ true);
  assertThat(command, hasItem(CompilerWithColorSupport.COLOR_FLAG));
}

/** Same as above, but for the combined preprocess-and-compile operation. */
@Test
public void usesColorFlagForPreprocessingWhenRequested() throws Exception {
  SourcePathResolver pathResolver =
      DefaultSourcePathResolver.from(
          new SourcePathRuleFinder(
              new BuildRuleResolver(
                  TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
  BuildRuleParams params = TestBuildRuleParams.create();
  Path output = Paths.get("test.ii");
  Path input = Paths.get("test.cpp");
  Path scratchDir = Paths.get("scratch");

  CxxPreprocessAndCompile buildRule =
      CxxPreprocessAndCompile.preprocessAndCompile(
          target,
          projectFilesystem,
          params,
          new PreprocessorDelegate(
              pathResolver,
              CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
              CxxPlatformUtils.DEFAULT_PLATFORM.getHeaderVerification(),
              DEFAULT_WORKING_DIR,
              PREPROCESSOR_WITH_COLOR_SUPPORT,
              PreprocessorFlags.builder().build(),
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.empty(),
              /* leadingIncludePaths */ Optional.empty()),
          new CompilerDelegate(
              pathResolver,
              CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
              COMPILER_WITH_COLOR_SUPPORT,
              CxxToolFlags.of()),
          output,
          new FakeSourcePath(input.toString()),
          DEFAULT_INPUT_TYPE,
          Optional.empty(),
          CxxPlatformUtils.DEFAULT_COMPILER_DEBUG_PATH_SANITIZER,
          Optional.empty());

  ImmutableList<String> command =
      buildRule
          .makeMainStep(pathResolver, scratchDir, false)
          .getArguments(/* allowColorsInDiagnostics */ false);
  assertThat(command, not(hasItem(PreprocessorWithColorSupport.COLOR_FLAG)));

  // NOTE(review): the positive assertion checks CompilerWithColorSupport.COLOR_FLAG rather
  // than PreprocessorWithColorSupport.COLOR_FLAG — if the two constants differ this may be
  // asserting the wrong flag; confirm against the test doubles' definitions.
  command =
      buildRule
          .makeMainStep(pathResolver, scratchDir, false)
          .getArguments(/* allowColorsInDiagnostics */ true);
  assertThat(command, hasItem(CompilerWithColorSupport.COLOR_FLAG));
}

/** Verifies that getGcnoPath swaps the ".o" suffix for ".gcno". */
@Test
public void testGetGcnoFile() throws Exception {
  Path input = Paths.get("foo/bar.m.o");
  Path output = CxxPreprocessAndCompile.getGcnoPath(input);
  assertEquals(Paths.get("foo/bar.m.gcno"), output);
}
}
/*
 * Copyright 2013 Gunnar Kappei.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.opengis.gml.impl;

/**
 * An XML LocationPropertyType(@http://www.opengis.net/gml).
 *
 * This is a complex type.
 *
 * NOTE(review): this looks like XMLBeans-generated code (schema-compiled accessors over a
 * backing element store) — avoid hand-editing; regenerate from the GML schema instead.
 * Only comments/formatting were changed here.
 */
public class LocationPropertyTypeImpl extends org.apache.xmlbeans.impl.values.XmlComplexContentImpl
    implements net.opengis.gml.LocationPropertyType {
    private static final long serialVersionUID = 1L;

    public LocationPropertyTypeImpl(org.apache.xmlbeans.SchemaType sType) {
        super(sType);
    }

    // Head QName for the "_Geometry" substitution group (used when adding elements).
    private static final javax.xml.namespace.QName GEOMETRY$0 =
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_Geometry");
    // All concrete members of the "_Geometry" substitution group (used when finding elements).
    private static final org.apache.xmlbeans.QNameSet GEOMETRY$1 = org.apache.xmlbeans.QNameSet.forArray( new javax.xml.namespace.QName[] {
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "MultiSurface"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "LineString"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "MultiLineString"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_Curve"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "OrientableSurface"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "CompositeSolid"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_Solid"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_GeometricAggregate"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Tin"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "CompositeCurve"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_Surface"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Polygon"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_GeometricPrimitive"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_Ring"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "MultiSolid"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Curve"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "OrientableCurve"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "TriangulatedSurface"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "LinearRing"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_Geometry"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "_ImplicitGeometry"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "MultiPolygon"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Solid"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "CompositeSurface"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Ring"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "MultiGeometry"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Surface"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Point"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "MultiCurve"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "MultiPoint"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "RectifiedGrid"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "PolyhedralSurface"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Grid"),
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "GeometricComplex"),
    });
    // Child-element QNames.
    private static final javax.xml.namespace.QName LOCATIONKEYWORD$2 =
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "LocationKeyWord");
    private static final javax.xml.namespace.QName LOCATIONSTRING$4 =
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "LocationString");
    private static final javax.xml.namespace.QName NULL$6 =
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "Null");
    // xlink attribute QNames.
    private static final javax.xml.namespace.QName TYPE$8 =
        new javax.xml.namespace.QName("http://www.w3.org/1999/xlink", "type");
    private static final javax.xml.namespace.QName HREF$10 =
        new javax.xml.namespace.QName("http://www.w3.org/1999/xlink", "href");
    private static final javax.xml.namespace.QName ROLE$12 =
        new javax.xml.namespace.QName("http://www.w3.org/1999/xlink", "role");
    private static final javax.xml.namespace.QName ARCROLE$14 =
        new javax.xml.namespace.QName("http://www.w3.org/1999/xlink", "arcrole");
    private static final javax.xml.namespace.QName TITLE$16 =
        new javax.xml.namespace.QName("http://www.w3.org/1999/xlink", "title");
    private static final javax.xml.namespace.QName SHOW$18 =
        new javax.xml.namespace.QName("http://www.w3.org/1999/xlink", "show");
    private static final javax.xml.namespace.QName ACTUATE$20 =
        new javax.xml.namespace.QName("http://www.w3.org/1999/xlink", "actuate");
    private static final javax.xml.namespace.QName REMOTESCHEMA$22 =
        new javax.xml.namespace.QName("http://www.opengis.net/gml", "remoteSchema");

    /**
     * Gets the "_Geometry" element
     */
    public net.opengis.gml.AbstractGeometryType getGeometry() {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.AbstractGeometryType target = null;
            // Lookup uses the QNameSet so any substitution-group member matches.
            target = (net.opengis.gml.AbstractGeometryType)get_store().find_element_user(GEOMETRY$1, 0);
            if (target == null) {
                return null;
            }
            return target;
        }
    }

    /**
     * True if has "_Geometry" element
     */
    public boolean isSetGeometry() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().count_elements(GEOMETRY$1) != 0;
        }
    }

    /**
     * Sets the "_Geometry" element
     */
    public void setGeometry(net.opengis.gml.AbstractGeometryType geometry) {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.AbstractGeometryType target = null;
            target = (net.opengis.gml.AbstractGeometryType)get_store().find_element_user(GEOMETRY$1, 0);
            if (target == null) {
                // Not present: add under the substitution-group head QName.
                target = (net.opengis.gml.AbstractGeometryType)get_store().add_element_user(GEOMETRY$0);
            }
            target.set(geometry);
        }
    }

    /**
     * Appends and returns a new empty "_Geometry" element
     */
    public net.opengis.gml.AbstractGeometryType addNewGeometry() {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.AbstractGeometryType target = null;
            target = (net.opengis.gml.AbstractGeometryType)get_store().add_element_user(GEOMETRY$0);
            return target;
        }
    }

    /**
     * Unsets the "_Geometry" element
     */
    public void unsetGeometry() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_element(GEOMETRY$1, 0);
        }
    }

    /**
     * Gets the "LocationKeyWord" element
     */
    public net.opengis.gml.CodeType getLocationKeyWord() {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.CodeType target = null;
            target = (net.opengis.gml.CodeType)get_store().find_element_user(LOCATIONKEYWORD$2, 0);
            if (target == null) {
                return null;
            }
            return target;
        }
    }

    /**
     * True if has "LocationKeyWord" element
     */
    public boolean isSetLocationKeyWord() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().count_elements(LOCATIONKEYWORD$2) != 0;
        }
    }

    /**
     * Sets the "LocationKeyWord" element
     */
    public void setLocationKeyWord(net.opengis.gml.CodeType locationKeyWord) {
        generatedSetterHelperImpl(locationKeyWord, LOCATIONKEYWORD$2, 0, org.apache.xmlbeans.impl.values.XmlObjectBase.KIND_SETTERHELPER_SINGLETON);
    }

    /**
     * Appends and returns a new empty "LocationKeyWord" element
     */
    public net.opengis.gml.CodeType addNewLocationKeyWord() {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.CodeType target = null;
            target = (net.opengis.gml.CodeType)get_store().add_element_user(LOCATIONKEYWORD$2);
            return target;
        }
    }

    /**
     * Unsets the "LocationKeyWord" element
     */
    public void unsetLocationKeyWord() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_element(LOCATIONKEYWORD$2, 0);
        }
    }

    /**
     * Gets the "LocationString" element
     */
    public net.opengis.gml.StringOrRefType getLocationString() {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.StringOrRefType target = null;
            target = (net.opengis.gml.StringOrRefType)get_store().find_element_user(LOCATIONSTRING$4, 0);
            if (target == null) {
                return null;
            }
            return target;
        }
    }

    /**
     * True if has "LocationString" element
     */
    public boolean isSetLocationString() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().count_elements(LOCATIONSTRING$4) != 0;
        }
    }

    /**
     * Sets the "LocationString" element
     */
    public void setLocationString(net.opengis.gml.StringOrRefType locationString) {
        generatedSetterHelperImpl(locationString, LOCATIONSTRING$4, 0, org.apache.xmlbeans.impl.values.XmlObjectBase.KIND_SETTERHELPER_SINGLETON);
    }

    /**
     * Appends and returns a new empty "LocationString" element
     */
    public net.opengis.gml.StringOrRefType addNewLocationString() {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.StringOrRefType target = null;
            target = (net.opengis.gml.StringOrRefType)get_store().add_element_user(LOCATIONSTRING$4);
            return target;
        }
    }

    /**
     * Unsets the "LocationString" element
     */
    public void unsetLocationString() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_element(LOCATIONSTRING$4, 0);
        }
    }

    /**
     * Gets the "Null" element
     */
    public java.lang.Object getNull() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_element_user(NULL$6, 0);
            if (target == null) {
                return null;
            }
            return target.getObjectValue();
        }
    }

    /**
     * Gets (as xml) the "Null" element
     */
    public net.opengis.gml.NullType xgetNull() {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.NullType target = null;
            target = (net.opengis.gml.NullType)get_store().find_element_user(NULL$6, 0);
            return target;
        }
    }

    /**
     * True if has "Null" element
     */
    public boolean isSetNull() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().count_elements(NULL$6) != 0;
        }
    }

    /**
     * Sets the "Null" element
     */
    public void setNull(java.lang.Object xnull) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_element_user(NULL$6, 0);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_element_user(NULL$6);
            }
            target.setObjectValue(xnull);
        }
    }

    /**
     * Sets (as xml) the "Null" element
     */
    public void xsetNull(net.opengis.gml.NullType xnull) {
        synchronized (monitor()) {
            check_orphaned();
            net.opengis.gml.NullType target = null;
            target = (net.opengis.gml.NullType)get_store().find_element_user(NULL$6, 0);
            if (target == null) {
                target = (net.opengis.gml.NullType)get_store().add_element_user(NULL$6);
            }
            target.set(xnull);
        }
    }

    /**
     * Unsets the "Null" element
     */
    public void unsetNull() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_element(NULL$6, 0);
        }
    }

    /**
     * Gets the "type" attribute
     */
    public org.w3.x1999.xlink.TypeType.Enum getType() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(TYPE$8);
            if (target == null) {
                // Fall back to the schema default when the attribute is absent.
                target = (org.apache.xmlbeans.SimpleValue)get_default_attribute_value(TYPE$8);
            }
            if (target == null) {
                return null;
            }
            return (org.w3.x1999.xlink.TypeType.Enum)target.getEnumValue();
        }
    }

    /**
     * Gets (as xml) the "type" attribute
     */
    public org.w3.x1999.xlink.TypeType xgetType() {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.TypeType target = null;
            target = (org.w3.x1999.xlink.TypeType)get_store().find_attribute_user(TYPE$8);
            if (target == null) {
                target = (org.w3.x1999.xlink.TypeType)get_default_attribute_value(TYPE$8);
            }
            return target;
        }
    }

    /**
     * True if has "type" attribute
     */
    public boolean isSetType() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().find_attribute_user(TYPE$8) != null;
        }
    }

    /**
     * Sets the "type" attribute
     */
    public void setType(org.w3.x1999.xlink.TypeType.Enum type) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(TYPE$8);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(TYPE$8);
            }
            target.setEnumValue(type);
        }
    }

    /**
     * Sets (as xml) the "type" attribute
     */
    public void xsetType(org.w3.x1999.xlink.TypeType type) {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.TypeType target = null;
            target = (org.w3.x1999.xlink.TypeType)get_store().find_attribute_user(TYPE$8);
            if (target == null) {
                target = (org.w3.x1999.xlink.TypeType)get_store().add_attribute_user(TYPE$8);
            }
            target.set(type);
        }
    }

    /**
     * Unsets the "type" attribute
     */
    public void unsetType() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_attribute(TYPE$8);
        }
    }

    /**
     * Gets the "href" attribute
     */
    public java.lang.String getHref() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(HREF$10);
            if (target == null) {
                return null;
            }
            return target.getStringValue();
        }
    }

    /**
     * Gets (as xml) the "href" attribute
     */
    public org.w3.x1999.xlink.HrefType xgetHref() {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.HrefType target = null;
            target = (org.w3.x1999.xlink.HrefType)get_store().find_attribute_user(HREF$10);
            return target;
        }
    }

    /**
     * True if has "href" attribute
     */
    public boolean isSetHref() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().find_attribute_user(HREF$10) != null;
        }
    }

    /**
     * Sets the "href" attribute
     */
    public void setHref(java.lang.String href) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(HREF$10);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(HREF$10);
            }
            target.setStringValue(href);
        }
    }

    /**
     * Sets (as xml) the "href" attribute
     */
    public void xsetHref(org.w3.x1999.xlink.HrefType href) {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.HrefType target = null;
            target = (org.w3.x1999.xlink.HrefType)get_store().find_attribute_user(HREF$10);
            if (target == null) {
                target = (org.w3.x1999.xlink.HrefType)get_store().add_attribute_user(HREF$10);
            }
            target.set(href);
        }
    }

    /**
     * Unsets the "href" attribute
     */
    public void unsetHref() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_attribute(HREF$10);
        }
    }

    /**
     * Gets the "role" attribute
     */
    public java.lang.String getRole() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(ROLE$12);
            if (target == null) {
                return null;
            }
            return target.getStringValue();
        }
    }

    /**
     * Gets (as xml) the "role" attribute
     */
    public org.w3.x1999.xlink.RoleType xgetRole() {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.RoleType target = null;
            target = (org.w3.x1999.xlink.RoleType)get_store().find_attribute_user(ROLE$12);
            return target;
        }
    }

    /**
     * True if has "role" attribute
     */
    public boolean isSetRole() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().find_attribute_user(ROLE$12) != null;
        }
    }

    /**
     * Sets the "role" attribute
     */
    public void setRole(java.lang.String role) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(ROLE$12);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(ROLE$12);
            }
            target.setStringValue(role);
        }
    }

    /**
     * Sets (as xml) the "role" attribute
     */
    public void xsetRole(org.w3.x1999.xlink.RoleType role) {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.RoleType target = null;
            target = (org.w3.x1999.xlink.RoleType)get_store().find_attribute_user(ROLE$12);
            if (target == null) {
                target = (org.w3.x1999.xlink.RoleType)get_store().add_attribute_user(ROLE$12);
            }
            target.set(role);
        }
    }

    /**
     * Unsets the "role" attribute
     */
    public void unsetRole() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_attribute(ROLE$12);
        }
    }

    /**
     * Gets the "arcrole" attribute
     */
    public java.lang.String getArcrole() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(ARCROLE$14);
            if (target == null) {
                return null;
            }
            return target.getStringValue();
        }
    }

    /**
     * Gets (as xml) the "arcrole" attribute
     */
    public org.w3.x1999.xlink.ArcroleType xgetArcrole() {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.ArcroleType target = null;
            target = (org.w3.x1999.xlink.ArcroleType)get_store().find_attribute_user(ARCROLE$14);
            return target;
        }
    }

    /**
     * True if has "arcrole" attribute
     */
    public boolean isSetArcrole() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().find_attribute_user(ARCROLE$14) != null;
        }
    }

    /**
     * Sets the "arcrole" attribute
     */
    public void setArcrole(java.lang.String arcrole) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(ARCROLE$14);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(ARCROLE$14);
            }
            target.setStringValue(arcrole);
        }
    }

    /**
     * Sets (as xml) the "arcrole" attribute
     */
    public void xsetArcrole(org.w3.x1999.xlink.ArcroleType arcrole) {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.ArcroleType target = null;
            target = (org.w3.x1999.xlink.ArcroleType)get_store().find_attribute_user(ARCROLE$14);
            if (target == null) {
                target = (org.w3.x1999.xlink.ArcroleType)get_store().add_attribute_user(ARCROLE$14);
            }
            target.set(arcrole);
        }
    }

    /**
     * Unsets the "arcrole" attribute
     */
    public void unsetArcrole() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_attribute(ARCROLE$14);
        }
    }

    /**
     * Gets the "title" attribute
     */
    public java.lang.String getTitle() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(TITLE$16);
            if (target == null) {
                return null;
            }
            return target.getStringValue();
        }
    }

    /**
     * Gets (as xml) the "title" attribute
     */
    public org.w3.x1999.xlink.TitleAttrType xgetTitle() {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.TitleAttrType target = null;
            target = (org.w3.x1999.xlink.TitleAttrType)get_store().find_attribute_user(TITLE$16);
            return target;
        }
    }

    /**
     * True if has "title" attribute
     */
    public boolean isSetTitle() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().find_attribute_user(TITLE$16) != null;
        }
    }

    /**
     * Sets the "title" attribute
     */
    public void setTitle(java.lang.String title) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(TITLE$16);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(TITLE$16);
            }
            target.setStringValue(title);
        }
    }

    /**
     * Sets (as xml) the "title" attribute
     */
    public void xsetTitle(org.w3.x1999.xlink.TitleAttrType title) {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.TitleAttrType target = null;
            target = (org.w3.x1999.xlink.TitleAttrType)get_store().find_attribute_user(TITLE$16);
            if (target == null) {
                target = (org.w3.x1999.xlink.TitleAttrType)get_store().add_attribute_user(TITLE$16);
            }
            target.set(title);
        }
    }

    /**
     * Unsets the "title" attribute
     */
    public void unsetTitle() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_attribute(TITLE$16);
        }
    }

    /**
     * Gets the "show" attribute
     */
    public org.w3.x1999.xlink.ShowType.Enum getShow() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(SHOW$18);
            if (target == null) {
                return null;
            }
            return (org.w3.x1999.xlink.ShowType.Enum)target.getEnumValue();
        }
    }

    /**
     * Gets (as xml) the "show" attribute
     */
    public org.w3.x1999.xlink.ShowType xgetShow() {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.ShowType target = null;
            target = (org.w3.x1999.xlink.ShowType)get_store().find_attribute_user(SHOW$18);
            return target;
        }
    }

    /**
     * True if has "show" attribute
     */
    public boolean isSetShow() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().find_attribute_user(SHOW$18) != null;
        }
    }

    /**
     * Sets the "show" attribute
     */
    public void setShow(org.w3.x1999.xlink.ShowType.Enum show) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(SHOW$18);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(SHOW$18);
            }
            target.setEnumValue(show);
        }
    }

    /**
     * Sets (as xml) the "show" attribute
     */
    public void xsetShow(org.w3.x1999.xlink.ShowType show) {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.ShowType target = null;
            target = (org.w3.x1999.xlink.ShowType)get_store().find_attribute_user(SHOW$18);
            if (target == null) {
                target = (org.w3.x1999.xlink.ShowType)get_store().add_attribute_user(SHOW$18);
            }
            target.set(show);
        }
    }

    /**
     * Unsets the "show" attribute
     */
    public void unsetShow() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_attribute(SHOW$18);
        }
    }

    /**
     * Gets the "actuate" attribute
     */
    public org.w3.x1999.xlink.ActuateType.Enum getActuate() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(ACTUATE$20);
            if (target == null) {
                return null;
            }
            return (org.w3.x1999.xlink.ActuateType.Enum)target.getEnumValue();
        }
    }

    /**
     * Gets (as xml) the "actuate" attribute
     */
    public org.w3.x1999.xlink.ActuateType xgetActuate() {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.ActuateType target = null;
            target = (org.w3.x1999.xlink.ActuateType)get_store().find_attribute_user(ACTUATE$20);
            return target;
        }
    }

    /**
     * True if has "actuate" attribute
     */
    public boolean isSetActuate() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().find_attribute_user(ACTUATE$20) != null;
        }
    }

    /**
     * Sets the "actuate" attribute
     */
    public void setActuate(org.w3.x1999.xlink.ActuateType.Enum actuate) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(ACTUATE$20);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(ACTUATE$20);
            }
            target.setEnumValue(actuate);
        }
    }

    /**
     * Sets (as xml) the "actuate" attribute
     */
    public void xsetActuate(org.w3.x1999.xlink.ActuateType actuate) {
        synchronized (monitor()) {
            check_orphaned();
            org.w3.x1999.xlink.ActuateType target = null;
            target = (org.w3.x1999.xlink.ActuateType)get_store().find_attribute_user(ACTUATE$20);
            if (target == null) {
                target = (org.w3.x1999.xlink.ActuateType)get_store().add_attribute_user(ACTUATE$20);
            }
            target.set(actuate);
        }
    }

    /**
     * Unsets the "actuate" attribute
     */
    public void unsetActuate() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_attribute(ACTUATE$20);
        }
    }

    /**
     * Gets the "remoteSchema" attribute
     */
    public java.lang.String getRemoteSchema() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(REMOTESCHEMA$22);
            if (target == null) {
                return null;
            }
            return target.getStringValue();
        }
    }

    /**
     * Gets (as xml) the "remoteSchema" attribute
     */
    public org.apache.xmlbeans.XmlAnyURI xgetRemoteSchema() {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.XmlAnyURI target = null;
            target = (org.apache.xmlbeans.XmlAnyURI)get_store().find_attribute_user(REMOTESCHEMA$22);
            return target;
        }
    }

    /**
     * True if has "remoteSchema" attribute
     */
    public boolean isSetRemoteSchema() {
        synchronized (monitor()) {
            check_orphaned();
            return get_store().find_attribute_user(REMOTESCHEMA$22) != null;
        }
    }

    /**
     * Sets the "remoteSchema" attribute
     */
    public void setRemoteSchema(java.lang.String remoteSchema) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.SimpleValue target = null;
            target = (org.apache.xmlbeans.SimpleValue)get_store().find_attribute_user(REMOTESCHEMA$22);
            if (target == null) {
                target = (org.apache.xmlbeans.SimpleValue)get_store().add_attribute_user(REMOTESCHEMA$22);
            }
            target.setStringValue(remoteSchema);
        }
    }

    /**
     * Sets (as xml) the "remoteSchema" attribute
     */
    public void xsetRemoteSchema(org.apache.xmlbeans.XmlAnyURI remoteSchema) {
        synchronized (monitor()) {
            check_orphaned();
            org.apache.xmlbeans.XmlAnyURI target = null;
            target = (org.apache.xmlbeans.XmlAnyURI)get_store().find_attribute_user(REMOTESCHEMA$22);
            if (target == null) {
                target = (org.apache.xmlbeans.XmlAnyURI)get_store().add_attribute_user(REMOTESCHEMA$22);
            }
            target.set(remoteSchema);
        }
    }

    /**
     * Unsets the "remoteSchema" attribute
     */
    public void unsetRemoteSchema() {
        synchronized (monitor()) {
            check_orphaned();
            get_store().remove_attribute(REMOTESCHEMA$22);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed.dht;

import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
import org.apache.ignite.testframework.*;
import org.apache.ignite.testframework.junits.common.*;
import org.jetbrains.annotations.*;

import javax.cache.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;

import static org.apache.ignite.cache.CacheMode.*;
import static org.apache.ignite.cache.CacheRebalanceMode.*;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.*;

/**
 * Test cases for partitioned cache {@link GridDhtPreloader preloader}.
 *
 * Each test method configures a rebalance mode and backup count, then runs
 * {@link #performTest()}, which races a reader node against a writer node to
 * verify that preloading delivers every written entry to the reader.
 */
public class GridCacheDhtPreloadPutGetSelfTest extends GridCommonAbstractTest {
    /** Key count. Number of distinct integer keys written and read back. */
    private static final int KEY_CNT = 1000;

    /** Iterations count. Number of full read passes performed by the reader. */
    private static final int ITER_CNT = 10;

    /** Frequency. Log only every FREQUENCY-th key to keep output manageable. */
    private static final int FREQUENCY = 100;

    /** Number of key backups. Each test method can set this value as required. */
    private int backups;

    /** Preload mode. Set per test method before performTest() runs. */
    private CacheRebalanceMode preloadMode;

    /** IP finder. Shared so both started grids discover each other. */
    private TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /**
     * {@inheritDoc}
     *
     * Builds a PARTITIONED / FULL_SYNC cache configuration using the
     * preloadMode and backups values chosen by the current test method.
     */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        // Test methods must set the mode before any grid is started.
        assert preloadMode != null;

        CacheConfiguration cacheCfg = defaultCacheConfiguration();

        cacheCfg.setCacheMode(PARTITIONED);
        cacheCfg.setWriteSynchronizationMode(FULL_SYNC);
        cacheCfg.setRebalanceMode(preloadMode);
        cacheCfg.setBackups(backups);

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(ipFinder);

        cfg.setDiscoverySpi(disco);
        cfg.setCacheConfiguration(cacheCfg);

        return cfg;
    }

    /**
     * Async rebalance, no backups.
     *
     * @throws Exception If failed.
     */
    public void testPutGetAsync0() throws Exception {
        preloadMode = ASYNC;
        backups = 0;

        performTest();
    }

    /**
     * Async rebalance, one backup.
     *
     * @throws Exception If failed.
     */
    public void testPutGetAsync1() throws Exception {
        preloadMode = ASYNC;
        backups = 1;

        performTest();
    }

    /**
     * Async rebalance, two backups.
     *
     * @throws Exception If failed.
     */
    public void testPutGetAsync2() throws Exception {
        preloadMode = ASYNC;
        backups = 2;

        performTest();
    }

    /**
     * Sync rebalance, no backups.
     *
     * @throws Exception If failed.
     */
    public void testPutGetSync0() throws Exception {
        preloadMode = SYNC;
        backups = 0;

        performTest();
    }

    /**
     * Sync rebalance, one backup.
     *
     * @throws Exception If failed.
     */
    public void testPutGetSync1() throws Exception {
        preloadMode = SYNC;
        backups = 1;

        performTest();
    }

    /**
     * Sync rebalance, two backups.
     *
     * @throws Exception If failed.
     */
    public void testPutGetSync2() throws Exception {
        preloadMode = SYNC;
        backups = 2;

        performTest();
    }

    /**
     * No rebalance, no backups.
     *
     * @throws Exception If failed.
     */
    public void testPutGetNone0() throws Exception {
        preloadMode = NONE;
        backups = 0;

        performTest();
    }

    /**
     * No rebalance, one backup.
     *
     * @throws Exception If failed.
     */
    public void testPutGetNone1() throws Exception {
        preloadMode = NONE;
        backups = 1;

        performTest();
    }

    /**
     * No rebalance, two backups.
     *
     * @throws Exception If failed.
     */
    public void testPutGetNone2() throws Exception {
        preloadMode = NONE;
        backups = 2;

        performTest();
    }

    /**
     * Races a reader node against a writer node:
     *
     * 1. The reader starts grid 2 and scans all keys; after its first pass it
     *    releases {@code writeLatch} and blocks on {@code readLatch}.
     * 2. The writer waits on {@code writeLatch}, starts grid 1, writes all
     *    keys, sets {@code done}, verifies its own entries, optionally stops
     *    grid 1 (when backups exist), and releases {@code readLatch}.
     * 3. Subsequent reader passes assert that every key is visible once
     *    {@code done} is set, i.e. rebalancing delivered the data.
     *
     * @throws Exception If test fails.
     */
    private void performTest() throws Exception {
        try {
            final CountDownLatch writeLatch = new CountDownLatch(1);

            final CountDownLatch readLatch = new CountDownLatch(1);

            final AtomicBoolean done = new AtomicBoolean();

            IgniteInternalFuture fut1 = GridTestUtils.runMultiThreadedAsync(
                new Callable<Object>() {
                    @Nullable @Override public Object call() throws Exception {
                        Ignite g2 = startGrid(2);

                        for (int i = 0; i < ITER_CNT; i++) {
                            info("Iteration # " + i);

                            IgniteCache<Integer, Integer> cache = g2.cache(null);

                            for (int j = 0; j < KEY_CNT; j++) {
                                Integer val = cache.get(j);

                                if (j % FREQUENCY == 0)
                                    info("Read entry: " + j + " -> " + val);

                                // Values are only guaranteed once the writer has finished.
                                if (done.get())
                                    assert val != null && val == j;
                            }

                            // First countDown unblocks the writer; later calls are no-ops.
                            writeLatch.countDown();

                            readLatch.await();
                        }

                        return null;
                    }
                },
                1,
                "reader"
            );

            IgniteInternalFuture fut2 = GridTestUtils.runMultiThreadedAsync(
                new Callable<Object>() {
                    @Nullable @Override public Object call() throws Exception {
                        try {
                            // Wait (bounded) until the reader completed its first pass.
                            writeLatch.await(10, TimeUnit.SECONDS);

                            Ignite g1 = startGrid(1);

                            IgniteCache<Integer, Integer> cache = g1.cache(null);

                            for (int j = 0; j < KEY_CNT; j++) {
                                cache.put(j, j);

                                if (j % FREQUENCY == 0)
                                    info("Stored value in cache: " + j);
                            }

                            done.set(true);

                            for (int j = 0; j < KEY_CNT; j++) {
                                Cache.Entry<Integer, Integer> entry = internalCache(cache).entry(j);

                                assert entry != null;

                                Integer val = entry.getValue();

                                if (j % FREQUENCY == 0)
                                    info("Read entry: " + entry.getKey() + " -> " + val);

                                assert val != null && val == j;
                            }

                            // With backups the data must survive the writer node leaving.
                            if (backups > 0)
                                stopGrid(1);
                        }
                        finally {
                            // Always unblock the reader, even on failure, to avoid a hang.
                            readLatch.countDown();
                        }

                        return null;
                    }
                },
                1,
                "writer"
            );

            fut1.get();
            fut2.get();
        }
        finally {
            stopAllGrids();
        }
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.memorydb.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object for the MemoryDB DescribeUsers operation: a page of users plus
 * an optional pagination token for fetching the next page.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/memorydb-2021-01-01/DescribeUsers" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeUsersResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * A list of users.
     * </p>
     */
    private java.util.List<User> users;
    /**
     * <p>
     * An optional argument to pass in case the total number of records exceeds the value of MaxResults. If nextToken is
     * returned, there are more results available. The value of nextToken is a unique pagination token for each page.
     * Make the call again using the returned token to retrieve the next page. Keep all other arguments unchanged.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * A list of users.
     * </p>
     *
     * @return A list of users.
     */
    public java.util.List<User> getUsers() {
        return users;
    }

    /**
     * <p>
     * A list of users.
     * </p>
     *
     * @param users
     *        A list of users.
     */
    public void setUsers(java.util.Collection<User> users) {
        // Defensive copy; a null argument clears the field rather than storing null-wrapped state.
        this.users = (users == null) ? null : new java.util.ArrayList<User>(users);
    }

    /**
     * <p>
     * A list of users.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setUsers(java.util.Collection)} or {@link #withUsers(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param users
     *        A list of users.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeUsersResult withUsers(User... users) {
        if (this.users == null) {
            setUsers(new java.util.ArrayList<User>(users.length));
        }
        java.util.Collections.addAll(this.users, users);
        return this;
    }

    /**
     * <p>
     * A list of users.
     * </p>
     *
     * @param users
     *        A list of users.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeUsersResult withUsers(java.util.Collection<User> users) {
        setUsers(users);
        return this;
    }

    /**
     * <p>
     * An optional argument to pass in case the total number of records exceeds the value of MaxResults. If nextToken is
     * returned, there are more results available. The value of nextToken is a unique pagination token for each page.
     * Make the call again using the returned token to retrieve the next page. Keep all other arguments unchanged.
     * </p>
     *
     * @param nextToken
     *        An optional argument to pass in case the total number of records exceeds the value of MaxResults. If
     *        nextToken is returned, there are more results available. The value of nextToken is a unique pagination
     *        token for each page. Make the call again using the returned token to retrieve the next page. Keep all
     *        other arguments unchanged.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * An optional argument to pass in case the total number of records exceeds the value of MaxResults. If nextToken is
     * returned, there are more results available. The value of nextToken is a unique pagination token for each page.
     * Make the call again using the returned token to retrieve the next page. Keep all other arguments unchanged.
     * </p>
     *
     * @return An optional argument to pass in case the total number of records exceeds the value of MaxResults. If
     *         nextToken is returned, there are more results available. The value of nextToken is a unique pagination
     *         token for each page. Make the call again using the returned token to retrieve the next page. Keep all
     *         other arguments unchanged.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * An optional argument to pass in case the total number of records exceeds the value of MaxResults. If nextToken is
     * returned, there are more results available. The value of nextToken is a unique pagination token for each page.
     * Make the call again using the returned token to retrieve the next page. Keep all other arguments unchanged.
     * </p>
     *
     * @param nextToken
     *        An optional argument to pass in case the total number of records exceeds the value of MaxResults. If
     *        nextToken is returned, there are more results available. The value of nextToken is a unique pagination
     *        token for each page. Make the call again using the returned token to retrieve the next page. Keep all
     *        other arguments unchanged.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeUsersResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getUsers() != null)
            sb.append("Users: ").append(getUsers()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeUsersResult))
            return false;
        DescribeUsersResult other = (DescribeUsersResult) obj;
        return java.util.Objects.equals(other.getUsers(), this.getUsers())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Objects.hashCode(x) is 0 for null, matching the original ternary form.
        hashCode = prime * hashCode + java.util.Objects.hashCode(getUsers());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getNextToken());
        return hashCode;
    }

    @Override
    public DescribeUsersResult clone() {
        try {
            return (DescribeUsersResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
/* * Copyright (c) 2020, John Campbell and other contributors. All rights reserved. * * This file is part of Tectonicus. It is subject to the license terms in the LICENSE file found in * the top-level directory of this distribution. The full list of project contributors is contained * in the AUTHORS file found in the same location. * */ package tectonicus.blockTypes; import org.joml.Vector3f; import org.joml.Vector4f; import tectonicus.BlockContext; import tectonicus.BlockType; import tectonicus.BlockTypeRegistry; import tectonicus.Chunk; import tectonicus.configuration.LightFace; import tectonicus.rasteriser.SubMesh; import tectonicus.rasteriser.SubMesh.Rotation; import tectonicus.raw.RawChunk; import tectonicus.renderer.Geometry; import tectonicus.texture.SubTexture; import static tectonicus.Version.VERSION_4; public class RedstoneRepeater implements BlockType { private static final int HEIGHT_IN_TEXELS = 4; private final String name; private final SubTexture baseTexture, sideTexture; private final SubTexture torchTopTexture, torchSideTexture, litTorchTopTexture, baseLit; public RedstoneRepeater(String name, SubTexture baseTexture, SubTexture sideTexture, SubTexture torchTexture, SubTexture litTorchTexture, SubTexture baseLit) { this.name = name; final float texelSize, baseTile; if (baseTexture.texturePackVersion == VERSION_4) { texelSize = 1.0f / 16.0f / 16.0f; baseTile = 0; } else { texelSize = 1.0f / 16.0f; baseTile = (1.0f / baseTexture.texture.getHeight()) * baseTexture.texture.getWidth(); } this.baseTexture = new SubTexture(baseTexture.texture, baseTexture.u0, baseTexture.v0, baseTexture.u1, baseTexture.v0+baseTile); this.baseLit = baseLit; final float vHeight = texelSize * 14; this.sideTexture = new SubTexture(sideTexture.texture, sideTexture.u0, sideTexture.v0+vHeight, sideTexture.u1, sideTexture.v1); // Torch textures final float topOffset = texelSize * 6; this.torchSideTexture = new SubTexture(torchTexture.texture, torchTexture.u0, torchTexture.v0 + 
topOffset, torchTexture.u1, torchTexture.v1); final float uOffset = texelSize * 7; final float vOffset0 = texelSize * 6; final float vOffset1 = texelSize * 8; this.torchTopTexture = new SubTexture(torchTexture.texture, torchTexture.u0 + uOffset, torchTexture.v0 + vOffset0, torchTexture.u1 - uOffset, torchTexture.v1 - vOffset1); if(litTorchTexture != null) { this.litTorchTopTexture = new SubTexture(litTorchTexture.texture, torchTexture.u0 + uOffset, torchTexture.v0 + vOffset0, torchTexture.u1 - uOffset, torchTexture.v1 - vOffset1); } else litTorchTopTexture = null; } @Override public String getName() { return name; } @Override public boolean isSolid() { return false; } @Override public boolean isWater() { return false; } @Override public void addInteriorGeometry(int x, int y, int z, BlockContext world, BlockTypeRegistry registry, RawChunk rawChunk, Geometry geometry) { addEdgeGeometry(x, y, z, world, registry, rawChunk, geometry); } @Override public void addEdgeGeometry(int x, int y, int z, BlockContext world, BlockTypeRegistry registry, RawChunk chunk, Geometry geometry) { final int data = chunk.getBlockData(x, y, z); final float height = 1.0f / 16.0f * HEIGHT_IN_TEXELS; final float lightness = Chunk.getLight(world.getLightStyle(), LightFace.Top, chunk, x, y, z); Vector4f white = new Vector4f(lightness, lightness, lightness, 1); SubMesh subMesh = new SubMesh(); SubMesh baseMesh = new SubMesh(); SubMesh torchMesh = new SubMesh(); SubMesh litTorchMesh = new SubMesh(); SubTexture base = null; if((data & 0x8) > 0 && baseLit != null) { final float baseLitTile = (1.0f / baseLit.texture.getHeight()) * baseLit.texture.getWidth(); base = new SubTexture(baseLit.texture, baseLit.u0, baseLit.v0, baseLit.u1, baseLit.v0+baseLitTile); } else { base = baseTexture; } baseMesh.addQuad(new Vector3f(0, height, 0), new Vector3f(1, height, 0), new Vector3f(1, height, 1), new Vector3f(0, height, 1), white, base); // North edge subMesh.addQuad(new Vector3f(0, height, 0), new Vector3f(0, 
height, 1), new Vector3f(0, 0, 1), new Vector3f(0, 0, 0), white, sideTexture); // South edge subMesh.addQuad(new Vector3f(1, height, 1), new Vector3f(1, height, 0), new Vector3f(1, 0, 0), new Vector3f(1, 0, 1), white, sideTexture); // East edge subMesh.addQuad(new Vector3f(1, height, 0), new Vector3f(0, height, 0), new Vector3f(0, 0, 0), new Vector3f(1, 0, 0), white, sideTexture); // West edge subMesh.addQuad(new Vector3f(0, height, 1), new Vector3f(1, height, 1), new Vector3f(1, 0, 1), new Vector3f(0, 0, 1), white, sideTexture); final float texel = 1.0f / 16.0f; //This is rather messy because block ID 150 "Redstone Comparator (Active)" doesn't seem to be valid, so we have to pass in all possibly needed textures, etc. if(name.equals("Redstone Repeater")) { // Static torch addTorch(torchMesh, texel*7, 0, texel*2); // Delay torch final int delay = (data>>2) & 0x3; final float yPixel = delay * 2 + 6; // Valid offsets are from 6 to 12 addTorch(torchMesh, texel*7, 0, texel*yPixel); } else if(data == 4 || data == 5 || data == 6 || data == 7 && litTorchTopTexture != null) { addTorch(litTorchMesh, texel*7, -texel, texel*2); addTorch(torchMesh, texel*4, -texel, texel*11); addTorch(torchMesh, texel*10, -texel, texel*11); } else if(data == 8 || data == 9 || data == 10 || data == 11 && litTorchTopTexture != null) { addTorch(torchMesh, texel*7, texel*-4, texel*2); addTorch(litTorchMesh, texel*4, -texel, texel*11); addTorch(litTorchMesh, texel*10, -texel, texel*11); } else if(data == 12 || data == 13 || data == 14 || data == 15 && litTorchTopTexture != null) { addTorch(litTorchMesh, texel*7, -texel, texel*2); addTorch(litTorchMesh, texel*4, -texel, texel*11); addTorch(litTorchMesh, texel*10, -texel, texel*11); } else { addTorch(torchMesh, texel*7, texel*-4, texel*2); addTorch(torchMesh, texel*4, -texel, texel*11); addTorch(torchMesh, texel*10, -texel, texel*11); } // Now do rotation Rotation rotation = Rotation.None; float angle = 0; final int direction = data & 0x3; if 
(direction == 2) { // Facing east rotation = Rotation.Clockwise; angle = 180; } else if (direction == 3) { // Facing south rotation = Rotation.Clockwise; angle = 90; } else if (direction == 0) { // Facing west (built direction) } else if (direction == 1) { // Facing south rotation = Rotation.AntiClockwise; angle = 90; } subMesh.pushTo(geometry.getMesh(sideTexture.texture, Geometry.MeshType.AlphaTest), x, y, z, rotation, angle); if((data & 0x8) > 0 && baseLit != null) { final float baseLitTile = (1.0f / baseLit.texture.getHeight()) * baseLit.texture.getWidth(); SubTexture lit = new SubTexture(baseLit.texture, baseLit.u0, baseLit.v0, baseLit.u1, baseLit.v0+baseLitTile); baseMesh.pushTo(geometry.getMesh(lit.texture, Geometry.MeshType.AlphaTest), x, y, z, rotation, angle); } else baseMesh.pushTo(geometry.getMesh(baseTexture.texture, Geometry.MeshType.AlphaTest), x, y, z, rotation, angle); torchMesh.pushTo(geometry.getMesh(torchSideTexture.texture, Geometry.MeshType.AlphaTest), x, y, z, rotation, angle); if(litTorchTopTexture != null) { litTorchMesh.pushTo(geometry.getMesh(litTorchTopTexture.texture, Geometry.MeshType.AlphaTest), x, y, z, rotation, angle); } } private void addTorch(SubMesh subMesh, float x, float y, float z) { final float lightness = 1.0f; Vector4f colour = new Vector4f(1, 1, 1, 1); final float leftSide = 7.0f / 16.0f; final float rightSide = 9.0f / 16.0f; final float height = 10.0f / 16.0f; final float texel = 1.0f / 16.0f; // Shift so x/y/z of zero starts the torch just next to the origin x -= texel*7; z -= texel*7; // Data defines torch placement // 0x1: Pointing south // 0x2: Pointing north // 0x3; Pointing west // 0x4: Pointing east // 0x5: Standing on the floor final float bottomOffsetX; final float bottomOffsetZ; final float bottomOffsetY; { // Standing on the floor bottomOffsetX = 0.0f; bottomOffsetZ = 0.0f; bottomOffsetY = 0.0f; } // Top subMesh.addQuad( new Vector3f(x+leftSide, y+height+bottomOffsetY, z+leftSide), new Vector3f(x+rightSide, 
y+height+bottomOffsetY, z+leftSide), new Vector3f(x+rightSide, y+height+bottomOffsetY, z+rightSide), new Vector3f(x+leftSide, y+height+bottomOffsetY, z+rightSide), new Vector4f(colour.x * lightness, colour.y * lightness, colour.z * lightness, colour.w), torchTopTexture); // North subMesh.addQuad( new Vector3f(x+leftSide, y+height+bottomOffsetY, z), new Vector3f(x+leftSide, y+height+bottomOffsetY, z+1), new Vector3f(x+leftSide + bottomOffsetX, y+bottomOffsetY, z+1 + bottomOffsetZ), new Vector3f(x+leftSide + bottomOffsetX, y+bottomOffsetY, z + bottomOffsetZ), new Vector4f(colour.x * lightness, colour.y * lightness, colour.z * lightness, colour.w), torchSideTexture); // South subMesh.addQuad( new Vector3f(x+rightSide, y+height+bottomOffsetY, z+1), new Vector3f(x+rightSide, y+height+bottomOffsetY, z), new Vector3f(x+rightSide + bottomOffsetX, y+bottomOffsetY, z + bottomOffsetZ), new Vector3f(x+rightSide + bottomOffsetX, y+bottomOffsetY, z+1 + bottomOffsetZ), new Vector4f(colour.x * lightness, colour.y * lightness, colour.z * lightness, colour.w), torchSideTexture); // East subMesh.addQuad( new Vector3f(x+1, y+height+bottomOffsetY, z+leftSide), new Vector3f(x, y+height+bottomOffsetY, z+leftSide), new Vector3f(x + bottomOffsetX, y+bottomOffsetY, z+leftSide + bottomOffsetZ), new Vector3f(x+1 + bottomOffsetX, y+bottomOffsetY, z+leftSide + bottomOffsetZ), new Vector4f(colour.x * lightness, colour.y * lightness, colour.z * lightness, colour.w), torchSideTexture); // West subMesh.addQuad( new Vector3f(x, y+height+bottomOffsetY, z+rightSide), new Vector3f(x+1, y+height+bottomOffsetY, z+rightSide), new Vector3f(x+1 + bottomOffsetX, y+bottomOffsetY, z+rightSide + bottomOffsetZ), new Vector3f(x + bottomOffsetX, y+bottomOffsetY, z+rightSide + bottomOffsetZ), new Vector4f(colour.x * lightness, colour.y * lightness, colour.z * lightness, colour.w), torchSideTexture); } }
/* * Copyright 2015 OWASP. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.owasp.dependencycheck.xml.pom; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.*; /** * * @author jeremy */ public class ModelTest { /** * Test of getName method, of class Model. */ @Test public void testGetName() { Model instance = new Model(); instance.setName(""); String expResult = ""; String result = instance.getName(); assertEquals(expResult, result); } /** * Test of setName method, of class Model. */ @Test public void testSetName() { String name = ""; Model instance = new Model(); instance.setName(name); } /** * Test of getOrganization method, of class Model. */ @Test public void testGetOrganization() { Model instance = new Model(); instance.setOrganization(""); String expResult = ""; String result = instance.getOrganization(); assertEquals(expResult, result); } /** * Test of setOrganization method, of class Model. */ @Test public void testSetOrganization() { String organization = ""; Model instance = new Model(); instance.setOrganization(organization); } /** * Test of getDescription method, of class Model. 
*/ @Test public void testGetDescription() { Model instance = new Model(); instance.setDescription(""); String expResult = ""; String result = instance.getDescription(); assertEquals(expResult, result); } /** * Test of setDescription method, of class Model. */ @Test public void testSetDescription() { String description = ""; Model instance = new Model(); instance.setDescription(description); } /** * Test of getGroupId method, of class Model. */ @Test public void testGetGroupId() { Model instance = new Model(); instance.setGroupId(""); String expResult = ""; String result = instance.getGroupId(); assertEquals(expResult, result); } /** * Test of setGroupId method, of class Model. */ @Test public void testSetGroupId() { String groupId = ""; Model instance = new Model(); instance.setGroupId(groupId); } /** * Test of getArtifactId method, of class Model. */ @Test public void testGetArtifactId() { Model instance = new Model(); instance.setArtifactId(""); String expResult = ""; String result = instance.getArtifactId(); assertEquals(expResult, result); } /** * Test of setArtifactId method, of class Model. */ @Test public void testSetArtifactId() { String artifactId = ""; Model instance = new Model(); instance.setArtifactId(artifactId); } /** * Test of getVersion method, of class Model. */ @Test public void testGetVersion() { Model instance = new Model(); instance.setVersion(""); String expResult = ""; String result = instance.getVersion(); assertEquals(expResult, result); } /** * Test of setVersion method, of class Model. */ @Test public void testSetVersion() { String version = ""; Model instance = new Model(); instance.setVersion(version); } /** * Test of getParentGroupId method, of class Model. */ @Test public void testGetParentGroupId() { Model instance = new Model(); instance.setParentGroupId(""); String expResult = ""; String result = instance.getParentGroupId(); assertEquals(expResult, result); } /** * Test of setParentGroupId method, of class Model. 
*/ @Test public void testSetParentGroupId() { String parentGroupId = ""; Model instance = new Model(); instance.setParentGroupId(parentGroupId); } /** * Test of getParentArtifactId method, of class Model. */ @Test public void testGetParentArtifactId() { Model instance = new Model(); instance.setParentArtifactId(""); String expResult = ""; String result = instance.getParentArtifactId(); assertEquals(expResult, result); } /** * Test of setParentArtifactId method, of class Model. */ @Test public void testSetParentArtifactId() { String parentArtifactId = ""; Model instance = new Model(); instance.setParentArtifactId(parentArtifactId); } /** * Test of getParentVersion method, of class Model. */ @Test public void testGetParentVersion() { Model instance = new Model(); instance.setParentVersion(""); String expResult = ""; String result = instance.getParentVersion(); assertEquals(expResult, result); } /** * Test of setParentVersion method, of class Model. */ @Test public void testSetParentVersion() { String parentVersion = ""; Model instance = new Model(); instance.setParentVersion(parentVersion); } /** * Test of getLicenses method, of class Model. */ @Test public void testGetLicenses() { Model instance = new Model(); instance.addLicense(new License("name", "url")); List<License> expResult = new ArrayList<License>(); expResult.add(new License("name", "url")); List<License> result = instance.getLicenses(); assertEquals(expResult, result); } /** * Test of addLicense method, of class Model. */ @Test public void testAddLicense() { License license = new License("name", "url"); Model instance = new Model(); instance.addLicense(license); } /** * Test of processProperties method, of class Model. 
*/ @Test public void testProcessProperties() { Properties prop = new Properties(); prop.setProperty("key", "value"); prop.setProperty("nested", "nested ${key}"); String text = "This is a test of '${key}' '${nested}'"; Model instance = new Model(); instance.setName(text); instance.processProperties(prop); String expResults = "This is a test of 'value' 'nested value'"; assertEquals(expResults, instance.getName()); } /** * Test of interpolateString method, of class Model. */ @Test public void testInterpolateString() { Properties prop = new Properties(); prop.setProperty("key", "value"); prop.setProperty("nested", "nested ${key}"); String text = "This is a test of '${key}' '${nested}'"; String expResults = "This is a test of 'value' 'nested value'"; String results = Model.interpolateString(text, prop); assertEquals(expResults, results); } }
package org.daisy.pipeline.jobs.test;

import static org.junit.Assert.*;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

import org.daisy.pipeline.client.Pipeline2Exception;
import org.daisy.pipeline.client.Pipeline2Logger;
import org.daisy.pipeline.client.filestorage.JobStorage;
import org.daisy.pipeline.client.models.Argument;
import org.daisy.pipeline.client.models.Callback;
import org.daisy.pipeline.client.models.Callback.Type;
import org.daisy.pipeline.client.models.Job;
import org.daisy.pipeline.client.models.Script;
import org.daisy.pipeline.client.utils.XML;
import org.daisy.pipeline.client.utils.XPath;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.w3c.dom.Document;
import org.w3c.dom.Node;

/**
 * Tests for {@link JobStorage}: listing, loading, deleting and storing jobs,
 * and resolving context files for job arguments.
 *
 * <p>Each test runs against a fresh copy of {@code src/test/resources/jobs}
 * made in a {@link TemporaryFolder}, so tests may mutate the storage freely.
 */
public class JobStorageTest {

    private File resources = new File("src/test/resources/");

    /**
     * Reads a test resource as a string, failing the test on I/O errors.
     *
     * @param href path relative to {@code src/test/resources/}
     * @return the file contents, decoded with the platform default charset
     */
    private String loadResource(String href) {
        File scriptXmlFile = new File(resources, href);
        try {
            byte[] encoded = Files.readAllBytes(Paths.get(scriptXmlFile.getPath()));
            return new String(encoded, Charset.defaultCharset());
        } catch (IOException e) {
            // fail() throws AssertionError, so the return below is unreachable;
            // it only satisfies the compiler.
            fail("Failed to read " + scriptXmlFile.getPath());
            return null;
        }
    }

    public TemporaryFolder testFolder;
    public File jobStorageDir;

    @Before
    public void populateTestFolder() throws IOException {
        testFolder = new TemporaryFolder();
        testFolder.create();
        jobStorageDir = testFolder.newFolder("jobs");
        File sourceFolder = new File(resources, "jobs");
        copyFolder(sourceFolder, jobStorageDir);
    }

    @After
    public void tearDown() {
        // Delete the per-test copy; leaving it (the delete was previously
        // commented out) leaks one temp directory per test run.
        testFolder.delete();
    }

    /**
     * Recursively copies a file or directory tree.
     *
     * @param sourceFolder file or directory to copy
     * @param destinationFolder target path (created if missing)
     */
    public static void copyFolder(File sourceFolder, File destinationFolder) throws IOException {
        if (sourceFolder.isDirectory()) {
            if (!destinationFolder.exists()) {
                destinationFolder.mkdir();
            }
            // list() returns null on I/O error; guard instead of NPE-ing.
            String[] files = sourceFolder.list();
            if (files != null) {
                for (String file : files) {
                    File srcFile = new File(sourceFolder, file);
                    File destFile = new File(destinationFolder, file);
                    copyFolder(srcFile, destFile);
                }
            }
        } else {
            Files.copy(sourceFolder.toPath(), destinationFolder.toPath());
        }
    }

    /** Lists the stored jobs and checks every field/argument loaded from job1 and job2. */
    @Test
    public void testListAndLoadJobs() {
        List<String> jobs = JobStorage.listJobs(jobStorageDir);
        assertEquals(2, jobs.size());
        assertEquals("job1", jobs.get(0));
        assertEquals("job2", jobs.get(1));

        assertNull(JobStorage.loadJob("does-not-exist", jobStorageDir));

        Job job = JobStorage.loadJob(jobs.get(0), jobStorageDir);
        assertEquals("job1", job.getId());
        assertEquals("Nicename", job.getNicename());
        assertEquals("\nshort description\n\nlong description\n", job.getDescription());
        assertEquals("http://localhost:8181/ws/scripts/dtbook-to-epub3", job.getScriptHref());

        Script script = job.getScript();
        assertEquals("http://localhost:8181/ws/scripts/dtbook-to-epub3", script.getHref());
        assertEquals("dtbook-to-epub3", script.getId());
        assertEquals(5, script.getInputs().size());
        assertNotNull(script.getArgument("source"));
        assertNotNull(script.getArgument("language"));
        assertNotNull(script.getArgument("assert-valid"));
        assertNotNull(script.getArgument("tts-config"));
        assertNotNull(script.getArgument("audio"));

        // Job arguments must be the same objects the script declares.
        assertEquals(script.getArgument("source"), job.getArgument("source"));
        assertEquals(script.getArgument("language"), job.getArgument("language"));
        assertEquals(script.getArgument("assert-valid"), job.getArgument("assert-valid"));
        assertEquals(script.getArgument("tts-config"), job.getArgument("tts-config"));
        assertEquals(script.getArgument("audio"), job.getArgument("audio"));

        assertEquals(2, job.getArgument("source").size());
        assertEquals(1, job.getArgument("language").size());
        assertEquals(1, job.getArgument("assert-valid").size());
        assertEquals(1, job.getArgument("audio").size());
        assertEquals("hauy_valid.xml", job.getArgument("source").getAsList().get(0));
        assertEquals("dtbook.2005.basic.css", job.getArgument("source").getAsList().get(1));
        assertEquals("en", job.getArgument("language").get());
        assertEquals("true", job.getArgument("assert-valid").get());
        assertEquals("true", job.getArgument("audio").get());
        assertEquals(true, job.getArgument("assert-valid").getAsBoolean());
        assertEquals(true, job.getArgument("audio").getAsBoolean());

        job = JobStorage.loadJob(jobs.get(1), jobStorageDir);
        assertEquals("job2", job.getId());
        assertEquals("Other nicename", job.getNicename());
        assertEquals(null, job.getDescription());
        assertEquals("http://localhost:8181/ws/scripts/dtbook-to-epub3", job.getScriptHref());
        assertEquals("dtbook-to-epub3", job.getScript().getId());
        assertEquals(3, job.getInputs().size());
        assertNotNull(job.getArgument("source"));
        assertNull(job.getArgument("language"));
        assertNotNull(job.getArgument("assert-valid"));
        assertNotNull(job.getArgument("audio"));
        assertEquals(1, job.getArgument("source").size());
        assertEquals(1, job.getArgument("assert-valid").size());
        assertEquals(1, job.getArgument("audio").size());
        assertEquals("hauy_valid.xml", job.getArgument("source").get());
        assertEquals(true, job.getArgument("assert-valid").getAsBoolean());
        assertEquals(false, job.getArgument("audio").getAsBoolean());
    }

    /** Deleting a job through its storage must remove it from the listing. */
    @Test
    public void testDeleteJob() {
        List<String> jobsBefore = JobStorage.listJobs(jobStorageDir);

        Job job2 = JobStorage.loadJob("job2", jobStorageDir);
        job2.getJobStorage().delete();

        List<String> jobsAfter = JobStorage.listJobs(jobStorageDir);

        assertEquals(2, jobsBefore.size());
        assertEquals(1, jobsAfter.size());
        assertTrue(jobsAfter.contains("job1"));
        assertFalse(jobsAfter.contains("job2"));
    }

    /** Stores a new job and checks the serialized job.xml namespace handling. */
    @Test
    public void testStoreJob() {
        List<String> jobsBefore = JobStorage.listJobs(jobStorageDir);

        Path textFilePath = null;
        try {
            textFilePath = Files.createTempFile("test", "test");
            String text = "this is a test";
            Files.write(textFilePath, text.getBytes());
        } catch (IOException e) {
            Pipeline2Logger.logger().error("Unable to store XML for job", e);
            fail("Unable to create temporary context file for job");
        }

        Job newJob = new Job();
        try {
            String scriptXmlString = loadResource("scripts/dtbook-to-epub3.xml");
            Document scriptXml = XML.getXml(scriptXmlString);
            Script script = new Script(scriptXml);
            newJob.setScript(script);
        } catch (Pipeline2Exception e) {
            fail("Failed to parse script XML: scripts/dtbook-to-epub3.xml");
        }
        assertNotNull(newJob.getScript());
        assertNotNull(newJob.getScript().getArgument("source"));
        assertEquals(Argument.Kind.input, newJob.getScript().getArgument("source").getKind());
        assertEquals(Argument.Kind.option, newJob.getScript().getArgument("assert-valid").getKind());

        newJob.setId("job3");
        JobStorage jobStorage = new JobStorage(newJob, jobStorageDir, null);
        newJob.getArgument("source").add(textFilePath.toFile(), jobStorage);
        newJob.getArgument("assert-valid").set(false);
        List<Callback> callbacks = new ArrayList<Callback>();
        callbacks.add(new Callback("http://example.com/1", Type.status, "1"));
        callbacks.add(new Callback("http://example.com/2", Type.messages, "2"));
        newJob.setCallback(callbacks);
        jobStorage.save();

        List<String> jobsAfter = JobStorage.listJobs(jobStorageDir);

        assertEquals(2, jobsBefore.size());
        assertEquals(3, jobsAfter.size());
        assertTrue(jobsAfter.contains("job1"));
        assertTrue(jobsAfter.contains("job2"));
        assertTrue(jobsAfter.contains("job3"));
        assertTrue(new File(jobStorageDir, "job3/job.xml").isFile());

        File jobXmlFile = new File(jobStorageDir, "job3/job.xml");
        String jobXmlString = null;
        List<Node> elements = null;
        try {
            byte[] encoded = Files.readAllBytes(Paths.get(jobXmlFile.getPath()));
            jobXmlString = new String(encoded, Charset.defaultCharset());
            Document serializedXml = XML.getXml(jobXmlString);
            elements = XPath.selectNodes("//*", serializedXml, XPath.dp2ns);
        } catch (IOException e) {
            fail("Failed to read " + jobXmlFile.getPath());
        } catch (Pipeline2Exception e) {
            fail("Failed to read " + jobXmlFile.getPath());
        }
        for (Node element : elements) {
            assertNull("Elements should be in the default namespace (element name: "
                    + element.getLocalName() + " has the namespace prefix '"
                    + element.getPrefix() + "')", element.getPrefix());
            assertEquals("The default namespace for all elements should be "
                    + "'http://www.daisy.org/ns/pipeline/data' (element name: "
                    + element.getLocalName() + ")",
                    "http://www.daisy.org/ns/pipeline/data", element.getNamespaceURI());
            assertFalse("No element should have the local name 'null' (parent:"
                    + element.getParentNode().getLocalName() + ")",
                    "null".equals(element.getLocalName()));
        }
    }

    /** Checks context-file registration and href resolution for file arguments. */
    @Test
    public void testFiles() {
        Job job = new Job();
        try {
            String scriptXmlString = loadResource("scripts/dtbook-to-epub3.xml");
            Document scriptXml = XML.getXml(scriptXmlString);
            Script script = new Script(scriptXml);
            job.setScript(script);
        } catch (Pipeline2Exception e) {
            fail("Failed to parse script XML: scripts/dtbook-to-epub3.xml");
        }

        job.setId("filesJob");
        JobStorage jobStorage = new JobStorage(job, jobStorageDir, null);

        File xmlFile = new File(jobStorageDir, "job1/context/hauy_valid.xml");
        File cssFile = new File(jobStorageDir, "job1/context/dtbook.2005.basic.css");

        Argument source = job.getArgument("source");
        source.set(xmlFile, jobStorage);
        assertEquals(1, source.size());
        assertEquals("hauy_valid.xml", source.get());

        // A file pre-registered under an explicit context path keeps that path.
        jobStorage.addContextFile(cssFile, "css/style.css");
        source.set(cssFile, jobStorage);
        assertEquals(1, source.size());
        assertEquals("css/style.css", source.get());

        source.add(xmlFile, jobStorage);
        assertEquals(2, source.size());
        assertEquals("hauy_valid.xml", source.getAsList().get(1));

        Argument outputDir = job.getArgument("output-dir");
        outputDir.set(testFolder.getRoot(), jobStorage);
        assertEquals(1, outputDir.size());
        try {
            // On macOS the temp dir may sit behind a /private symlink, so the
            // canonical path differs from the absolute one.
            if (System.getProperty("os.name").toLowerCase().startsWith("mac os x"))
                assertEquals("file:" + testFolder.getRoot().getAbsolutePath() + "/", outputDir.get());
            else
                assertEquals("file:" + testFolder.getRoot().getCanonicalPath() + "/", outputDir.get());
        } catch (IOException e) {
            fail("Failed to resolve canonical path of " + testFolder.getRoot());
        }
    }
}
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.wallf.cloudcomic.imageloader; import java.io.BufferedInputStream; import java.io.BufferedWriter; import java.io.Closeable; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.lang.reflect.Array; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; /** ****************************************************************************** * Taken from the JB source code, can be found in: * libcore/luni/src/main/java/libcore/io/DiskLruCache.java * or direct link: * https://android.googlesource.com/platform/libcore/+/android-4.1.1_r1/luni/src/main/java/libcore/io/DiskLruCache.java 
****************************************************************************** * * A cache that uses a bounded amount of space on a filesystem. Each cache * entry has a string key and a fixed number of values. Values are byte * sequences, accessible as streams or files. Each value must be between {@code * 0} and {@code Integer.MAX_VALUE} bytes in length. * * <p>The cache stores its data in a directory on the filesystem. This * directory must be exclusive to the cache; the cache may delete or overwrite * files from its directory. It is an error for multiple processes to use the * same cache directory at the same time. * * <p>This cache limits the number of bytes that it will store on the * filesystem. When the number of stored bytes exceeds the limit, the cache will * remove entries in the background until the limit is satisfied. The limit is * not strict: the cache may temporarily exceed it while waiting for files to be * deleted. The limit does not include filesystem overhead or the cache * journal so space-sensitive applications should set a conservative limit. * * <p>Clients call {@link #edit} to create or update the values of an entry. An * entry may have only one editor at one time; if a value is not available to be * edited then {@link #edit} will return null. * <ul> * <li>When an entry is being <strong>created</strong> it is necessary to * supply a full set of values; the empty value should be used as a * placeholder if necessary. * <li>When an entry is being <strong>edited</strong>, it is not necessary * to supply data for every value; values default to their previous * value. * </ul> * Every {@link #edit} call must be matched by a call to {@link Editor#commit} * or {@link Editor#abort}. Committing is atomic: a read observes the full set * of values as they were before or after the commit, but never a mix of values. * * <p>Clients call {@link #get} to read a snapshot of an entry. The read will * observe the value at the time that {@link #get} was called. 
Updates and
 * removals after the call do not impact ongoing reads.
 *
 * <p>This class is tolerant of some I/O errors. If files are missing from the
 * filesystem, the corresponding entries will be dropped from the cache. If
 * an error occurs while writing a cache value, the edit will fail silently.
 * Callers should handle other problems by catching {@code IOException} and
 * responding appropriately.
 */
public final class DiskLruCache implements Closeable {
    static final String JOURNAL_FILE = "journal";
    static final String JOURNAL_FILE_TMP = "journal.tmp";
    static final String MAGIC = "libcore.io.DiskLruCache";
    static final String VERSION_1 = "1";
    static final long ANY_SEQUENCE_NUMBER = -1;
    private static final String CLEAN = "CLEAN";
    private static final String DIRTY = "DIRTY";
    private static final String REMOVE = "REMOVE";
    private static final String READ = "READ";
    private static final Charset UTF_8 = Charset.forName("UTF-8");
    private static final int IO_BUFFER_SIZE = 8 * 1024;

    /*
     * This cache uses a journal file named "journal". A typical journal file
     * looks like this:
     *     libcore.io.DiskLruCache
     *     1
     *     100
     *     2
     *
     *     CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054
     *     DIRTY 335c4c6028171cfddfbaae1a9c313c52
     *     CLEAN 335c4c6028171cfddfbaae1a9c313c52 3934 2342
     *     REMOVE 335c4c6028171cfddfbaae1a9c313c52
     *     DIRTY 1ab96a171faeeee38496d8b330771a7a
     *     CLEAN 1ab96a171faeeee38496d8b330771a7a 1600 234
     *     READ 335c4c6028171cfddfbaae1a9c313c52
     *     READ 3400330d1dfc7f3f7f4b8d4d803dfcf6
     *
     * The first five lines of the journal form its header. They are the
     * constant string "libcore.io.DiskLruCache", the disk cache's version,
     * the application's version, the value count, and a blank line.
     *
     * Each of the subsequent lines in the file is a record of the state of a
     * cache entry. Each line contains space-separated values: a state, a key,
     * and optional state-specific values.
     *   o DIRTY lines track that an entry is actively being created or updated.
     *     Every successful DIRTY action should be followed by a CLEAN or REMOVE
     *     action. DIRTY lines without a matching CLEAN or REMOVE indicate that
     *     temporary files may need to be deleted.
     *   o CLEAN lines track a cache entry that has been successfully published
     *     and may be read. A publish line is followed by the lengths of each of
     *     its values.
     *   o READ lines track accesses for LRU.
     *   o REMOVE lines track entries that have been deleted.
     *
     * The journal file is appended to as cache operations occur. The journal may
     * occasionally be compacted by dropping redundant lines. A temporary file named
     * "journal.tmp" will be used during compaction; that file should be deleted if
     * it exists when the cache is opened.
     */

    private final File directory;
    private final File journalFile;
    private final File journalFileTmp;
    private final int appVersion;
    private final long maxSize;
    private final int valueCount;
    private long size = 0;
    private Writer journalWriter;
    // accessOrder=true makes iteration order the LRU order: eldest entry first.
    private final LinkedHashMap<String, Entry> lruEntries
            = new LinkedHashMap<String, Entry>(0, 0.75f, true);
    private int redundantOpCount;

    /**
     * To differentiate between old and current snapshots, each entry is given
     * a sequence number each time an edit is committed. A snapshot is stale if
     * its sequence number is not equal to its entry's sequence number.
     */
    private long nextSequenceNumber = 0;

    /* From java.util.Arrays */
    @SuppressWarnings("unchecked")
    private static <T> T[] copyOfRange(T[] original, int start, int end) {
        final int originalLength = original.length; // For exception priority compatibility.
        if (start > end) {
            throw new IllegalArgumentException();
        }
        if (start < 0 || start > originalLength) {
            throw new ArrayIndexOutOfBoundsException();
        }
        final int resultLength = end - start;
        final int copyLength = Math.min(resultLength, originalLength - start);
        final T[] result = (T[]) Array
                .newInstance(original.getClass().getComponentType(), resultLength);
        System.arraycopy(original, start, result, 0, copyLength);
        return result;
    }

    /**
     * Returns the remainder of 'reader' as a string, closing it when done.
     */
    public static String readFully(Reader reader) throws IOException {
        try {
            StringWriter writer = new StringWriter();
            char[] buffer = new char[1024];
            int count;
            while ((count = reader.read(buffer)) != -1) {
                writer.write(buffer, 0, count);
            }
            return writer.toString();
        } finally {
            reader.close();
        }
    }

    /**
     * Returns the ASCII characters up to but not including the next "\r\n", or
     * "\n".
     *
     * @throws EOFException if the stream is exhausted before the next newline
     *     character.
     */
    public static String readAsciiLine(InputStream in) throws IOException {
        // TODO: support UTF-8 here instead
        StringBuilder result = new StringBuilder(80);
        while (true) {
            int c = in.read();
            if (c == -1) {
                throw new EOFException();
            } else if (c == '\n') {
                break;
            }

            result.append((char) c);
        }
        int length = result.length();
        // Strip a trailing '\r' so both "\n" and "\r\n" terminators are accepted.
        if (length > 0 && result.charAt(length - 1) == '\r') {
            result.setLength(length - 1);
        }
        return result.toString();
    }

    /**
     * Closes 'closeable', ignoring any checked exceptions. Does nothing if 'closeable' is null.
     */
    public static void closeQuietly(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (RuntimeException rethrown) {
                throw rethrown;
            } catch (Exception ignored) {
                // checked close() failures are deliberately swallowed here
            }
        }
    }

    /**
     * Recursively delete everything in {@code dir}.
     */
    // TODO: this should specify paths as Strings rather than as Files
    public static void deleteContents(File dir) throws IOException {
        File[] files = dir.listFiles();
        if (files == null) {
            throw new IllegalArgumentException("not a directory: " + dir);
        }
        for (File file : files) {
            if (file.isDirectory()) {
                deleteContents(file); // empty the subdirectory first, then delete it below
            }
            if (!file.delete()) {
                throw new IOException("failed to delete file: " + file);
            }
        }
    }

    /** This cache uses a single background thread to evict entries. */
    private final ExecutorService executorService = new ThreadPoolExecutor(0, 1,
            60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
    private final Callable<Void> cleanupCallable = new Callable<Void>() {
        @Override public Void call() throws Exception {
            synchronized (DiskLruCache.this) {
                if (journalWriter == null) {
                    return null; // closed
                }
                trimToSize();
                if (journalRebuildRequired()) {
                    rebuildJournal();
                    redundantOpCount = 0;
                }
            }
            return null;
        }
    };

    private DiskLruCache(File directory, int appVersion, int valueCount, long maxSize) {
        this.directory = directory;
        this.appVersion = appVersion;
        this.journalFile = new File(directory, JOURNAL_FILE);
        this.journalFileTmp = new File(directory, JOURNAL_FILE_TMP);
        this.valueCount = valueCount;
        this.maxSize = maxSize;
    }

    /**
     * Opens the cache in {@code directory}, creating a cache if none exists
     * there.
     *
     * @param directory a writable directory
     * @param appVersion the application's version; a mismatch invalidates the journal
     * @param valueCount the number of values per cache entry. Must be positive.
     * @param maxSize the maximum number of bytes this cache should use to store
     * @throws IOException if reading or writing the cache directory fails
     */
    public static DiskLruCache open(File directory, int appVersion, int valueCount, long maxSize)
            throws IOException {
        if (maxSize <= 0) {
            throw new IllegalArgumentException("maxSize <= 0");
        }
        if (valueCount <= 0) {
            throw new IllegalArgumentException("valueCount <= 0");
        }

        // prefer to pick up where we left off
        DiskLruCache cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
        if (cache.journalFile.exists()) {
            try {
                cache.readJournal();
                cache.processJournal();
                cache.journalWriter = new BufferedWriter(new FileWriter(cache.journalFile, true),
                        IO_BUFFER_SIZE);
                return cache;
            } catch (IOException journalIsCorrupt) {
//                System.logW("DiskLruCache " + directory + " is corrupt: "
//                        + journalIsCorrupt.getMessage() + ", removing");
                cache.delete();
            }
        }

        // create a new empty cache
        directory.mkdirs();
        cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
        cache.rebuildJournal();
        return cache;
    }

    private void readJournal() throws IOException {
        InputStream in = new BufferedInputStream(new FileInputStream(journalFile), IO_BUFFER_SIZE);
        try {
            String magic = readAsciiLine(in);
            String version = readAsciiLine(in);
            String appVersionString = readAsciiLine(in);
            String valueCountString = readAsciiLine(in);
            String blank = readAsciiLine(in);
            if (!MAGIC.equals(magic)
                    || !VERSION_1.equals(version)
                    || !Integer.toString(appVersion).equals(appVersionString)
                    || !Integer.toString(valueCount).equals(valueCountString)
                    || !"".equals(blank)) {
                // NOTE(review): the message omits appVersionString, so an app-version
                // mismatch is reported without the offending value.
                throw new IOException("unexpected journal header: ["
                        + magic + ", " + version + ", " + valueCountString + ", " + blank + "]");
            }

            while (true) {
                try {
                    readJournalLine(readAsciiLine(in));
                } catch (EOFException endOfJournal) {
                    break; // normal termination: journal fully consumed
                }
            }
        } finally {
            closeQuietly(in);
        }
    }

    private void readJournalLine(String line) throws IOException {
        String[] parts = line.split(" ");
        if (parts.length < 2) {
            throw new IOException("unexpected journal line: " + line);
        }

        String key = parts[1];
        if (parts[0].equals(REMOVE) && parts.length == 2) {
            lruEntries.remove(key);
            return;
        }

        Entry entry = lruEntries.get(key);
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        }

        if (parts[0].equals(CLEAN) && parts.length == 2 + valueCount) {
            entry.readable = true;
            entry.currentEditor = null;
            entry.setLengths(copyOfRange(parts, 2, parts.length));
        } else if (parts[0].equals(DIRTY) && parts.length == 2) {
            entry.currentEditor = new Editor(entry);
        } else if (parts[0].equals(READ) && parts.length == 2) {
            // this work was already done by calling lruEntries.get()
        } else {
            throw new IOException("unexpected journal line: " + line);
        }
    }

    /**
     * Computes the initial size and collects garbage as a part of opening the
     * cache. Dirty entries are assumed to be inconsistent and will be deleted.
     */
    private void processJournal() throws IOException {
        deleteIfExists(journalFileTmp);
        for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext(); ) {
            Entry entry = i.next();
            if (entry.currentEditor == null) {
                for (int t = 0; t < valueCount; t++) {
                    size += entry.lengths[t];
                }
            } else {
                // Entry was mid-edit when the journal ended: drop it entirely.
                entry.currentEditor = null;
                for (int t = 0; t < valueCount; t++) {
                    deleteIfExists(entry.getCleanFile(t));
                    deleteIfExists(entry.getDirtyFile(t));
                }
                i.remove();
            }
        }
    }

    /**
     * Creates a new journal that omits redundant information. This replaces the
     * current journal if it exists.
     */
    private synchronized void rebuildJournal() throws IOException {
        if (journalWriter != null) {
            journalWriter.close();
        }

        // Write the replacement journal to journal.tmp, then swap it in via renameTo.
        Writer writer = new BufferedWriter(new FileWriter(journalFileTmp), IO_BUFFER_SIZE);
        writer.write(MAGIC);
        writer.write("\n");
        writer.write(VERSION_1);
        writer.write("\n");
        writer.write(Integer.toString(appVersion));
        writer.write("\n");
        writer.write(Integer.toString(valueCount));
        writer.write("\n");
        writer.write("\n");

        for (Entry entry : lruEntries.values()) {
            if (entry.currentEditor != null) {
                writer.write(DIRTY + ' ' + entry.key + '\n');
            } else {
                writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
            }
        }

        writer.close();
        journalFileTmp.renameTo(journalFile);
        journalWriter = new BufferedWriter(new FileWriter(journalFile, true), IO_BUFFER_SIZE);
    }

    private static void deleteIfExists(File file) throws IOException {
//        try {
//            Libcore.os.remove(file.getPath());
//        } catch (ErrnoException errnoException) {
//            if (errnoException.errno != OsConstants.ENOENT) {
//                throw errnoException.rethrowAsIOException();
//            }
//        }
        if (file.exists() && !file.delete()) {
            throw new IOException();
        }
    }

    /**
     * Returns a snapshot of the entry named {@code key}, or null if it doesn't
     * exist or is not currently readable. If a value is returned, it is moved to
     * the head of the LRU queue.
     */
    public synchronized Snapshot get(String key) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null) {
            return null;
        }

        if (!entry.readable) {
            return null;
        }

        /*
         * Open all streams eagerly to guarantee that we see a single published
         * snapshot. If we opened streams lazily then the streams could come
         * from different edits.
         */
        InputStream[] ins = new InputStream[valueCount];
        try {
            for (int i = 0; i < valueCount; i++) {
                ins[i] = new FileInputStream(entry.getCleanFile(i));
            }
        } catch (FileNotFoundException e) {
            // a file must have been deleted manually!
            return null;
        }

        redundantOpCount++;
        journalWriter.append(READ + ' ' + key + '\n');
        if (journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }

        return new Snapshot(key, entry.sequenceNumber, ins);
    }

    /**
     * Returns an editor for the entry named {@code key}, or null if another
     * edit is in progress.
     */
    public Editor edit(String key) throws IOException {
        return edit(key, ANY_SEQUENCE_NUMBER);
    }

    private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER
                && (entry == null || entry.sequenceNumber != expectedSequenceNumber)) {
            return null; // snapshot is stale
        }
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        } else if (entry.currentEditor != null) {
            return null; // another edit is in progress
        }

        Editor editor = new Editor(entry);
        entry.currentEditor = editor;

        // flush the journal before creating files to prevent file leaks
        journalWriter.write(DIRTY + ' ' + key + '\n');
        journalWriter.flush();
        return editor;
    }

    /**
     * Returns the directory where this cache stores its data.
     */
    public File getDirectory() {
        return directory;
    }

    /**
     * Returns the maximum number of bytes that this cache should use to store
     * its data.
     */
    public long maxSize() {
        return maxSize;
    }

    /**
     * Returns the number of bytes currently being used to store the values in
     * this cache. This may be greater than the max size if a background
     * deletion is pending.
     */
    public synchronized long size() {
        return size;
    }

    private synchronized void completeEdit(Editor editor, boolean success) throws IOException {
        Entry entry = editor.entry;
        if (entry.currentEditor != editor) {
            throw new IllegalStateException();
        }

        // if this edit is creating the entry for the first time, every index must have a value
        if (success && !entry.readable) {
            for (int i = 0; i < valueCount; i++) {
                if (!entry.getDirtyFile(i).exists()) {
                    editor.abort();
                    throw new IllegalStateException("edit didn't create file " + i);
                }
            }
        }

        for (int i = 0; i < valueCount; i++) {
            File dirty = entry.getDirtyFile(i);
            if (success) {
                if (dirty.exists()) {
                    File clean = entry.getCleanFile(i);
                    dirty.renameTo(clean);
                    long oldLength = entry.lengths[i];
                    long newLength = clean.length();
                    entry.lengths[i] = newLength;
                    size = size - oldLength + newLength;
                }
            } else {
                deleteIfExists(dirty);
            }
        }

        redundantOpCount++;
        entry.currentEditor = null;
        // '|' (non-short-circuit or) on two plain booleans: behaviorally
        // identical to '||' here, just unusual style.
        if (entry.readable | success) {
            entry.readable = true;
            journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
            if (success) {
                entry.sequenceNumber = nextSequenceNumber++;
            }
        } else {
            lruEntries.remove(entry.key);
            journalWriter.write(REMOVE + ' ' + entry.key + '\n');
        }

        if (size > maxSize || journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }
    }

    /**
     * We only rebuild the journal when it will halve the size of the journal
     * and eliminate at least 2000 ops.
     */
    private boolean journalRebuildRequired() {
        final int REDUNDANT_OP_COMPACT_THRESHOLD = 2000;
        return redundantOpCount >= REDUNDANT_OP_COMPACT_THRESHOLD
                && redundantOpCount >= lruEntries.size();
    }

    /**
     * Drops the entry for {@code key} if it exists and can be removed. Entries
     * actively being edited cannot be removed.
     *
     * @return true if an entry was removed.
     */
    public synchronized boolean remove(String key) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null || entry.currentEditor != null) {
            return false;
        }

        for (int i = 0; i < valueCount; i++) {
            File file = entry.getCleanFile(i);
            if (!file.delete()) {
                throw new IOException("failed to delete " + file);
            }
            size -= entry.lengths[i];
            entry.lengths[i] = 0;
        }

        redundantOpCount++;
        journalWriter.append(REMOVE + ' ' + key + '\n');
        lruEntries.remove(key);

        if (journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }

        return true;
    }

    /**
     * Returns true if this cache has been closed.
     */
    public boolean isClosed() {
        return journalWriter == null;
    }

    private void checkNotClosed() {
        if (journalWriter == null) {
            throw new IllegalStateException("cache is closed");
        }
    }

    /**
     * Force buffered operations to the filesystem.
     */
    public synchronized void flush() throws IOException {
        checkNotClosed();
        trimToSize();
        journalWriter.flush();
    }

    /**
     * Closes this cache. Stored values will remain on the filesystem.
     */
    public synchronized void close() throws IOException {
        if (journalWriter == null) {
            return; // already closed
        }
        for (Entry entry : new ArrayList<Entry>(lruEntries.values())) {
            if (entry.currentEditor != null) {
                entry.currentEditor.abort();
            }
        }
        trimToSize();
        journalWriter.close();
        journalWriter = null;
    }

    private void trimToSize() throws IOException {
        while (size > maxSize) {
//            Map.Entry<String, Entry> toEvict = lruEntries.eldest();
            // Access-ordered LinkedHashMap: the first entry is the eldest (LRU).
            final Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next();
            remove(toEvict.getKey());
        }
    }

    /**
     * Closes the cache and deletes all of its stored values. This will delete
     * all files in the cache directory including files that weren't created by
     * the cache.
     */
    public void delete() throws IOException {
        close();
        deleteContents(directory);
    }

    private void validateKey(String key) {
        if (key.contains(" ") || key.contains("\n") || key.contains("\r")) {
            throw new IllegalArgumentException(
                    "keys must not contain spaces or newlines: \"" + key + "\"");
        }
    }

    private static String inputStreamToString(InputStream in) throws IOException {
        return readFully(new InputStreamReader(in, UTF_8));
    }

    /**
     * A snapshot of the values for an entry.
     */
    public final class Snapshot implements Closeable {
        private final String key;
        private final long sequenceNumber;
        private final InputStream[] ins;

        private Snapshot(String key, long sequenceNumber, InputStream[] ins) {
            this.key = key;
            this.sequenceNumber = sequenceNumber;
            this.ins = ins;
        }

        /**
         * Returns an editor for this snapshot's entry, or null if either the
         * entry has changed since this snapshot was created or if another edit
         * is in progress.
         */
        public Editor edit() throws IOException {
            return DiskLruCache.this.edit(key, sequenceNumber);
        }

        /**
         * Returns the unbuffered stream with the value for {@code index}.
         */
        public InputStream getInputStream(int index) {
            return ins[index];
        }

        /**
         * Returns the string value for {@code index}.
         */
        public String getString(int index) throws IOException {
            return inputStreamToString(getInputStream(index));
        }

        @Override public void close() {
            for (InputStream in : ins) {
                closeQuietly(in);
            }
        }
    }

    /**
     * Edits the values for an entry.
     */
    public final class Editor {
        private final Entry entry;
        private boolean hasErrors;

        private Editor(Entry entry) {
            this.entry = entry;
        }

        /**
         * Returns an unbuffered input stream to read the last committed value,
         * or null if no value has been committed.
         */
        public InputStream newInputStream(int index) throws IOException {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
                }
                if (!entry.readable) {
                    return null;
                }
                return new FileInputStream(entry.getCleanFile(index));
            }
        }

        /**
         * Returns the last committed value as a string, or null if no value
         * has been committed.
         */
        public String getString(int index) throws IOException {
            InputStream in = newInputStream(index);
            return in != null ? inputStreamToString(in) : null;
        }

        /**
         * Returns a new unbuffered output stream to write the value at
         * {@code index}. If the underlying output stream encounters errors
         * when writing to the filesystem, this edit will be aborted when
         * {@link #commit} is called. The returned output stream does not throw
         * IOExceptions.
         */
        public OutputStream newOutputStream(int index) throws IOException {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
                }
                return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index)));
            }
        }

        /**
         * Sets the value at {@code index} to {@code value}.
         */
        public void set(int index, String value) throws IOException {
            Writer writer = null;
            try {
                writer = new OutputStreamWriter(newOutputStream(index), UTF_8);
                writer.write(value);
            } finally {
                closeQuietly(writer);
            }
        }

        /**
         * Commits this edit so it is visible to readers. This releases the
         * edit lock so another edit may be started on the same key.
         */
        public void commit() throws IOException {
            if (hasErrors) {
                completeEdit(this, false);
                remove(entry.key); // the previous entry is stale
            } else {
                completeEdit(this, true);
            }
        }

        /**
         * Aborts this edit. This releases the edit lock so another edit may be
         * started on the same key.
         */
        public void abort() throws IOException {
            completeEdit(this, false);
        }

        /** Records write failures in {@link Editor#hasErrors} instead of throwing. */
        private class FaultHidingOutputStream extends FilterOutputStream {
            private FaultHidingOutputStream(OutputStream out) {
                super(out);
            }

            @Override public void write(int oneByte) {
                try {
                    out.write(oneByte);
                } catch (IOException e) {
                    hasErrors = true;
                }
            }

            @Override public void write(byte[] buffer, int offset, int length) {
                try {
                    out.write(buffer, offset, length);
                } catch (IOException e) {
                    hasErrors = true;
                }
            }

            @Override public void close() {
                try {
                    out.close();
                } catch (IOException e) {
                    hasErrors = true;
                }
            }

            @Override public void flush() {
                try {
                    out.flush();
                } catch (IOException e) {
                    hasErrors = true;
                }
            }
        }
    }

    private final class Entry {
        private final String key;

        /** Lengths of this entry's files. */
        private final long[] lengths;

        /** True if this entry has ever been published */
        private boolean readable;

        /** The ongoing edit or null if this entry is not being edited. */
        private Editor currentEditor;

        /** The sequence number of the most recently committed edit to this entry. */
        private long sequenceNumber;

        private Entry(String key) {
            this.key = key;
            this.lengths = new long[valueCount];
        }

        public String getLengths() throws IOException {
            StringBuilder result = new StringBuilder();
            for (long size : lengths) {
                result.append(' ').append(size);
            }
            return result.toString();
        }

        /**
         * Set lengths using decimal numbers like "10123".
         */
        private void setLengths(String[] strings) throws IOException {
            if (strings.length != valueCount) {
                throw invalidLengths(strings);
            }

            try {
                for (int i = 0; i < strings.length; i++) {
                    lengths[i] = Long.parseLong(strings[i]);
                }
            } catch (NumberFormatException e) {
                throw invalidLengths(strings);
            }
        }

        private IOException invalidLengths(String[] strings) throws IOException {
            throw new IOException("unexpected journal line: " + Arrays.toString(strings));
        }

        public File getCleanFile(int i) {
            return new File(directory, key + "."
+ i); } public File getDirtyFile(int i) { return new File(directory, key + "." + i + ".tmp"); } } }
/*
 * Copyright 2013 Gunnar Kappei.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.zib.museum.museumdat;

// NOTE(review): This interface is generated by Apache XMLBeans from an XML
// schema (note the typeSystemForClassLoader/resolveHandle lookups). Do not
// edit it by hand; regenerate it from the schema instead.

/**
 * A document containing one shapeMeasurements(@http://museum.zib.de/museumdat) element.
 *
 * This is a complex type.
 */
public interface ShapeMeasurementsDocument extends org.apache.xmlbeans.XmlObject {
  // Schema type handle resolved from the compiled type system bundled on the classpath.
  public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
      org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(ShapeMeasurementsDocument.class.getClassLoader(),
          "schemaorg_apache_xmlbeans.system.sA3BA01EB72591092E4171C5BCB38F1DD")
      .resolveHandle("shapemeasurementsd67ddoctype");

  /**
   * Gets the "shapeMeasurements" element
   */
  de.zib.museum.museumdat.ShapeMeasurementsDocument.ShapeMeasurements getShapeMeasurements();

  /**
   * Sets the "shapeMeasurements" element
   */
  void setShapeMeasurements(de.zib.museum.museumdat.ShapeMeasurementsDocument.ShapeMeasurements shapeMeasurements);

  /**
   * Appends and returns a new empty "shapeMeasurements" element
   */
  de.zib.museum.museumdat.ShapeMeasurementsDocument.ShapeMeasurements addNewShapeMeasurements();

  /**
   * An XML shapeMeasurements(@http://museum.zib.de/museumdat).
   *
   * This is an atomic type that is a restriction of de.zib.museum.museumdat.ShapeMeasurementsDocument$ShapeMeasurements.
   */
  public interface ShapeMeasurements extends org.apache.xmlbeans.XmlString {
    public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
        org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(ShapeMeasurements.class.getClassLoader(),
            "schemaorg_apache_xmlbeans.system.sA3BA01EB72591092E4171C5BCB38F1DD")
        .resolveHandle("shapemeasurements4319elemtype");

    /**
     * Gets the "encodinganalog" attribute
     */
    java.lang.String getEncodinganalog();

    /**
     * Gets (as xml) the "encodinganalog" attribute
     */
    org.apache.xmlbeans.XmlString xgetEncodinganalog();

    /**
     * True if has "encodinganalog" attribute
     */
    boolean isSetEncodinganalog();

    /**
     * Sets the "encodinganalog" attribute
     */
    void setEncodinganalog(java.lang.String encodinganalog);

    /**
     * Sets (as xml) the "encodinganalog" attribute
     */
    void xsetEncodinganalog(org.apache.xmlbeans.XmlString encodinganalog);

    /**
     * Unsets the "encodinganalog" attribute
     */
    void unsetEncodinganalog();

    /**
     * Gets the "label" attribute
     */
    java.lang.String getLabel();

    /**
     * Gets (as xml) the "label" attribute
     */
    org.apache.xmlbeans.XmlString xgetLabel();

    /**
     * True if has "label" attribute
     */
    boolean isSetLabel();

    /**
     * Sets the "label" attribute
     */
    void setLabel(java.lang.String label);

    /**
     * Sets (as xml) the "label" attribute
     */
    void xsetLabel(org.apache.xmlbeans.XmlString label);

    /**
     * Unsets the "label" attribute
     */
    void unsetLabel();

    /**
     * Gets the "lang" attribute
     */
    java.lang.String getLang();

    /**
     * Gets (as xml) the "lang" attribute
     */
    org.apache.xmlbeans.XmlLanguage xgetLang();

    /**
     * True if has "lang" attribute
     */
    boolean isSetLang();

    /**
     * Sets the "lang" attribute
     */
    void setLang(java.lang.String lang);

    /**
     * Sets (as xml) the "lang" attribute
     */
    void xsetLang(org.apache.xmlbeans.XmlLanguage lang);

    /**
     * Unsets the "lang" attribute
     */
    void unsetLang();

    /**
     * A factory class with static methods for creating instances
     * of this type.
     */
    public static final class Factory {
      public static de.zib.museum.museumdat.ShapeMeasurementsDocument.ShapeMeasurements newInstance() {
        return (de.zib.museum.museumdat.ShapeMeasurementsDocument.ShapeMeasurements)
            org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null );
      }

      public static de.zib.museum.museumdat.ShapeMeasurementsDocument.ShapeMeasurements newInstance(org.apache.xmlbeans.XmlOptions options) {
        return (de.zib.museum.museumdat.ShapeMeasurementsDocument.ShapeMeasurements)
            org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options );
      }

      private Factory() { } // No instance of this class allowed
    }
  }

  /**
   * A factory class with static methods for creating instances
   * of this type.
   */
  public static final class Factory {
    public static de.zib.museum.museumdat.ShapeMeasurementsDocument newInstance() {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument newInstance(org.apache.xmlbeans.XmlOptions options) {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options );
    }

    /** @param xmlAsString the string value to parse */
    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.lang.String xmlAsString) throws org.apache.xmlbeans.XmlException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, null );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.lang.String xmlAsString, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, options );
    }

    /** @param file the file from which to load an xml document */
    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.io.File file) throws org.apache.xmlbeans.XmlException, java.io.IOException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, null );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.io.File file, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, options );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.net.URL u) throws org.apache.xmlbeans.XmlException, java.io.IOException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, null );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.net.URL u, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, options );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.io.InputStream is) throws org.apache.xmlbeans.XmlException, java.io.IOException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, null );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.io.InputStream is, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, options );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.io.Reader r) throws org.apache.xmlbeans.XmlException, java.io.IOException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, null );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(java.io.Reader r, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, options );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(javax.xml.stream.XMLStreamReader sr) throws org.apache.xmlbeans.XmlException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, null );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(javax.xml.stream.XMLStreamReader sr, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, options );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(org.w3c.dom.Node node) throws org.apache.xmlbeans.XmlException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, null );
    }

    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(org.w3c.dom.Node node, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, options );
    }

    /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
    @Deprecated
    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, null );
    }

    /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
    @Deprecated
    public static de.zib.museum.museumdat.ShapeMeasurementsDocument parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
      return (de.zib.museum.museumdat.ShapeMeasurementsDocument)
          org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, options );
    }

    /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
    @Deprecated
    public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
      return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, null );
    }

    /** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
    @Deprecated
    public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
      return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, options );
    }

    private Factory() { } // No instance of this class allowed
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.sls.scheduler; import java.io.IOException; import java.text.MessageFormat; import java.util.Queue; import java.util.concurrent.DelayQueue; import java.util.concurrent.Delayed; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.exceptions.YarnException; @Private @Unstable public class TaskRunner { @Private @Unstable public abstract static class Task implements Runnable, Delayed { private long start; private long end; private long nextRun; private long startTime; private long endTime; private long repeatInterval; private Queue<Task> queue; public Task(){} //values in milliseconds, start/end are milliseconds from now public void init(long startTime, long endTime, long repeatInterval) { if (endTime - startTime < 0) { throw new IllegalArgumentException(MessageFormat.format( "endTime[{0}] cannot be smaller than startTime[{1}]", endTime, startTime)); } if (repeatInterval < 0) { throw new IllegalArgumentException(MessageFormat.format( "repeatInterval[{0}] cannot be less than 
1", repeatInterval)); } if ((endTime - startTime) % repeatInterval != 0) { throw new IllegalArgumentException(MessageFormat.format( "Invalid parameters: (endTime[{0}] - startTime[{1}]) " + "% repeatInterval[{2}] != 0", endTime, startTime, repeatInterval)); } start = startTime; end = endTime; this.repeatInterval = repeatInterval; } private void timeRebase(long now) { startTime = now + start; endTime = now + end; this.nextRun = startTime; } //values in milliseconds, start is milliseconds from now //it only executes firstStep() public void init(long startTime) { init(startTime, startTime, 1); } private void setQueue(Queue<Task> queue) { this.queue = queue; } @Override public final void run() { try { if (nextRun == startTime) { firstStep(); nextRun += repeatInterval; if (nextRun <= endTime) { queue.add(this); } } else if (nextRun < endTime) { middleStep(); nextRun += repeatInterval; queue.add(this); } else { lastStep(); } } catch (Exception e) { e.printStackTrace(); Thread.getDefaultUncaughtExceptionHandler() .uncaughtException(Thread.currentThread(), e); } } @Override public long getDelay(TimeUnit unit) { return unit.convert(nextRun - System.currentTimeMillis(), TimeUnit.MILLISECONDS); } @Override public int compareTo(Delayed o) { if (!(o instanceof Task)) { throw new IllegalArgumentException("Parameter must be a Task instance"); } Task other = (Task) o; return (int) Math.signum(nextRun - other.nextRun); } public abstract void firstStep() throws Exception; public abstract void middleStep() throws Exception; public abstract void lastStep() throws Exception; public void setEndTime(long et) { endTime = et; } } private DelayQueue queue; private int threadPoolSize; private ThreadPoolExecutor executor; private long startTimeMS = 0; /*Start - Wajih changing Thread Pool size*/ private boolean unboundedRunner = false; /*End - Wajih */ public TaskRunner() { queue = new DelayQueue(); } /*Start - Wajih changing Thread Pool size*/ public void setQueueSize(int threadPoolSize, 
boolean unboundedRunner) { this.threadPoolSize = threadPoolSize; this.unboundedRunner = unboundedRunner; } /*End - Wajih */ @SuppressWarnings("unchecked") public void start() { if (executor != null) { throw new IllegalStateException("Already started"); } DelayQueue preStartQueue = queue; queue = new DelayQueue(); /*Start - Wajih changing Thread Pool size*/ System.out.println("~~~~~~~ WAJIH: ThreadS Execution Start "); if (!unboundedRunner) { executor = new ThreadPoolExecutor(threadPoolSize, threadPoolSize, 0, TimeUnit.MILLISECONDS, queue); executor.prestartAllCoreThreads(); } else { executor = new ThreadPoolExecutor(Integer.MAX_VALUE, Integer.MAX_VALUE, 0, TimeUnit.MILLISECONDS, queue); int numPrestartThreads = threadPoolSize; if (preStartQueue.size() > numPrestartThreads) { numPrestartThreads = preStartQueue.size(); } for (int i = 0 ; i < numPrestartThreads ; i++) { executor.prestartCoreThread(); } } System.out.println("~~~~~~~ WAJIH: ThreadS Execution End "); /*End - Wajih */ startTimeMS = System.currentTimeMillis(); for (Object d : preStartQueue) { schedule((Task) d, startTimeMS); } } public void stop() { executor.shutdownNow(); } @SuppressWarnings("unchecked") private void schedule(Task task, long timeNow) { task.timeRebase(timeNow); task.setQueue(queue); queue.add(task); } public void schedule(Task task) { schedule(task, System.currentTimeMillis()); } public long getStartTimeMS() { return this.startTimeMS; } }
/* * Copyright (c) 2015 EMC Corporation * All Rights Reserved */ package com.emc.vipr.client.core; import static com.emc.vipr.client.core.util.ResourceUtils.defaultList; import java.net.URI; import java.util.List; import com.emc.storageos.model.BulkIdParam; import com.emc.storageos.model.NamedRelatedResourceRep; import com.emc.storageos.model.smis.StorageSystemSMISCreateParam; import com.emc.storageos.model.systems.StorageSystemBulkRep; import com.emc.storageos.model.systems.StorageSystemConnectivityList; import com.emc.storageos.model.systems.StorageSystemConnectivityRestRep; import com.emc.storageos.model.systems.StorageSystemList; import com.emc.storageos.model.systems.StorageSystemRequestParam; import com.emc.storageos.model.systems.StorageSystemRestRep; import com.emc.storageos.model.systems.StorageSystemUpdateRequestParam; import com.emc.vipr.client.Task; import com.emc.vipr.client.Tasks; import com.emc.vipr.client.ViPRCoreClient; import com.emc.vipr.client.core.filters.ResourceFilter; import com.emc.vipr.client.core.impl.PathConstants; import com.emc.vipr.client.impl.RestClient; import com.emc.vipr.client.core.util.ResourceUtils; import javax.ws.rs.core.UriBuilder; /** * Storage Systems resources. 
* <p> * Base URL: <tt>/vdc/storage-systems</tt> */ public class StorageSystems extends AbstractCoreBulkResources<StorageSystemRestRep> implements TopLevelResources<StorageSystemRestRep>, TaskResources<StorageSystemRestRep> { public StorageSystems(ViPRCoreClient parent, RestClient client) { super(parent, client, StorageSystemRestRep.class, PathConstants.STORAGE_SYSTEM_URL); } @Override public StorageSystems withInactive(boolean inactive) { return (StorageSystems) super.withInactive(inactive); } @Override public StorageSystems withInternal(boolean internal) { return (StorageSystems) super.withInternal(internal); } @Override protected List<StorageSystemRestRep> getBulkResources(BulkIdParam input) { StorageSystemBulkRep response = client.post(StorageSystemBulkRep.class, input, getBulkUrl()); return defaultList(response.getStorageSystems()); } @Override public Tasks<StorageSystemRestRep> getTasks(URI id) { return doGetTasks(id); } @Override public Task<StorageSystemRestRep> getTask(URI id, URI taskId) { return doGetTask(id, taskId); } /** * Lists all storage systems. * <p> * API Call: <tt>GET /vdc/storage-systems</tt> * * @return the list of storage system references. */ @Override public List<NamedRelatedResourceRep> list() { StorageSystemList response = client.get(StorageSystemList.class, baseUrl); return ResourceUtils.defaultList(response.getStorageSystems()); } /** * Gets the list of all storage systems. This is a convenience method for: <tt>getByRefs(list())</tt>. * * @return the list of all storage systems. */ @Override public List<StorageSystemRestRep> getAll() { return getAll(null); } /** * Gets the list of all storage systems, optionally filering the results. This is a convenience method for: * <tt>getByRefs(list(), filter)</tt>. * * @param filter * the resource filter to apply to the results as they are returned (optional). * @return the list of all storage systems. 
*/ @Override public List<StorageSystemRestRep> getAll(ResourceFilter<StorageSystemRestRep> filter) { List<NamedRelatedResourceRep> refs = list(); return getByRefs(refs, filter); } /** * Begins creating a storage system. * <p> * API Call: <tt>POST /vdc/storage-systems</tt> * * @param input * the create configuration. * @return a task for monitoring the progress of the operation. */ public Task<StorageSystemRestRep> create(StorageSystemRequestParam input) { return postTask(input, baseUrl); } /** * Begins updating the given storage system by ID. * <p> * API Call: <tt>PUT /vdc/storage-systems/{id}</tt> * * @param id * the ID of the storage system. * @param input * the update configuration. * @return a task for monitoring the progress of the operation. */ public Task<StorageSystemRestRep> update(URI id, StorageSystemUpdateRequestParam input) { return putTask(input, getIdUrl(), id); } /** * Begins deactivating the given storage system by ID. * <p> * API Call: <tt>POST /vdc/storage-systems/{id}/deactivate</tt> * * @param id * the ID of the storage system. * @return a task for monitoring the progress of the operation. */ public Task<StorageSystemRestRep> deactivate(URI id) { return doDeactivateWithTask(id); } /** * Adds an SMI-S storage system. * <p> * API Call: <tt>POST /vdc/storage-providers/storage-systems</tt> * * @param input * the SMI-S storage system configuration. * @return a task for monitoring the progress of the operation. */ public Task<StorageSystemRestRep> add(StorageSystemSMISCreateParam input) { return postTask(input, PathConstants.STORAGE_PROVIDER_URL + "/storage-systems"); } /** * Begins discovery on all storage systems. * <p> * API Call: <tt>POST /vdc/storage-systems/discover</tt> * * @return tasks for monitoring the progress of the operation(s). */ public Tasks<StorageSystemRestRep> discoverAll() { return postTasks(baseUrl + "/discover"); } /** * Begins discovery on the given storage system. 
* <p> * API Call: <tt>POST /vdc/storage-systems/{id}/discover</tt> * * @param id * the ID of the storage system. * @return a task for monitoring the progress of the operation. */ public Task<StorageSystemRestRep> discover(URI id) { return discover(id, null); } /** * Begins discovery on the given storage system. * <p> * API Call: <tt>POST /vdc/storage-systems/{id}/discover</tt> * * @param id * the ID of the storage system. * @param type * the type of discovery to perform. * @return a task for monitoring the progress of the operation. */ public Task<StorageSystemRestRep> discover(URI id, String type) { UriBuilder builder = client.uriBuilder(getIdUrl() + "/discover"); if (type != null && !type.equals("")) { builder = builder.queryParam("namespace", type); } return postTaskURI(builder.build(id)); } /** * Registers the given storage system by ID. * <p> * API Call: <tt>POST /vdc/storage-systems/{id}/register</tt> * * @param id * the ID of the storage system. * @return the updated storage system. */ public StorageSystemRestRep register(URI id) { return client.post(StorageSystemRestRep.class, getIdUrl() + "/register", id); } /** * De-registers the given storage system by ID. * <p> * API Call: <tt>POST /vdc/storage-systems/{id}/deregister</tt> * * @param id * the ID of the storage system. * @return the updated storage system. */ public StorageSystemRestRep deregister(URI id) { return client.post(StorageSystemRestRep.class, getIdUrl() + "/deregister", id); } /** * Gets the connectivity information for the given storage system by ID. * <p> * API Call: <tt>GET /vdc/storage-systems/{id}/connectivity</tt> * * @param id * the ID of the storage system. * @return the list of storage system connectivity. 
*/ public List<StorageSystemConnectivityRestRep> getConnectivity(URI id) { StorageSystemConnectivityList response = client.get(StorageSystemConnectivityList.class, getIdUrl() + "/connectivity", id); return defaultList(response.getConnections()); } /** * Lists the storage systems for the given SMI-S provider by ID. * <p> * API Call: <tt>GET /vdc/storage-providers/{smisProviderId}/storage-systems</tt> * * @param smisProviderId * the ID of the SMI-S provider. * @return the list of storage system references. */ public List<NamedRelatedResourceRep> listBySmisProvider(URI smisProviderId) { StorageSystemList response = client.get(StorageSystemList.class, PathConstants.STORAGE_SYSTEM_BY_PROVIDER_URL, smisProviderId); return defaultList(response.getStorageSystems()); } /** * Gets the list of storage systems for the given SMI-S provider by ID. This is a convenience method for * <tt>getByRefs(listBySmisProvider(smisProviderId))</tt> * * @param smisProviderId * the ID of the SMI-S provider. * @return the list of storage systems. * * @see #getByRefs(java.util.Collection) * @see #listBySmisProvider(URI) */ public List<StorageSystemRestRep> getBySmisProvider(URI smisProviderId) { List<NamedRelatedResourceRep> refs = listBySmisProvider(smisProviderId); return getByRefs(refs); } }
package com.koch.ambeth.util.collections;

/*-
 * #%L
 * jambeth-util-test
 * %%
 * Copyright (C) 2017 Koch Softwaredevelopment
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import org.junit.Assert;
import org.junit.Test;

import com.koch.ambeth.util.collections.IntHashMap.IntHashMapEntry;
import com.koch.ambeth.util.collections.IntHashMap.IntIterator;

/**
 * Unit tests for {@link IntHashMap}: constructors, the basic map operations and
 * Java serialization round-tripping.
 */
public class IntHashMapTest {
	// Keys/values that fillForTest() always inserts and the tests probe for.
	protected int[] existingKeys = {5, 6, 7, 8, 9};

	protected int[] existingValues = {15, 16, 17, 18, 19};

	// Keys that are guaranteed to be absent after fillForTest().
	protected int[] nonExistingKeys = {1, 2, 3, 4};

	// 80 filler entries (keys 20..99) plus the well-known keys above.
	protected int expectedTestCount = 80 + existingKeys.length;

	/**
	 * Builds a map with 80 filler entries (key -> key + 15 for keys 20..99) and
	 * the well-known existing key/value pairs.
	 */
	protected IntHashMap fillForTest() {
		IntHashMap map = new IntHashMap();
		for (int key = 20; key < 100; key++) {
			map.put(key, key + 15);
		}
		for (int idx = 0; idx < existingKeys.length; idx++) {
			map.put(existingKeys[idx], existingValues[idx]);
		}
		return map;
	}

	@Test
	public void ctorDefault() {
		new IntHashMap();
	}

	@Test
	public void ctorCapacity() {
		new IntHashMap(100);
	}

	@Test
	public void ctorCapacityAndFactor() {
		new IntHashMap(100, 0.75f);
	}

	@Test
	public void init() {
		IntHashMap map = new IntHashMap(100, 0.75f);
		map.init();
	}

	@Test
	public void containsKey() {
		IntHashMap map = fillForTest();
		Assert.assertTrue(map.containsKey(existingKeys[0]));
		Assert.assertFalse(map.containsKey(nonExistingKeys[0]));
	}

	@Test
	public void clear() {
		IntHashMap map = fillForTest();
		Assert.assertEquals(expectedTestCount, map.size());
		map.clear();
		Assert.assertEquals(0, map.size());
	}

	@Test
	public void containsValue() {
		IntHashMap map = fillForTest();
		Assert.assertTrue(map.containsValue(existingValues[0]));
		// -1 is never produced by fillForTest().
		Assert.assertFalse(map.containsValue(-1));
	}

	@Test
	public void size() {
		IntHashMap map = fillForTest();
		Assert.assertEquals(expectedTestCount, map.size());
	}

	@Test
	public void isEmpty() {
		IntHashMap map = fillForTest();
		map.clear();
		Assert.assertTrue(map.isEmpty());
		Assert.assertEquals(0, map.size());
	}

	@Test
	public void get() {
		IntHashMap map = fillForTest();
		for (int idx = 0; idx < existingKeys.length; idx++) {
			Assert.assertEquals(existingValues[idx], map.get(existingKeys[idx]));
		}
	}

	@Test
	public void put() {
		IntHashMap map = fillForTest();
		for (int idx = 0; idx < existingKeys.length; idx++) {
			Assert.assertTrue(map.containsKey(existingKeys[idx]));
		}
	}

	@Test
	public void putIfNotExists() {
		IntHashMap map = fillForTest();
		// First insertion of each absent key must succeed...
		for (int idx = 0; idx < nonExistingKeys.length; idx++) {
			Assert.assertTrue(map.putIfNotExists(nonExistingKeys[idx], 1));
		}
		// ...a second attempt must be rejected and leave the first value intact.
		Assert.assertFalse(map.putIfNotExists(nonExistingKeys[0], 2));
		Assert.assertEquals(1, map.get(nonExistingKeys[0]));
		Assert.assertEquals(expectedTestCount + nonExistingKeys.length, map.size());
	}

	@Test
	public void remove() {
		IntHashMap map = fillForTest();
		for (int idx = 0; idx < existingKeys.length; idx++) {
			Assert.assertEquals(existingValues[idx], map.remove(existingKeys[idx]));
		}
		// Removing an absent key yields 0.
		Assert.assertEquals(0, map.remove(nonExistingKeys[0]));
		Assert.assertEquals(expectedTestCount - existingKeys.length, map.size());
	}

	@Test
	public void removeIfObject() {
		IntHashMap map = fillForTest();
		// Wrong value for an existing key: no removal.
		for (int idx = 0; idx < existingKeys.length; idx++) {
			Assert.assertFalse(map.removeIfObject(existingKeys[idx], -1));
		}
		// Absent key: no removal.
		for (int idx = 0; idx < nonExistingKeys.length; idx++) {
			Assert.assertFalse(map.removeIfObject(nonExistingKeys[idx], -1));
		}
		// Matching key AND value: removal succeeds.
		for (int idx = 0; idx < existingKeys.length; idx++) {
			Assert.assertTrue(map.removeIfObject(existingKeys[idx], existingValues[idx]));
		}
		Assert.assertEquals(expectedTestCount - existingKeys.length, map.size());
	}

	@Test
	public void values() {
		IntHashMap map = fillForTest();
		IntArrayList valuesList = map.values();
		Assert.assertEquals(map.size(), valuesList.size);
	}

	@Test
	public void serialize() throws Throwable {
		IntHashMap map = fillForTest();

		// Write the map out through standard Java serialization.
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		try (ObjectOutputStream os = new ObjectOutputStream(bos)) {
			os.writeObject(map);
			os.flush();
		}
		byte[] content = bos.toByteArray();

		// Read it back and verify the clone is entry-for-entry identical.
		try (ObjectInputStream is = new ObjectInputStream(new ByteArrayInputStream(content))) {
			IntHashMap cloneMap = (IntHashMap) is.readObject();
			Assert.assertEquals(map.size(), cloneMap.size());
			IntIterator iter = map.iterator();
			while (iter.hasNext()) {
				IntHashMapEntry entry = iter.nextEntry();
				int cloneValue = cloneMap.remove(entry.getKey());
				Assert.assertEquals(entry.getValue(), cloneValue);
			}
			// Every entry of the original was consumed from the clone.
			Assert.assertEquals(0, cloneMap.size());
		}
	}
}
/** * Vero Analytics */ package com.hue.common; /** * @author Tai Hu * */ public enum DBType { POSTGRESQL { @Override public String getVendorName() { return "PostgreSQL"; } @Override public String getDriver() { return "org.postgresql.Driver"; } @Override public String getDBUrl() { return "jdbc:postgresql://%s:%d/%s"; } @Override public int getDefaultPort() { return 5432; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, REDSHIFT { @Override public String getVendorName() { return "Amazon Redshift"; } @Override public String getDriver() { return "com.amazon.redshift.jdbc41.Driver"; } @Override public String getDBUrl() { return "jdbc:redshift://%s:%d/%s"; } @Override public int getDefaultPort() { return 5439; } @Override public String getUserNameParamName() { return "UID"; } @Override public String getPasswordParamName() { return "PWD"; } }, MSSQL { @Override public String getVendorName() { return "Microsoft SQL Server"; } @Override public String getDriver() { return "com.microsoft.sqlserver.jdbc.SQLServerDriver"; } @Override public String getDBUrl() { return "jdbc:sqlserver://%s:%d;databaseName=%s"; } @Override public int getDefaultPort() { return 1433; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, AZURE { @Override public String getVendorName() { return "Microsoft Azure Cloud"; } @Override public String getDriver() { return "com.microsoft.sqlserver.jdbc.SQLServerDriver"; } @Override public String getDBUrl() { return "jdbc:sqlserver://%s:%d;databaseName=%s"; } @Override public int getDefaultPort() { return 1433; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, TERADATA { @Override public String getVendorName() { return "Teradata"; } @Override public String getDriver() { return 
"com.teradata.jdbc.TeraDriver"; } @Override public String getDBUrl() { return "jdbc:teradata://%s/DBS_PORT=%d,DATABASE=%s"; } @Override public int getDefaultPort() { return 1025; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, MYSQL { @Override public String getVendorName() { return "MySQL"; } @Override public String getDriver() { return "com.mysql.jdbc.Driver"; } @Override public String getDBUrl() { return "jdbc:mysql://%s:%d/%s?rewriteBatchedStatements=true"; } @Override public int getDefaultPort() { return 3306; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, DERBY_LOCAL { @Override public String getVendorName() { return "Derby"; } @Override public String getDriver() { return "org.apache.derby.jdbc.EmbeddedDriver"; } @Override public String getDBUrl() { return "jdbc:derby:%s;create=true"; } @Override public int getDefaultPort() { return 1527; } @Override public String getUserNameParamName() { return null; } @Override public String getPasswordParamName() { return null; } }, DERBY_REMOTE { @Override public String getVendorName() { return "Derby"; } @Override public String getDriver() { return "org.apache.derby.jdbc.ClientDriver"; } @Override public String getDBUrl() { return "jdbc:derby://%s:%d/%s"; } @Override public int getDefaultPort() { return 1527; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, ACCESS { @Override public String getVendorName() { return "Access"; } @Override public String getDriver() { return "net.ucanaccess.jdbc.UcanaccessDriver"; } @Override public String getDBUrl() { return null; } @Override public int getDefaultPort() { return 0; } @Override public String getUserNameParamName() { return null; } @Override public String getPasswordParamName() { return null; } }, HIVE 
{ @Override public String getVendorName() { return "Hive"; } @Override public String getDriver() { return "org.apache.hive.jdbc.HiveDriver"; } @Override public String getDBUrl() { return "jdbc:hive2://%s:%d/%s"; } @Override public int getDefaultPort() { return 10000; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, NETEZZA { @Override public String getVendorName() { return "Netezza"; } @Override public String getDriver() { return "org.netezza.Driver"; } @Override public String getDBUrl() { return "jdbc:netezza://%s:%d:%s"; } @Override public int getDefaultPort() { return 5480; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, VERTICA { @Override public String getVendorName() { return "Vertica"; } @Override public String getDriver() { return "com.vertica.jdbc.Driver"; } @Override public String getDBUrl() { return "jdbc:vertica://%s:%d/%s"; } @Override public int getDefaultPort() { return 5433; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, ORACLE { @Override public String getVendorName() { return "Oracle"; } @Override public String getDriver() { return "oracle.jdbc.driver.OracleDriver"; } @Override public String getDBUrl() { return "jdbc:oracle:thin:@%s:%d:%s"; } @Override public int getDefaultPort() { return 1521; } @Override public String getUserNameParamName() { return "user"; } @Override public String getPasswordParamName() { return "password"; } }, PRESTO { @Override public String getVendorName() { return "Presto"; } @Override public String getDriver() { return null; } @Override public String getDBUrl() { //return "jdbc:oracle:thin:@%s:%d:%s"; return null; } @Override public int getDefaultPort() { //return 1521; return 0; } @Override public String getUserNameParamName() { return "user"; } 
@Override public String getPasswordParamName() { return "password"; } }, UNKNOWN { @Override public String getVendorName() { return null; } @Override public String getDriver() { return null; } @Override public String getDBUrl() { return null; } @Override public int getDefaultPort() { return 0; } @Override public String getUserNameParamName() { return null; } @Override public String getPasswordParamName() { return null; } }; public abstract String getVendorName(); public abstract String getDriver(); public abstract String getDBUrl(); public abstract int getDefaultPort(); public abstract String getUserNameParamName(); public abstract String getPasswordParamName(); }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.processor.internals; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.kstream.Windowed; import org.apache.kafka.streams.processor.ProcessorContext; import org.apache.kafka.streams.processor.PunctuationType; import org.apache.kafka.streams.processor.StateStore; import org.apache.kafka.streams.processor.StateStoreContext; import org.apache.kafka.streams.processor.TaskId; import org.apache.kafka.streams.processor.To; import org.apache.kafka.streams.processor.internals.Task.TaskType; import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl; import org.apache.kafka.streams.state.KeyValueIterator; import org.apache.kafka.streams.state.KeyValueStore; import org.apache.kafka.streams.state.SessionStore; import org.apache.kafka.streams.state.TimestampedKeyValueStore; import org.apache.kafka.streams.state.TimestampedWindowStore; import org.apache.kafka.streams.state.ValueAndTimestamp; import org.apache.kafka.streams.state.WindowStore; import 
org.apache.kafka.streams.state.WindowStoreIterator;
import org.apache.kafka.streams.state.internals.ThreadCache;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.function.Consumer;

import static java.util.Arrays.asList;
import static org.apache.kafka.streams.processor.internals.ProcessorContextImpl.BYTEARRAY_VALUE_SERIALIZER;
import static org.apache.kafka.streams.processor.internals.ProcessorContextImpl.BYTES_KEY_SERIALIZER;
import static org.easymock.EasyMock.anyLong;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for {@code ProcessorContextImpl}: global stores must be read-only,
 * local stores must forward mutations to the underlying store (tracked via the
 * {@code *Executed} flags), and a standby context must reject every
 * active-task-only operation with {@link UnsupportedOperationException}.
 */
public class ProcessorContextImplTest {
    private ProcessorContextImpl context;

    private final StreamsConfig streamsConfig = streamsConfigMock();

    private final RecordCollector recordCollector = mock(RecordCollector.class);

    private static final String KEY = "key";
    private static final Bytes KEY_BYTES = Bytes.wrap(KEY.getBytes());
    private static final long VALUE = 42L;
    private static final byte[] VALUE_BYTES = String.valueOf(VALUE).getBytes();
    private static final long TIMESTAMP = 21L;
    private static final long STREAM_TIME = 50L;
    private static final ValueAndTimestamp<Long> VALUE_AND_TIMESTAMP = ValueAndTimestamp.make(42L, 21L);
    private static final String STORE_NAME = "underlying-store";
    private static final String REGISTERED_STORE_NAME = "registered-store";
    private static final TopicPartition CHANGELOG_PARTITION = new TopicPartition("store-changelog", 1);

    // Flags flipped by the mock stores' answers; tests assert that a local
    // store call actually reached the underlying mock.
    // NOTE(review): putWithTimestampExecuted is not reset in setup() like the
    // others; JUnit creates a fresh test instance per test, so all flags start
    // false regardless — the explicit resets below are belt-and-braces.
    private boolean flushExecuted;
    private boolean putExecuted;
    private boolean putWithTimestampExecuted;
    private boolean putIfAbsentExecuted;
    private boolean putAllExecuted;
    private boolean deleteExecuted;
    private boolean removeExecuted;

    // Sentinel iterators returned by the mock stores; tests assert identity.
    private KeyValueIterator<String, Long> rangeIter;
    private KeyValueIterator<String, ValueAndTimestamp<Long>> timestampedRangeIter;
    private KeyValueIterator<String, Long> allIter;
    private KeyValueIterator<String, ValueAndTimestamp<Long>> timestampedAllIter;
    private final List<KeyValueIterator<Windowed<String>, Long>> iters = new ArrayList<>(7);
    private final List<KeyValueIterator<Windowed<String>, ValueAndTimestamp<Long>>> timestampedIters = new ArrayList<>(7);
    private WindowStoreIterator windowStoreIter;

    @Before
    public void setup() {
        flushExecuted = false;
        putExecuted = false;
        putIfAbsentExecuted = false;
        putAllExecuted = false;
        deleteExecuted = false;
        removeExecuted = false;

        rangeIter = mock(KeyValueIterator.class);
        timestampedRangeIter = mock(KeyValueIterator.class);
        allIter = mock(KeyValueIterator.class);
        timestampedAllIter = mock(KeyValueIterator.class);
        windowStoreIter = mock(WindowStoreIterator.class);

        for (int i = 0; i < 7; i++) {
            iters.add(i, mock(KeyValueIterator.class));
            timestampedIters.add(i, mock(KeyValueIterator.class));
        }

        // State manager serving both the "Global*" and "Local*" store names.
        final ProcessorStateManager stateManager = mock(ProcessorStateManager.class);

        expect(stateManager.taskType()).andStubReturn(TaskType.ACTIVE);

        expect(stateManager.getGlobalStore("GlobalKeyValueStore")).andReturn(keyValueStoreMock());
        expect(stateManager.getGlobalStore("GlobalTimestampedKeyValueStore")).andReturn(timestampedKeyValueStoreMock());
        expect(stateManager.getGlobalStore("GlobalWindowStore")).andReturn(windowStoreMock());
        expect(stateManager.getGlobalStore("GlobalTimestampedWindowStore")).andReturn(timestampedWindowStoreMock());
        expect(stateManager.getGlobalStore("GlobalSessionStore")).andReturn(sessionStoreMock());
        // Any other global-store name resolves to null.
        expect(stateManager.getGlobalStore(anyString())).andReturn(null);

        expect(stateManager.getStore("LocalKeyValueStore")).andReturn(keyValueStoreMock());
        expect(stateManager.getStore("LocalTimestampedKeyValueStore")).andReturn(timestampedKeyValueStoreMock());
        expect(stateManager.getStore("LocalWindowStore")).andReturn(windowStoreMock());
        expect(stateManager.getStore("LocalTimestampedWindowStore")).andReturn(timestampedWindowStoreMock());
        expect(stateManager.getStore("LocalSessionStore")).andReturn(sessionStoreMock());

        expect(stateManager.registeredChangelogPartitionFor(REGISTERED_STORE_NAME)).andStubReturn(CHANGELOG_PARTITION);

        replay(stateManager);

        context = new ProcessorContextImpl(
            mock(TaskId.class),
            streamsConfig,
            stateManager,
            mock(StreamsMetricsImpl.class),
            mock(ThreadCache.class)
        );

        final StreamTask task = mock(StreamTask.class);
        expect(task.streamTime()).andReturn(STREAM_TIME);
        EasyMock.expect(task.recordCollector()).andStubReturn(recordCollector);
        replay(task);
        ((InternalProcessorContext) context).transitionToActive(task, null, null);

        // Current node declares which local stores the "processor" may access.
        context.setCurrentNode(
            new ProcessorNode<>(
                "fake",
                (org.apache.kafka.streams.processor.api.Processor<String, Long, Object, Object>) null,
                new HashSet<>(
                    asList(
                        "LocalKeyValueStore",
                        "LocalTimestampedKeyValueStore",
                        "LocalWindowStore",
                        "LocalTimestampedWindowStore",
                        "LocalSessionStore"
                    )
                )
            )
        );
    }

    // Builds a context backed by a STANDBY-task state manager; such a context
    // must reject all active-only operations.
    private ProcessorContextImpl getStandbyContext() {
        final ProcessorStateManager stateManager = EasyMock.createNiceMock(ProcessorStateManager.class);
        expect(stateManager.taskType()).andStubReturn(TaskType.STANDBY);
        replay(stateManager);
        return new ProcessorContextImpl(
            mock(TaskId.class),
            streamsConfig,
            stateManager,
            mock(StreamsMetricsImpl.class),
            mock(ThreadCache.class)
        );
    }

    @Test
    public void globalKeyValueStoreShouldBeReadOnly() {
        doTest("GlobalKeyValueStore", (Consumer<KeyValueStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            // All mutating operations must be rejected on a global store.
            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.put("1", 1L), "put()");
            checkThrowsUnsupportedOperation(() -> store.putIfAbsent("1", 1L), "putIfAbsent()");
            checkThrowsUnsupportedOperation(() -> store.putAll(Collections.emptyList()), "putAll()");
            checkThrowsUnsupportedOperation(() -> store.delete("1"), "delete()");

            // Reads pass through to the underlying mock.
            assertEquals((Long) VALUE, store.get(KEY));
            assertEquals(rangeIter, store.range("one", "two"));
            assertEquals(allIter, store.all());
            assertEquals(VALUE, store.approximateNumEntries());
        });
    }

    @Test
    public void globalTimestampedKeyValueStoreShouldBeReadOnly() {
        doTest("GlobalTimestampedKeyValueStore", (Consumer<TimestampedKeyValueStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.put("1", ValueAndTimestamp.make(1L, 2L)), "put()");
            checkThrowsUnsupportedOperation(() -> store.putIfAbsent("1", ValueAndTimestamp.make(1L, 2L)), "putIfAbsent()");
            checkThrowsUnsupportedOperation(() -> store.putAll(Collections.emptyList()), "putAll()");
            checkThrowsUnsupportedOperation(() -> store.delete("1"), "delete()");

            assertEquals(VALUE_AND_TIMESTAMP, store.get(KEY));
            assertEquals(timestampedRangeIter, store.range("one", "two"));
            assertEquals(timestampedAllIter, store.all());
            assertEquals(VALUE, store.approximateNumEntries());
        });
    }

    @Test
    public void globalWindowStoreShouldBeReadOnly() {
        doTest("GlobalWindowStore", (Consumer<WindowStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.put("1", 1L, 1L), "put()");

            assertEquals(iters.get(0), store.fetchAll(0L, 0L));
            assertEquals(windowStoreIter, store.fetch(KEY, 0L, 1L));
            assertEquals(iters.get(1), store.fetch(KEY, KEY, 0L, 1L));
            assertEquals((Long) VALUE, store.fetch(KEY, 1L));
            assertEquals(iters.get(2), store.all());
        });
    }

    @Test
    public void globalTimestampedWindowStoreShouldBeReadOnly() {
        doTest("GlobalTimestampedWindowStore", (Consumer<TimestampedWindowStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.put("1", ValueAndTimestamp.make(1L, 1L), 1L), "put() [with timestamp]");

            assertEquals(timestampedIters.get(0), store.fetchAll(0L, 0L));
            assertEquals(windowStoreIter, store.fetch(KEY, 0L, 1L));
            assertEquals(timestampedIters.get(1), store.fetch(KEY, KEY, 0L, 1L));
            assertEquals(VALUE_AND_TIMESTAMP, store.fetch(KEY, 1L));
            assertEquals(timestampedIters.get(2), store.all());
        });
    }

    @Test
    public void globalSessionStoreShouldBeReadOnly() {
        doTest("GlobalSessionStore", (Consumer<SessionStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            checkThrowsUnsupportedOperation(store::flush, "flush()");
            checkThrowsUnsupportedOperation(() -> store.remove(null), "remove()");
            checkThrowsUnsupportedOperation(() -> store.put(null, null), "put()");

            assertEquals(iters.get(3), store.findSessions(KEY, 1L, 2L));
            assertEquals(iters.get(4), store.findSessions(KEY, KEY, 1L, 2L));
            assertEquals(iters.get(5), store.fetch(KEY));
            assertEquals(iters.get(6), store.fetch(KEY, KEY));
        });
    }

    @Test
    public void localKeyValueStoreShouldNotAllowInitOrClose() {
        doTest("LocalKeyValueStore", (Consumer<KeyValueStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            // Mutations on a local store reach the underlying mock (flags flip).
            store.flush();
            assertTrue(flushExecuted);

            store.put("1", 1L);
            assertTrue(putExecuted);

            store.putIfAbsent("1", 1L);
            assertTrue(putIfAbsentExecuted);

            store.putAll(Collections.emptyList());
            assertTrue(putAllExecuted);

            store.delete("1");
            assertTrue(deleteExecuted);

            assertEquals((Long) VALUE, store.get(KEY));
            assertEquals(rangeIter, store.range("one", "two"));
            assertEquals(allIter, store.all());
            assertEquals(VALUE, store.approximateNumEntries());
        });
    }

    @Test
    public void localTimestampedKeyValueStoreShouldNotAllowInitOrClose() {
        doTest("LocalTimestampedKeyValueStore", (Consumer<TimestampedKeyValueStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            store.put("1", ValueAndTimestamp.make(1L, 2L));
            assertTrue(putExecuted);

            store.putIfAbsent("1", ValueAndTimestamp.make(1L, 2L));
            assertTrue(putIfAbsentExecuted);

            store.putAll(Collections.emptyList());
            assertTrue(putAllExecuted);

            store.delete("1");
            assertTrue(deleteExecuted);

            assertEquals(VALUE_AND_TIMESTAMP, store.get(KEY));
            assertEquals(timestampedRangeIter, store.range("one", "two"));
            assertEquals(timestampedAllIter, store.all());
            assertEquals(VALUE, store.approximateNumEntries());
        });
    }

    @Test
    public void localWindowStoreShouldNotAllowInitOrClose() {
        doTest("LocalWindowStore", (Consumer<WindowStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            store.put("1", 1L, 1L);
            assertTrue(putExecuted);

            assertEquals(iters.get(0), store.fetchAll(0L, 0L));
            assertEquals(windowStoreIter, store.fetch(KEY, 0L, 1L));
            assertEquals(iters.get(1), store.fetch(KEY, KEY, 0L, 1L));
            assertEquals((Long) VALUE, store.fetch(KEY, 1L));
            assertEquals(iters.get(2), store.all());
        });
    }

    @Test
    public void localTimestampedWindowStoreShouldNotAllowInitOrClose() {
        doTest("LocalTimestampedWindowStore", (Consumer<TimestampedWindowStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            // The mock records two sequential answers for put(); the first call
            // flips putExecuted, the second flips putWithTimestampExecuted.
            store.put("1", ValueAndTimestamp.make(1L, 1L), 1L);
            assertTrue(putExecuted);

            store.put("1", ValueAndTimestamp.make(1L, 1L), 1L);
            assertTrue(putWithTimestampExecuted);

            assertEquals(timestampedIters.get(0), store.fetchAll(0L, 0L));
            assertEquals(windowStoreIter, store.fetch(KEY, 0L, 1L));
            assertEquals(timestampedIters.get(1), store.fetch(KEY, KEY, 0L, 1L));
            assertEquals(VALUE_AND_TIMESTAMP, store.fetch(KEY, 1L));
            assertEquals(timestampedIters.get(2), store.all());
        });
    }

    @Test
    public void localSessionStoreShouldNotAllowInitOrClose() {
        doTest("LocalSessionStore", (Consumer<SessionStore<String, Long>>) store -> {
            verifyStoreCannotBeInitializedOrClosed(store);

            store.flush();
            assertTrue(flushExecuted);

            store.remove(null);
            assertTrue(removeExecuted);

            store.put(null, null);
            assertTrue(putExecuted);

            assertEquals(iters.get(3), store.findSessions(KEY, 1L, 2L));
            assertEquals(iters.get(4), store.findSessions(KEY, KEY, 1L, 2L));
            assertEquals(iters.get(5), store.fetch(KEY));
            assertEquals(iters.get(6), store.fetch(KEY, KEY));
        });
    }

    @Test
    public void shouldNotSendRecordHeadersToChangelogTopic() {
        // Recording phase: the expected send() call carries null headers.
        recordCollector.send(
            CHANGELOG_PARTITION.topic(),
            KEY_BYTES,
            VALUE_BYTES,
            null,
            CHANGELOG_PARTITION.partition(),
            TIMESTAMP,
            BYTES_KEY_SERIALIZER,
            BYTEARRAY_VALUE_SERIALIZER
        );
        final StreamTask task = EasyMock.createNiceMock(StreamTask.class);

        replay(recordCollector, task);
        context.transitionToActive(task, recordCollector, null);
        context.logChange(REGISTERED_STORE_NAME, KEY_BYTES, VALUE_BYTES, TIMESTAMP);

        verify(recordCollector);
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnLogChange() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.logChange("Store", Bytes.wrap("k".getBytes()), null, 0L)
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnGetStateStore() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.getStateStore("store")
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnForward() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.forward("key", "value")
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnForwardWithTo() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.forward("key", "value", To.child("child-name"))
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnCommit() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.commit()
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnSchedule() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.schedule(Duration.ofMillis(100L), PunctuationType.STREAM_TIME, t -> { })
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnTopic() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.topic()
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnPartition() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.partition()
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnOffset() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.offset()
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnTimestamp() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.timestamp()
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnCurrentNode() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.currentNode()
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnSetRecordContext() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.setRecordContext(mock(ProcessorRecordContext.class))
        );
    }

    @Test
    public void shouldThrowUnsupportedOperationExceptionOnRecordContext() {
        context = getStandbyContext();
        assertThrows(
            UnsupportedOperationException.class,
            () -> context.recordContext()
        );
    }

    @Test
    public void shouldMatchStreamTime() {
        assertEquals(STREAM_TIME, context.currentStreamTimeMs());
    }

    // --- mock-store factories: each returns a replayed EasyMock store whose
    // --- mutators flip the corresponding *Executed flag and whose readers
    // --- return the sentinel values/iterators asserted by the tests.

    @SuppressWarnings("unchecked")
    private KeyValueStore<String, Long> keyValueStoreMock() {
        final KeyValueStore<String, Long> keyValueStoreMock = mock(KeyValueStore.class);

        initStateStoreMock(keyValueStoreMock);

        expect(keyValueStoreMock.get(KEY)).andReturn(VALUE);
        expect(keyValueStoreMock.approximateNumEntries()).andReturn(VALUE);

        expect(keyValueStoreMock.range("one", "two")).andReturn(rangeIter);
        expect(keyValueStoreMock.all()).andReturn(allIter);

        keyValueStoreMock.put(anyString(), anyLong());
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        keyValueStoreMock.putIfAbsent(anyString(), anyLong());
        expectLastCall().andAnswer(() -> {
            putIfAbsentExecuted = true;
            return null;
        });

        keyValueStoreMock.putAll(anyObject(List.class));
        expectLastCall().andAnswer(() -> {
            putAllExecuted = true;
            return null;
        });

        keyValueStoreMock.delete(anyString());
        expectLastCall().andAnswer(() -> {
            deleteExecuted = true;
            return null;
        });

        replay(keyValueStoreMock);

        return keyValueStoreMock;
    }

    @SuppressWarnings("unchecked")
    private TimestampedKeyValueStore<String, Long> timestampedKeyValueStoreMock() {
        final TimestampedKeyValueStore<String, Long> timestampedKeyValueStoreMock = mock(TimestampedKeyValueStore.class);

        initStateStoreMock(timestampedKeyValueStoreMock);

        expect(timestampedKeyValueStoreMock.get(KEY)).andReturn(VALUE_AND_TIMESTAMP);
        expect(timestampedKeyValueStoreMock.approximateNumEntries()).andReturn(VALUE);

        expect(timestampedKeyValueStoreMock.range("one", "two")).andReturn(timestampedRangeIter);
        expect(timestampedKeyValueStoreMock.all()).andReturn(timestampedAllIter);

        timestampedKeyValueStoreMock.put(anyString(), anyObject(ValueAndTimestamp.class));
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        timestampedKeyValueStoreMock.putIfAbsent(anyString(), anyObject(ValueAndTimestamp.class));
        expectLastCall().andAnswer(() -> {
            putIfAbsentExecuted = true;
            return null;
        });

        timestampedKeyValueStoreMock.putAll(anyObject(List.class));
        expectLastCall().andAnswer(() -> {
            putAllExecuted = true;
            return null;
        });

        timestampedKeyValueStoreMock.delete(anyString());
        expectLastCall().andAnswer(() -> {
            deleteExecuted = true;
            return null;
        });

        replay(timestampedKeyValueStoreMock);

        return timestampedKeyValueStoreMock;
    }

    @SuppressWarnings("unchecked")
    private WindowStore<String, Long> windowStoreMock() {
        final WindowStore<String, Long> windowStore = mock(WindowStore.class);

        initStateStoreMock(windowStore);

        expect(windowStore.fetchAll(anyLong(), anyLong())).andReturn(iters.get(0));
        expect(windowStore.fetch(anyString(), anyString(), anyLong(), anyLong())).andReturn(iters.get(1));
        expect(windowStore.fetch(anyString(), anyLong(), anyLong())).andReturn(windowStoreIter);
        expect(windowStore.fetch(anyString(), anyLong())).andReturn(VALUE);
        expect(windowStore.all()).andReturn(iters.get(2));

        windowStore.put(anyString(), anyLong(), anyLong());
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        replay(windowStore);

        return windowStore;
    }

    @SuppressWarnings("unchecked")
    private TimestampedWindowStore<String, Long> timestampedWindowStoreMock() {
        final TimestampedWindowStore<String, Long> windowStore = mock(TimestampedWindowStore.class);

        initStateStoreMock(windowStore);

        expect(windowStore.fetchAll(anyLong(), anyLong())).andReturn(timestampedIters.get(0));
        expect(windowStore.fetch(anyString(), anyString(), anyLong(), anyLong())).andReturn(timestampedIters.get(1));
        expect(windowStore.fetch(anyString(), anyLong(), anyLong())).andReturn(windowStoreIter);
        expect(windowStore.fetch(anyString(), anyLong())).andReturn(VALUE_AND_TIMESTAMP);
        expect(windowStore.all()).andReturn(timestampedIters.get(2));

        // Two consecutive expectations for the same call: EasyMock answers them
        // in order, so the first put() flips putExecuted and the second flips
        // putWithTimestampExecuted.
        windowStore.put(anyString(), anyObject(ValueAndTimestamp.class), anyLong());
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        windowStore.put(anyString(), anyObject(ValueAndTimestamp.class), anyLong());
        expectLastCall().andAnswer(() -> {
            putWithTimestampExecuted = true;
            return null;
        });

        replay(windowStore);

        return windowStore;
    }

    @SuppressWarnings("unchecked")
    private SessionStore<String, Long> sessionStoreMock() {
        final SessionStore<String, Long> sessionStore = mock(SessionStore.class);

        initStateStoreMock(sessionStore);

        expect(sessionStore.findSessions(anyString(), anyLong(), anyLong())).andReturn(iters.get(3));
        expect(sessionStore.findSessions(anyString(), anyString(), anyLong(), anyLong())).andReturn(iters.get(4));
        expect(sessionStore.fetch(anyString())).andReturn(iters.get(5));
        expect(sessionStore.fetch(anyString(), anyString())).andReturn(iters.get(6));

        sessionStore.put(anyObject(Windowed.class), anyLong());
        expectLastCall().andAnswer(() -> {
            putExecuted = true;
            return null;
        });

        sessionStore.remove(anyObject(Windowed.class));
        expectLastCall().andAnswer(() -> {
            removeExecuted = true;
            return null;
        });

        replay(sessionStore);

        return sessionStore;
    }

    private StreamsConfig streamsConfigMock() {
        final StreamsConfig streamsConfig = mock(StreamsConfig.class);
        expect(streamsConfig.getString(StreamsConfig.APPLICATION_ID_CONFIG)).andStubReturn("add-id");
        expect(streamsConfig.defaultValueSerde()).andStubReturn(Serdes.ByteArray());
        expect(streamsConfig.defaultKeySerde()).andStubReturn(Serdes.ByteArray());
        replay(streamsConfig);
        return streamsConfig;
    }

    // Common StateStore expectations: name/persistent/isOpen reads plus a
    // flush() answer that flips flushExecuted. Caller replays the mock.
    private void initStateStoreMock(final StateStore stateStore) {
        expect(stateStore.name()).andReturn(STORE_NAME);
        expect(stateStore.persistent()).andReturn(true);
        expect(stateStore.isOpen()).andReturn(true);

        stateStore.flush();
        expectLastCall().andAnswer(() -> {
            flushExecuted = true;
            return null;
        });
    }

    // Runs `checker` against the store named `name`, resolved through the
    // context from inside a (deprecated-API) processor's init().
    private <T extends StateStore> void doTest(final String name, final Consumer<T> checker) {
        @SuppressWarnings("deprecation")
        final org.apache.kafka.streams.processor.Processor<String, Long> processor = new org.apache.kafka.streams.processor.Processor<String, Long>() {
            @Override
            public void init(final ProcessorContext context) {
                final T store = context.getStateStore(name);
                checker.accept(store);
            }

            @Override
            public void process(final String k, final Long v) {
                //No-op.
            }

            @Override
            public void close() {
                //No-op.
            }
        };

        processor.init(context);
    }

    private void verifyStoreCannotBeInitializedOrClosed(final StateStore store) {
        assertEquals(STORE_NAME, store.name());
        assertTrue(store.persistent());
        assertTrue(store.isOpen());

        checkThrowsUnsupportedOperation(() -> store.init((StateStoreContext) null, null), "init()");
        checkThrowsUnsupportedOperation(store::close, "close()");
    }

    // Asserts that `check` throws UnsupportedOperationException; `name` labels
    // the failure message.
    private void checkThrowsUnsupportedOperation(final Runnable check, final String name) {
        try {
            check.run();
            fail(name + " should throw exception");
        } catch (final UnsupportedOperationException e) {
            //ignore.
        }
    }
}
package org.hisp.dhis.analytics.table; /* * Copyright (c) 2004-2015, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Future; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hisp.dhis.analytics.AnalyticsIndex; import org.hisp.dhis.analytics.AnalyticsTable; import org.hisp.dhis.analytics.AnalyticsTableManager; import org.hisp.dhis.calendar.Calendar; import org.hisp.dhis.common.CodeGenerator; import org.hisp.dhis.common.IdentifiableObjectManager; import org.hisp.dhis.commons.collection.ListUtils; import org.hisp.dhis.commons.collection.UniqueArrayList; import org.hisp.dhis.commons.timer.SystemTimer; import org.hisp.dhis.commons.timer.Timer; import org.hisp.dhis.dataapproval.DataApprovalLevelService; import org.hisp.dhis.dataelement.DataElementCategoryService; import org.hisp.dhis.jdbc.StatementBuilder; import org.hisp.dhis.organisationunit.OrganisationUnitService; import org.hisp.dhis.period.Period; import org.hisp.dhis.period.PeriodType; import org.hisp.dhis.resourcetable.ResourceTableService; import org.hisp.dhis.setting.SystemSettingManager; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.BadSqlGrammarException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.scheduling.annotation.Async; import org.springframework.transaction.annotation.Transactional; /** * @author Lars Helge Overland */ public abstract class AbstractJdbcTableManager implements AnalyticsTableManager { protected static final Log log = LogFactory.getLog( JdbcAnalyticsTableManager.class ); protected static final String DATE_REGEXP = "^\\d{4}-\\d{2}-\\d{2}(\\s|T)?(\\d{2}:\\d{2}:\\d{2})?$"; public static final String PREFIX_ORGUNITGROUPSET = "ougs_"; public static final String PREFIX_ORGUNITLEVEL = "uidlevel"; public static final String 
PREFIX_INDEX = "in_"; @Autowired protected IdentifiableObjectManager idObjectManager; @Autowired protected OrganisationUnitService organisationUnitService; @Autowired protected DataElementCategoryService categoryService; @Autowired protected SystemSettingManager systemSettingManager; @Autowired protected DataApprovalLevelService dataApprovalLevelService; @Autowired protected ResourceTableService resourceTableService; @Autowired protected StatementBuilder statementBuilder; @Autowired protected JdbcTemplate jdbcTemplate; // ------------------------------------------------------------------------- // Abstract methods // ------------------------------------------------------------------------- /** * Returns a list of string arrays in where the first index holds the database * column name, the second index holds the database column data type and the * third column holds a table alias and name. Column names are quoted. * * <ul> * <li>0 = database column name (quoted)</li> * <li>1 = database column data type</li> * <li>2 = column alias and name</li> * </ul> */ protected abstract List<String[]> getDimensionColumns( AnalyticsTable table ); /** * Override to perform work before tables are being generated. 
*/ @Override public void preCreateTables() { } // ------------------------------------------------------------------------- // Implementation // ------------------------------------------------------------------------- @Override @Transactional public List<AnalyticsTable> getTables( Date earliest ) { log.info( "Get tables using earliest: " + earliest ); return getTables( getDataYears( earliest ) ); } @Override @Transactional public List<AnalyticsTable> getAllTables() { return getTables( ListUtils.getClosedOpenList( 1500, 2100 ) ); } private List<AnalyticsTable> getTables( List<Integer> dataYears ) { List<AnalyticsTable> tables = new UniqueArrayList<>(); Calendar calendar = PeriodType.getCalendar(); Collections.sort( dataYears ); String baseName = getTableName(); for ( Integer year : dataYears ) { Period period = PartitionUtils.getPeriod( calendar, year ); tables.add( new AnalyticsTable( baseName, getDimensionColumns( null ), period ) ); } return tables; } @Override public String getTempTableName() { return getTableName() + TABLE_TEMP_SUFFIX; } @Override @Async public Future<?> createIndexesAsync( ConcurrentLinkedQueue<AnalyticsIndex> indexes ) { taskLoop : while ( true ) { AnalyticsIndex inx = indexes.poll(); if ( inx == null ) { break taskLoop; } final String indexName = getIndexName( inx ); final String sql = "create index " + indexName + " on " + inx.getTable() + " (" + inx.getColumn() + ")"; log.debug( "Create index: " + indexName + " SQL: " + sql ); jdbcTemplate.execute( sql ); log.debug( "Created index: " + indexName ); } return null; } @Override public void swapTable( AnalyticsTable table ) { final String tempTable = table.getTempTableName(); final String realTable = table.getTableName(); final String sqlDrop = "drop table " + realTable; executeSilently( sqlDrop ); final String sqlAlter = "alter table " + tempTable + " rename to " + realTable; executeSilently( sqlAlter ); } @Override public void dropTable( String tableName ) { final String realTable = 
tableName.replaceFirst( TABLE_TEMP_SUFFIX, "" ); executeSilently( "drop table " + tableName ); executeSilently( "drop table " + realTable ); } // ------------------------------------------------------------------------- // Supportive methods // ------------------------------------------------------------------------- /** * Quotes the given column name. */ protected String quote( String column ) { return statementBuilder.columnQuote( column ); } /** * Remove quotes from the given column name. */ private String removeQuote( String column ) { return column != null ? column.replaceAll( statementBuilder.getColumnQuote(), StringUtils.EMPTY ) : null; } /** * Shortens the given table name. */ private String shortenTableName( String table ) { table = table.replaceAll( ANALYTICS_TABLE_NAME, "ax" ); table = table.replaceAll( TABLE_TEMP_SUFFIX, StringUtils.EMPTY ); return table; } /** * Returns index name for column. Purpose of code suffix is to avoid uniqueness * collision between indexes for temporary and real tables. */ protected String getIndexName( AnalyticsIndex inx ) { return quote( PREFIX_INDEX + removeQuote( inx.getColumn() ) + "_" + shortenTableName( inx.getTable() ) + "_" + CodeGenerator.generateCode( 5 ) ); } /** * Indicates whether the given table exists and has at least one row. */ protected boolean hasRows( String tableName ) { final String sql = "select * from " + tableName + " limit 1"; try { return jdbcTemplate.queryForRowSet( sql ).next(); } catch ( BadSqlGrammarException ex ) { return false; } } /** * Executes a SQL statement. Ignores existing tables/indexes when attempting * to create new. */ protected void executeSilently( String sql ) { try { jdbcTemplate.execute( sql ); } catch ( BadSqlGrammarException ex ) { log.debug( ex.getMessage() ); } } /** * Checks whether the given list of dimensions are valid. * @throws IllegalStateException if not valid. 
*/ protected void validateDimensionColumns( List<String[]> dimensions ) { if ( dimensions == null || dimensions.isEmpty() ) { throw new IllegalStateException( "Analytics table dimensions are empty" ); } dimensions = new ArrayList<>( dimensions ); List<String> columns = new ArrayList<>(); for ( String[] dimension : dimensions ) { columns.add( dimension[0] ); } Set<String> duplicates = ListUtils.getDuplicates( columns ); if ( !duplicates.isEmpty() ) { throw new IllegalStateException( "Analytics table dimensions contain duplicates: " + duplicates ); } } /** * Executes the given table population SQL statement, log and times the operation. */ protected void populateAndLog( String sql, String tableName ) { log.debug( "Populate table: " + tableName + " SQL: " + sql ); Timer timer = new SystemTimer().start(); jdbcTemplate.execute( sql ); log.info( "Populated " + tableName + ": " + timer.stop().toString() ); } }
/* * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.mgt.store; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.identity.base.IdentityException; import org.wso2.carbon.identity.mgt.constants.IdentityMgtConstants; import org.wso2.carbon.identity.mgt.dto.UserRecoveryDataDO; import org.wso2.carbon.identity.mgt.internal.IdentityMgtServiceComponent; import org.wso2.carbon.registry.core.Collection; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.exceptions.RegistryException; import java.util.Properties; public class RegistryRecoveryDataStore implements UserRecoveryDataStore { private static final Log log = LogFactory.getLog(RegistryRecoveryDataStore.class); @Override public void store(UserRecoveryDataDO recoveryDataDO) throws IdentityException { Registry registry = null; try { registry = IdentityMgtServiceComponent.getRegistryService(). 
getConfigSystemRegistry(PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId()); registry.beginTransaction(); Resource resource = registry.newResource(); resource.setProperty(SECRET_KEY, recoveryDataDO.getSecret()); resource.setProperty(USER_ID, recoveryDataDO.getUserName()); resource.setProperty(EXPIRE_TIME, recoveryDataDO.getExpireTime()); resource.setVersionableChange(false); String confirmationKeyPath = IdentityMgtConstants.IDENTITY_MANAGEMENT_DATA + "/" + recoveryDataDO.getCode ().toLowerCase(); registry.put(confirmationKeyPath, resource); } catch (RegistryException e) { log.error(e); throw IdentityException.error("Error while persisting user recovery data for user : " + recoveryDataDO.getUserName()); } finally { if (registry != null) { try { registry.commitTransaction(); } catch (RegistryException e) { log.error("Error while processing registry transaction", e); } } } } @Override public void store(UserRecoveryDataDO[] recoveryDataDOs) throws IdentityException { //To change body of implemented methods use File | Settings | File Templates. } @Override public UserRecoveryDataDO load(String code) throws IdentityException { Registry registry = null; UserRecoveryDataDO dataDO = new UserRecoveryDataDO(); try { registry = IdentityMgtServiceComponent.getRegistryService(). 
getConfigSystemRegistry(PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId()); registry.beginTransaction(); String secretKeyPath = IdentityMgtConstants.IDENTITY_MANAGEMENT_DATA + RegistryConstants.PATH_SEPARATOR + code.toLowerCase(); if (registry.resourceExists(secretKeyPath)) { Resource resource = registry.get(secretKeyPath); Properties props = resource.getProperties(); for (Object o : props.keySet()) { String key = (String) o; if (key.equals(USER_ID)) { dataDO.setUserName(resource.getProperty(key)); } else if (key.equals(SECRET_KEY)) { dataDO.setSecret(resource.getProperty(key)); } else if (key.equals(EXPIRE_TIME)) { String time = resource.getProperty(key); dataDO.setExpireTime(time); if (System.currentTimeMillis() > Long.parseLong(time)) { dataDO.setValid(false); break; } else { dataDO.setValid(true); } } } } else { return null; } } catch (RegistryException e) { log.error(e); throw IdentityException.error("Error while loading user recovery data for code : " + code); } finally { if (registry != null) { try { registry.commitTransaction(); } catch (RegistryException e) { log.error("Error while processing registry transaction", e); } } } return dataDO; } @Override public void invalidate(UserRecoveryDataDO recoveryDataDO) throws IdentityException { Registry registry = null; try { registry = IdentityMgtServiceComponent.getRegistryService(). 
getConfigSystemRegistry(PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId()); registry.beginTransaction(); String dataPath = IdentityMgtConstants.IDENTITY_MANAGEMENT_DATA; Collection dataItems = (Collection)registry.get(dataPath); for (int i = 0; i < dataItems.getChildren().length; i++) { Resource currentResource = registry.get(dataItems.getChildren()[i]); if (currentResource instanceof Collection) { String[] currentResourceChildren = ((Collection) currentResource).getChildren(); for (int j = 0; j < currentResourceChildren.length; j++) { Resource innerResource = registry.get(currentResourceChildren[j]); if (innerResource.getProperty(SECRET_KEY).equals(recoveryDataDO.getSecret())) { registry.delete(currentResourceChildren[j]); return; } } } else { if (currentResource.getProperty(SECRET_KEY).equals(recoveryDataDO.getSecret())) { registry.delete(dataItems.getChildren()[i]); return; } } } } catch (RegistryException e) { throw IdentityException.error("Error while deleting resource after loading", e); } finally { if (registry != null) { try { registry.commitTransaction(); } catch (RegistryException e) { log.error("Error while deleting resource after loading.", e); } } } } @Override public void invalidate(String userId, int tenantId) throws IdentityException { Registry registry = null; try { registry = IdentityMgtServiceComponent.getRegistryService(). 
getConfigSystemRegistry(PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId()); registry.beginTransaction(); deleteOldResourcesIfFound(registry, userId, IdentityMgtConstants.IDENTITY_MANAGEMENT_DATA); } catch (RegistryException e) { throw IdentityException.error("Error while deleting the old confirmation code.", e); } finally { if (registry != null) { try { registry.commitTransaction(); } catch (RegistryException e) { log.error("Error while deleting the old confirmation code \n" + e); } } } } @Override public UserRecoveryDataDO[] load(String userName, int tenantId) throws IdentityException { return new UserRecoveryDataDO[0]; //To change body of implemented methods use File | Settings | File Templates. } private void deleteOldResourcesIfFound(Registry registry, String userName, String secretKeyPath) { try { if (registry.resourceExists(secretKeyPath.toLowerCase())) { Collection collection = (Collection) registry.get(secretKeyPath.toLowerCase()); String[] resources = collection.getChildren(); for (String resource : resources) { String[] splittedResource = resource.split("___"); if (splittedResource.length == 3) { //PRIMARY USER STORE if (resource.contains("___" + userName + "___")) { registry.delete(resource); } } else if (splittedResource.length == 2) { //SECONDARY USER STORE. Resource is a collection. deleteOldResourcesIfFound(registry, userName, resource); } } } } catch (RegistryException e) { log.error("Error while deleting the old confirmation code \n" + e); } } }
package HackerLogin; /** HackerSearch.java HackerRPC provides a system for performing remote function calls. This is good for attaching PHP to the server, It is also good for performing some central tasks like search engine indexing, etc. */ import util.zip; import java.io.*; import javax.servlet.*; import javax.servlet.http.*; import com.plink.dolphinnet.*; import HackerSearch.Assignments.*; import HackerSearch.util.*; import HackerSearch.Server.*; import java.util.ArrayList; import java.util.HashMap; import Assignments.*; import util.*; import Hackscript.Model.*; import java.util.concurrent.Semaphore; import Game.*; import Assignments.*; import Server.*; import org.apache.xmlrpc.client.XmlRpcClient; import org.apache.xmlrpc.client.XmlRpcClientConfigImpl; import java.net.URL; public class HackerRPC extends HttpServlet implements DataHandler{ private boolean RESPONSE=false;//Has a response been received. private static final long TIME_OUT=5000;//How long should we wait for a connection. private static final long RESPONSE_TIME_OUT=15000;//How long should we wait for a response. private Time MyTime=Time.getInstance(); private int RESULTS_PER_PAGE=10; public HackerRPC(){ } /** An XML rpc call that peforms a search. */ public String doSearch(String query,String spage){ String out=""; String ip="localhost"; SearchResultAssignment Result=null; if(query!=null&&!query.equals("")){ //Get the query. int page=0; if(spage!=null&&!spage.equals("")) page=new Integer(spage); SearchServer MySearchServer=SearchServer.getInstance(); int STAMP=(int)MyTime.getCurrentTime(); SearchAssignment SA=new SearchAssignment(STAMP); SA.setIndex(page*RESULTS_PER_PAGE); SA.setVector(query); Result=MySearchServer.requestSearch(SA); } //The search response, edit this for a new look. 
out+="<html>"; out+="<head>"; out+="<title>Hacker Search</title>"; out+="</head>"; out+="<body style=\"padding:5px;background-color:#313230;width:100%;height:100%\">"; out+="<img width=\"200\" height=\"81\" src=\"http://www.hackwars.net/images/logo.gif\" /><br /><br />"; //out+="<img width=\"120\" height=\"80\" src=\"http://www.hackwars.net/images/logo.jpg\" /><br /><br />"; out+="<b style=\"color:white;size:15px;\">Use the hacker search engine to find the websites of players and NPCs. You can use these websites to purchase items, plan attacks, and to move one step closer to global domination.</b><br /><br />"; out+="<form action=\"search.html\"><input type=\"text\" name=\"query\"><input type=\"submit\" value=\"Search\"></form><br />"; //Output results. if(Result!=null&&query!=null){ ArrayList Results=Result.getResults(); boolean divider=false; if(Result.getCurrent()>RESULTS_PER_PAGE-1){ out+="<a style=\"color:white;\" href=\"?query="+query+"&page="+(Result.getCurrent()/RESULTS_PER_PAGE-1)+"\"> Previous </a>"; divider=true; } if(Result.getCurrent()+RESULTS_PER_PAGE<Result.getSize()){ if(divider) out+="<a style=\"color:white;\"> | </a>"; out+="<a style=\"color:white;\" href=\"?query="+query+"&page="+(Result.getCurrent()/RESULTS_PER_PAGE+1)+"\"> Next </a>"; } if(Results!=null&&Results.size()>0) out+="<a style=\"color:white\">("+(Result.getCurrent()+1)+" to "+Math.min(Result.getSize(),Result.getCurrent()+RESULTS_PER_PAGE)+" of "+Result.getSize()+".)</a>"; else out+="<a style=\"color:white\">No results found.</a>"; XmlRpcClientConfigImpl config = new XmlRpcClientConfigImpl(); try{ config.setServerURL(new URL("http://www.hackwars.net/xmlrpc/domain.php")); }catch(Exception e){} XmlRpcClient client = new XmlRpcClient(); client.setConfig(config); for(int i=0;i<Results.size();i++){ SearchResult SR=(SearchResult)Results.get(i); Object[] send = {SR.getAddress()}; String address=SR.getAddress(); try{ address = (String)client.execute("reverseLookup", send); }catch(Exception 
e){} out+="<div style=\"padding:5px;background-color:white;\">"; out+="<b style=\"color:blue;size:18px;\"><a href=\""+address+"\">"+SR.getTitle()+"</a></b><br /><b style=\"color:black;size:15px;\">"; out+=SR.getDescription()+"</b><br />"; out+="<i style=\"color:green;font-size:13px;\">"+address+"</i>"; out+="</div><br />"; } if(Result.getCurrent()>RESULTS_PER_PAGE-1){ out+="<a style=\"color:white;\" href=\"?query="+query+"&page="+(Result.getCurrent()/RESULTS_PER_PAGE-1)+"\"> Previous </a>"; divider=true; } if(Result.getCurrent()+RESULTS_PER_PAGE<Result.getSize()){ if(divider) out+="<a style=\"color:white;\"> | </a>"; out+="<a style=\"color:white;\" href=\"?query="+query+"&page="+(Result.getCurrent()/RESULTS_PER_PAGE+1)+"\"> Next </a>"; } if(Results!=null&&Results.size()>0) out+="<a style=\"color:white\">("+(Result.getCurrent()+1)+" to "+Math.min(Result.getSize(),Result.getCurrent()+RESULTS_PER_PAGE)+" of "+Result.getSize()+".)</a>"; } out+="<br /><br /><b style=\"color:white;font-size:13px;\">&copy; Hack Wars 2007.</b>"; out+="</body>"; out+="</html>";//End of search response. Result=null; System.gc(); return(out); } /** The main servlet entry-point that can perform a search. */ public void doGetMultiple(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { String password = null; try { BufferedReader BR = new BufferedReader(new FileReader("password.ini")); password = BR.readLine(); } catch (Exception e) { // e.printStackTrace(); } PrintWriter out = response.getWriter(); String mode=request.getParameter("mode"); if(mode==null){//Serve an account. 
response.setContentType("application/xml"); String ip=request.getParameter("ip"); String serverID=request.getParameter("serverID"); if (password == null || password.equals(request.getParameter("pass"))) if(ip!=null&&serverID!=null){ CheckOutHandler COH=CheckOutSingleton.getInstance(); boolean activeBoolean=false; String active=request.getParameter("active"); if(active!=null&&!active.equals("")) activeBoolean=true; out.print(COH.checkOutPlayer(ip,serverID,activeBoolean)); } }else{//Serve a search engine. //The main search engine page, for when no results are currently being displayed (edit this for new look and feel). String ip="localhost"; String query=request.getParameter("query"); SearchResultAssignment Result=null; response.setContentType("text/html"); out.println("<html>"); out.println("<head>"); out.println("<title>Hacker Search</title>"); out.println("</head>"); out.println("<body style=\"padding:5px;background-color:#313230;width:100%;height:100%\">"); out.println("<img width=\"200\" height=\"81\" src=\"http://www.hackwars.net/images/logo.gif\" /><br /><br />"); out.println("<b style=\"color:white;size:15px;\">Use the hacker search engine to find the websites of players and NPCs. You can use these websites to purchase items, plan attacks, and to move one step closer to global domination.</b><br /><br />"); out.println("<form action=\"http://"+ip+":8081/search.html\" method=\"get\"><input type=\"text\" name=\"query\"><input type=\"submit\" value=\"Search\"></form><br />"); //Output results. 
if(Result!=null&&query!=null){ ArrayList Results=Result.getResults(); boolean divider=false; if(Result.getCurrent()>RESULTS_PER_PAGE-1){ out.println("<a style=\"color:white;\" href=\"?query="+query+"&page="+(Result.getCurrent()/RESULTS_PER_PAGE-1)+"\"> Previous </a>"); divider=true; } if(Result.getCurrent()+RESULTS_PER_PAGE<Result.getSize()){ if(divider) out.println("<a style=\"color:white;\"> | </a>"); out.println("<a style=\"color:white;\" href=\"?query="+query+"&page="+(Result.getCurrent()/RESULTS_PER_PAGE+1)+"\"> Next </a>"); } if(Results!=null&&Results.size()>0) out.println("<a style=\"color:white\">("+(Result.getCurrent()+1)+" to "+Math.min(Result.getSize(),Result.getCurrent()+RESULTS_PER_PAGE)+" of "+Result.getSize()+".)</a>"); else out.println("<a style=\"color:white\">No results found.</a>"); for(int i=0;i<Results.size();i++){ out.println("<div style=\"padding:5px;background-color:white;\">"); SearchResult SR=(SearchResult)Results.get(i); out.println("<b style=\"color:blue;size:18px;\"><a href=\""+SR.getAddress()+"\">"+SR.getTitle()+"</a></b><br /><b style=\"color:black;size:15px;\">"); out.println(SR.getDescription()+"</b><br />"); out.println("<i style=\"color:green;font-size:13px;\">"+SR.getAddress()+"</i>"); out.println("</div><br />"); } if(Result.getCurrent()>RESULTS_PER_PAGE-1){ out.println("<a style=\"color:white;\" href=\"?query="+query+"&page="+(Result.getCurrent()/RESULTS_PER_PAGE-1)+"\"> Previous </a>"); divider=true; } if(Result.getCurrent()+RESULTS_PER_PAGE<Result.getSize()){ if(divider) out.println("<a style=\"color:white;\"> | </a>"); out.println("<a style=\"color:white;\" href=\"?query="+query+"&page="+(Result.getCurrent()/RESULTS_PER_PAGE+1)+"\"> Next </a>"); } if(Results!=null&&Results.size()>0) out.println("<a style=\"color:white\">("+(Result.getCurrent()+1)+" to "+Math.min(Result.getSize(),Result.getCurrent()+RESULTS_PER_PAGE)+" of "+Result.getSize()+".)</a>"); } out.println("<br /><br /><b style=\"color:white;font-size:13px;\">&copy; 
Hack Wars 2007.</b>"); out.println("</body>"); out.println("</html>"); Result=null; } out.close(); } /** The main entry point of the servlet, keeping in mind this file also has XML-RPC specific stuff. */ public synchronized void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { try{ try{ doGetMultiple(request,response); }catch(Exception e){ e.printStackTrace(); } try{ Thread.sleep(250); }catch(Exception e){ e.printStackTrace(); } System.gc(); }catch(Exception e){ e.printStackTrace(); } } /** At one point this XML-RPC call was used to create a profile. now depricated. */ public String createProfile(String ip,String username){ CheckOutHandler COH=CheckOutSingleton.getInstance(); COH.createPlayer("awa878",ip,username); return("Success."); } /** An XML-RPC entry point that allows a facebook remote app to depost. This can be potentially used if we make other mobile apps. */ public String facebookDeposit(String ip,Double amount,Integer port){ ServerConnectionSingleton Server=ServerConnectionSingleton.getInstance(); Object O[]=new Object[]{ip,new Float(amount.floatValue()),port}; RemoteFunctionCall RFC=new RemoteFunctionCall(0,null,O); RFC.setFunction("facebookdeposit"); Server.returnAssignment(RFC); return("IP: "+ip+" Amount:"+amount+"Port: "+port); } /** An XML-RPC entry point that allows a facebook remote app to transfer. This can be potentially used if we make other mobile apps. */ public String facebookTransfer(String ip1,String ip2,Double amount,Integer port){ ServerConnectionSingleton Server=ServerConnectionSingleton.getInstance(); Object O[]=new Object[]{ip1,ip2,new Float(amount.floatValue()),port}; RemoteFunctionCall RFC=new RemoteFunctionCall(0,null,O); RFC.setFunction("facebooktransfer"); Server.returnAssignment(RFC); return("Success!"); } /** An XML-RPC entry point that allows a facebook remote app to withdraw. This can be potentially used if we make other mobile apps. 
*/ public String facebookWithdraw(String ip,Double amount,Integer port){ ServerConnectionSingleton Server=ServerConnectionSingleton.getInstance(); Object O[]=new Object[]{ip,new Float(amount.floatValue()),port}; RemoteFunctionCall RFC=new RemoteFunctionCall(0,null,O); RFC.setFunction("facebookwithdraw"); Server.returnAssignment(RFC); return("Success!"); } /** An XML-RPC entry point that allows a facebook remote app to request an update. This can be potentially used if we make other mobile apps. */ public String facebookUpdate(String ip){ ServerConnectionSingleton Server=ServerConnectionSingleton.getInstance(); Object O[]=new Object[]{ip}; RemoteFunctionCall RFC=new RemoteFunctionCall(0,null,O); RFC.setFunction("facebookupdate"); Server.returnAssignment(RFC); return("Success!"); } /** This XML-RPC call requests a website from the server, this will actually cause scripts to run, etc. */ private static int requestCounter=0; private static HashMap PageReturn=new HashMap(); public String requestWebsite(String ip,Object Keys[],Object Values[]){ System.out.println("Requested Website"); HashMap Parameters=new HashMap(); String function=""; System.out.println(Keys.length); for(int i=0;i<Keys.length;i++){ System.out.println(Keys[i]+">>>"+Values[i]); if(i!=0) Parameters.put((String)Keys[i],(String)Values[i]); else function=(String)Values[i]; } System.out.println("Function: "+function); System.out.println("IP: "+ip); Object O[]=new Object[]{ip,"062.153.7.142",Parameters}; RemoteFunctionCall RFC=new RemoteFunctionCall(0,null,O); RFC.setFunction(function); System.out.println("Starting Server Connection"); ServerConnectionSingleton Server=ServerConnectionSingleton.getInstance(); int requestCheck=requestCounter; RFC.setID(requestCheck); requestCounter++; Integer key=new Integer(requestCheck); PageReturn.put(key,null); System.out.println("Sending Request to server"); Server.returnAssignment(RFC);//Dispatch the request to the server. 
int breakCount=0; String data=""; while((data=(String)PageReturn.get(key))==null){ try{ Thread.sleep(1); }catch(Exception e){ } if(breakCount>10000) break; breakCount++; } System.out.println("Received Response from server"); if(data.equals("")) data="Maximum timeout reached."; //System.out.println(data); return(data); } /** The XML-RPC call for returning a page requested. */ public String returnWebsite(String title,String body,Integer ID){ PageReturn.put(ID,zip.unzipString(body)); return("sucess."); } /** Used to put the server into a locked down state. */ public String shutdownServer(String username,Integer id,String server){ if(username.equals("bcoe")&&(int)id==850335){ Reporter R=new Reporter(server,200000,10021,10020); R.setDataHandler(this); connect(R,"","","",false); long startTime=MyTime.getCurrentTime(); while(!RESPONSE){ if(MyTime.getCurrentTime()-startTime>RESPONSE_TIME_OUT){ return("Failure."); } } R.addFinishedAssignment(new PingAssignment(850335,"bcoe")); return("Server shutdown message sent."); }else return("Failure"); } /** Remote function call to save an account, this is used when the website needs to be re-indexed. */ public String saveProfile(String password,String ip,String content,Boolean pageUpdated,String pageTitle,String pageBody){ if(!password.equals("asdbas0d98a0sd9fa8sasdlbo")){ return("failure"); }else{ try{ if(pageUpdated){ SearchServer MySearchServer=SearchServer.getInstance(); MySearchServer.returnAssignment(new IndexPageAssignment(0,pageTitle,ip,pageBody)); } CheckOutHandler COH=CheckOutSingleton.getInstance(); COH.returnPlayer(ip,content); }catch(Exception e){ return(e.toString()); } } return("success"); } /** Entry point for Servlet. */ public synchronized void doPost(HttpServletRequest request,HttpServletResponse response) throws IOException, ServletException { doGet(request, response); } /** Connect to the search server and tell the server to shutdown. 
*/
public void connect(Reporter R,String title,String address,String content,boolean sendPacket){
    //Busy-spin until the reporter has been assigned an ID, or give up after TIME_OUT.
    //NOTE(review): no sleep in this loop, so it burns a CPU core while waiting — confirm intentional.
    boolean success=true;
    long startTime=MyTime.getCurrentTime();
    while(R.getID()==-1){
        if(MyTime.getCurrentTime()-startTime>TIME_OUT){
            success=false;
            break;
        }
    }
    if(success){
        //Only ship the index-page payload when the caller requested it.
        if(sendPacket) R.addFinishedAssignment(new IndexPageAssignment(0,title,address,content));
        RESPONSE=true;
    }else RESPONSE=false;
}
//Interface implementation.
/** Called by Data handler to return assignments to front-end. */
public void addData(Object o){
    if(o instanceof PingAssignment){
        RESPONSE=true;
    }
}
//Un-used stuff from the abstract class we implement.
public void addFinishedAssignment(Assignment A){};
public Object getData(int i){return(null);}
public void resetData(){};
/** Compile the application. */
//This chart provides a list of APIS plus the associated level requirements, plus the associated costs.
//Each API family is described by parallel arrays sharing an index:
//  <Family>        - script-callable function names
//  <Family>Level   - minimum skill level required to compile a call to that function
//  <Family>CPUCost - CPU cost the function adds to the compiled program
//  <Family>Price   - money the function adds to the program's price
//  <Family>Keys    - entry-point names a script of this type may define
//  <family>base    - minimum CPU cost for any program of this type
public static final String Bank[]={"lowerDeposit","mediumDeposit","higherDeposit","withdraw","lowerTransfer","mediumTransfer","higherTransfer","message","getAmount","getSourceIP","getTargetIP","checkPettyCash","getMaliciousIP","checkPettyCashTarget","greaterDeposit","greaterTransfer","playSound","logMessage"};
public static final int BankLevel[]={1,25,75,1,1,25,75,1,1,1,1,25,15,25,95,95,10,1};
public static final float BankCPUCost[]={1.0f,5.0f,10.0f,1.0f,1.0f,5.0f,10.0f,1.0f,1.0f,1.0f,1.0f,5.0f,2.0f,5.0f,20.0f,20.0f,1.0f,1.0f};
public static final float BankPrice[]={10.0f,1500.0f,6000.0f,10.0f,10.0f,1500.0f,6000.0f,10.0f,10.0f,10.0f,10.0f,1500.0f,1500.0f,1500.0f,100000.0f,100000.0f,500.0f,10.0f};
public static final String BankKeys[]=new String[]{"deposit","withdraw","transfer"};
public static final float bankbase=10.0f;
public static final String Attack[]={"message","switchAttack","getCPULoad","cancelAttack","underAttack","getSourcePort","getTargetPort","getTargetIP","getHP","installScript","emptyPettyCash","showChoices","checkPettyCash","getMaximumCPULoad","checkPettyCashTarget","destroyWatches","checkForWatch","getTargetCPUCost","getIterations","getTargetHP","getSourceIP","berserk","zombie","isZombie","deleteLogs","getMaliciousIP","playSound","freeze","editLogs","logMessage","changeDailyPay","stealFile"};
public static final int AttackLevel[]={1,50,10,10,10,5,5,5,25,15,15,1,60,25,60,95,50,85,15,35,1,40,60,60,45,1,10,65,55,1,15,70};
public static final float AttackCPUCost[]={1.0f,10.0f,2.0f,2.0f,2.0f,1.0f,1.0f,1.0f,5.0f,5.0f,5.0f,1.0f,8.0f,2.0f,8.0f,25.0f,5.0f,15.0f,2.0f,3.0f,1.0f,5.0f,0.0f,0.0f,4.0f,1.0f,1.0f,10.0f,4.0f,1.0f,5.0f,7.0f};
public static final float AttackPrice[]={5.0f,1000.0f,10.0f,50.0f,50.0f,10.0f,10.0f,10.0f,500.0f,250.0f,250.0f,5.0f,750.0f,500.0f,750.0f,300000.0f,2000.0f,10000.0f,150.0f,1500.0f,5.0f,1500.0f,2000.0f,500.0f,1000.0f,50.0f,500.0f,2500.0f,1500.0f,5.0f,250.0f,3000.0f};
private static String AttackKeys[]=new String[]{"continue","initialize","finalize"};
public static final float attackbase=20.0f;
public static final String Redirect[]={"redirectDuctTape","redirectGermanium","redirectSilicon","redirectYBCO","redirectPlutonium"};
public static final int RedirectLevel[]={1,25,45,75,85};
public static final float RedirectCPUCost[]={1.0f,5.0f,10.0f,15.0f,20.0f};
public static final float RedirectPrice[]={20.0f,1500.0f,6000.0f,50000.0f,150000.0f};
private static String RedirectKeys[]=new String[]{"continue","initialize","finalize"};
public static final float redirectbase=20.0f;
public static final String FTP[]={"put","get","message","getTargetIP","getMaliciousIP","getFileName","getFileType","getFilePrice","playSound","logMessage"};
//BUG FIX: FTPLevel previously held only 9 entries for the 10 FTP functions, so looking up the
//level of the last function ("logMessage") in compileApplication() threw
//ArrayIndexOutOfBoundsException. A 10th entry (level 1) restores the parallel-array invariant.
public static final int FTPLevel[]={1,1,1,1,1,1,1,1,1,1};
public static final float FTPCPUCost[]={2.0f,2.0f,2.0f,2.0f,2.0f,2.0f,2.0f,2.0f,1.0f,2.0f};
private static final float FTPPrice[]={50.0f,50.0f,50.0f,50.0f,500.0f,800.0f,1600.0f,3000.0f,50.0f,50.0f};
private static String FTPKeys[]=new String[]{"put","get"};
public static final float ftpbase=10.0f;
public static final String Watch[]={"checkForFireWall","counterattack","checkPettyCash","switchFireWall","switchAnyFireWall","depositPettyCash","checkFireWall","shutDownPorts","shutDownPort","heal","getTargetPort","message","getTargetIP","getSearchFireWall","getSourceIP","counterattackBank","counterattackAttack","getDefaultAttack","logMessage","turnOnPort","turnOnPorts","getDefaultBank","getDefaultFTP","getDefaultHTTP","getPort","cancelAttack","sendEmail","playSound","sendFacebookMessage","transferMoney","shutDownWatch","turnOnWatch","scan","getTriggerParameter","getTransactionAmount","getCPULoad","getMaximumCPULoad","isTriggered","attack","zombieAttack"};
public static final int WatchLevel[]={15,50,25,15,10,95,5,80,75,75,1,1,1,20,1,60,60,50,1,75,80,60,60,60,1,50,35,10,1,40,65,65,45,35,5,10,20,35,50,55};
public static final float WatchCPUCost[]={3.0f,10.0f,4.0f,3.0f,5.0f,15.0f,2.0f,10.0f,10.0f,15.0f,1.0f,1.0f,1.0f,2.0f,1.0f,12.0f,12.0f,2.0f,1.0f,10.0f,15.0f,2.0f,2.0f,2.0f,1.0f,4.0f,1.0f,1.0f,1.0f,5.0f,8.0f,8.0f,5.0f,2.0f,1.0f,2.0f,2.0f,2.0f,10.0f,10.0f};
public static final float WatchPrice[]={250.0f,10000.0f,250.0f,250.0f,500.0f,100000.0f,100.0f,5000.0f,2000.0f,10000.0f,250.0f,50.0f,250.0f,500.0f,250.0f,15000.0f,15000.0f,10000.0f,50.0f,2000.0f,5000.0f,1000.0f,1000.0f,1000.0f,250.0f,2000.0f,1000.0f,500.0f,250.0f,2000.0f,2000.0f,2000.0f,2500.0f,1000.0f,250.0f,250.0f,1000.0f,1000.0f,10000.0f,15000.0f};
private static String WatchKeys[]=new String[]{"fire"};
public static final float watchbase=5.0f;
public static final String HTTP[]={"getVisitorIP","getHostIP","message","logMessage","popUp","getParameter","playSound","triggerWatch","replaceContent","fetchGetVariable","hideStore","turnOnWatch"};
public static final int HTTPLevel[]={1,1,1,1,5,20,10,30,30,35,40,30};
public static final float HTTPCPUCost[]={1.0f,1.0f,1.0f,1.0f,2.0f,1.0f,1.0f,3.0f,1.0f,1.0f,6.0f,3.0f};
public static final float HTTPPrice[]={10.0f,10.0f,10.0f,50.0f,250.0f,100.0f,500.0f,1000.0f,250.0f,500.0f,1500.0f,1000.0f};
private static String HTTPKeys[]=new String[]{"enter","exit","submit"};
public static final float httpbase=10.0f;
//General-purpose helper functions that every script type may call for free (no level
//requirement, no extra cost). NOTE(review): several names appear twice ("giveTask",
//"setTask", ...) — harmless for the linear lookup in checkHelper(), but worth de-duping.
public static String HelperFunctions[]={
    "clearFile","readFile","readLine","countLines","writeLine","indexOf","intValue","floatValue","equal",
    "printf","rand","setGlobal","getGlobal","writeFile","fileExists","replaceAll","parseFloat","parseInt",
    "char","toUpper","toLower","isGlobalSet","isGetVariableSet","isParameterSet","isTriggerParameterSet",
    "split","join","length","giveTask","giveCommodity","takeCommodity","giveFile","takeFile","giveXP",
    "finishQuest","giveAccess","setTask","exchangeFile","exchangeCommodity","giveQuest","getDate","getTime",
    "giveTask","setTask","giveQuest","finishQuest","takeMoney","takeCommodity","exchangeCommodity","exchangeFile",
    "giveXP","giveMoney","giveCommodity","giveFile","giveFile2","takeFile","takeFile2","giveMoney","giveAccess","triggerWatchRemote",
    "changeNetwork","completeTask"
};
//Returns true if the given name is one of the free helper functions above (linear scan).
boolean checkHelper(String function){
    for(int i=0;i<HelperFunctions.length;i++){
        if(HelperFunctions[i].equals(function)) return(true);
    }
    return(false);
}
/** Compiles a regular HackScript application.
 * @param Type one of the HackerFile.*_COMPILED / HackerFile.HTTP type constants
 * @param Source map of entry-point key (e.g. "deposit") to script text
 * @param Stats map of skill name to player level; missing skills default to level 1
 * @return map with "cpucost" (Double, at least the family's base cost), "price" (Double)
 *         and "error" (String, empty on success)
 */
public HashMap compileApplication(Integer Type,HashMap Source,HashMap Stats){
    int type=Type;
    float cpubase=0.0f;
    //Create Hash Maps of the Various Function lists.
    HashMap BankFunctions=new HashMap();
    HashMap AttackFunctions=new HashMap();
    HashMap FTPFunctions=new HashMap();
    HashMap WatchFunctions=new HashMap();
    HashMap HTTPFunctions=new HashMap();
    //NOTE(review): this local shadows the static String[] HelperFunctions, and it is filled
    //from Challenge[], not from the helper list; checkHelper() (called below) still reads the
    //static array, so both lists end up accepted — confirm this shadowing is intentional.
    HashMap HelperFunctions=new HashMap();
    HashMap RedirectFunctions=new HashMap();
    HashMap returnMe=new HashMap();
    String Keys[]=null;
    int level=1;
    Program GatherInformation=null;
    try{
        //Index every API family by name -> position in its parallel cost/level arrays.
        for(int i=0;i<Challenge.length;i++){
            HelperFunctions.put(Challenge[i],"");
        }
        for(int i=0;i<Bank.length;i++){
            BankFunctions.put(Bank[i],new Integer(i));
        }
        for(int i=0;i<Attack.length;i++){
            AttackFunctions.put(Attack[i],new Integer(i));
        }
        for(int i=0;i<Redirect.length;i++){
            RedirectFunctions.put(Redirect[i],new Integer(i));
        }
        for(int i=0;i<FTP.length;i++){
            FTPFunctions.put(FTP[i],new Integer(i));
        }
        for(int i=0;i<Watch.length;i++){
            WatchFunctions.put(Watch[i],new Integer(i));
        }
        for(int i=0;i<HTTP.length;i++){
            HTTPFunctions.put(HTTP[i],new Integer(i));
        }
        //*/
        //Pick entry-point keys, relevant skill level, validator program and base CPU cost
        //for the requested program type.
        if(type==HackerFile.BANKING_COMPILED){
            Keys=BankKeys;
            if(Stats.get("Merchanting")!=null) level=(Integer)Stats.get("Merchanting");
            GatherInformation=new Banking(null,null,null);
            cpubase=bankbase;
        }else if(type==HackerFile.ATTACKING_COMPILED){
            Keys=AttackKeys;
            if(Stats.get("Attack")!=null) level=(Integer)Stats.get("Attack");
            GatherInformation=new AttackProgram(null,null,null,null,null);
            cpubase=attackbase;
        }else if(type==HackerFile.SHIPPING_COMPILED){
            Keys=RedirectKeys;
            if(Stats.get("Redirect")!=null) level=(Integer)Stats.get("Redirect");
            GatherInformation=new ShippingProgram(null,null,null,null,null);
            cpubase=redirectbase;
        }else if(type==HackerFile.FTP_COMPILED){
            //NOTE(review): FTP has no skill lookup, so level stays 1 here.
            Keys=FTPKeys;
            GatherInformation=new FTPProgram(null,null,null,null);
            cpubase=ftpbase;
        }else if(type==HackerFile.WATCH_COMPILED){
            Keys=WatchKeys;
            if(Stats.get("Watch")!=null) level=(Integer)Stats.get("Watch");
            GatherInformation=new WatchProgram(null,null,null);
            cpubase=watchbase;
        }
        if(type==HackerFile.HTTP){
            Keys=HTTPKeys;
            if(Stats.get("HTTP")!=null) level=(Integer)Stats.get("HTTP");
            GatherInformation=new HTTPProgram(null,null);
            cpubase=httpbase;
        }
    }catch(Exception e){
        e.printStackTrace();
    }
    //Pull the script text for each entry point; a missing key simply leaves the slot null.
    String Scripts[]=new String[Keys.length];
    try{
        for(int i=0;i<Scripts.length;i++){
            Scripts[i]=(String)Source.get(Keys[i]);
        }
    }catch(Exception e){
    }
    float cpucost=0.0f;
    float price=5.0f;   //every program carries a 5.0 base price before per-call costs
    String error="";
    boolean errorFound=false;
    int currentFunction=0;
    HackerLinker HL=new HackerLinker(GatherInformation,null);
    try{
        //Execute the given script.
        for(int i=0;i<Scripts.length;i++){
            currentFunction=i;
            ArrayList Functions=RunFactory.getCodeList(Scripts[i]);
            RunFactory.runAllCode(Scripts[i],HL);
            if(GatherInformation.getError().length()>0){
                error=GatherInformation.getError();
                errorFound=true;
                break;
            }
            //Check every called function against the type's API table: enforce the level
            //requirement and accumulate CPU cost and price.
            for(int ii=0;ii<Functions.size();ii++){
                String name=(String)Functions.get(ii);
                if(HelperFunctions.get(name)!=null) continue;
                if(type==HackerFile.BANKING_COMPILED){
                    if(!(BankFunctions.get(name)==null)){
                        int key=(Integer)BankFunctions.get(name);
                        if(BankLevel[key]>level){
                            errorFound=true;
                            error="You must be Merchanting level "+BankLevel[key]+" to compile "+name+"().";
                            break;
                        }
                        cpucost+=BankCPUCost[key];
                        price+=BankPrice[key];
                    }else{
                        if(!checkHelper(name)){
                            errorFound=true;
                            error="Function "+name+"() not found.";
                            break;
                        }
                    }
                }else if(type==HackerFile.ATTACKING_COMPILED){
                    if(!(AttackFunctions.get(name)==null)){
                        int key=(Integer)AttackFunctions.get(name);
                        if(AttackLevel[key]>level){
                            errorFound=true;
                            error="You must be Attack level "+AttackLevel[key]+" to compile "+name+"().";
                            break;
                        }
                        cpucost+=AttackCPUCost[key];
                        price+=AttackPrice[key];
                    }else{
                        if(!checkHelper(name)){
                            errorFound=true;
                            error="Function "+name+"() not found.";
                            break;
                        }
                    }
                }else if(type==HackerFile.SHIPPING_COMPILED){
                    if(!(RedirectFunctions.get(name)==null)){
                        int key=(Integer)RedirectFunctions.get(name);
                        if(RedirectLevel[key]>level){
                            errorFound=true;
                            error="You must be Redirect level "+RedirectLevel[key]+" to compile "+name+"().";
                            break;
                        }
                        cpucost+=RedirectCPUCost[key];
                        price+=RedirectPrice[key];
                    }else{
                        if(!checkHelper(name)){
                            errorFound=true;
                            error="Function "+name+"() not found.";
                            break;
                        }
                    }
                }else if(type==HackerFile.FTP_COMPILED){
                    if(!(FTPFunctions.get(name)==null)){
                        int key=(Integer)FTPFunctions.get(name);
                        if(FTPLevel[key]>level){
                            errorFound=true;
                            error="You must be FTP level "+FTPLevel[key]+" to compile "+name+"().";
                            break;
                        }
                        cpucost+=FTPCPUCost[key];
                        price+=FTPPrice[key];
                    }else{
                        if(!checkHelper(name)){
                            errorFound=true;
                            error="Function "+name+"() not found.";
                            break;
                        }
                    }
                }else if(type==HackerFile.WATCH_COMPILED){
                    if(!(WatchFunctions.get(name)==null)){
                        int key=(Integer)WatchFunctions.get(name);
                        if(WatchLevel[key]>level){
                            errorFound=true;
                            error="You must be Watch level "+WatchLevel[key]+" to compile "+name+"().";
                            break;
                        }
                        cpucost+=WatchCPUCost[key];
                        price+=WatchPrice[key];
                    }else{
                        if(!checkHelper(name)){
                            errorFound=true;
                            error="Function "+name+"() not found.";
                            break;
                        }
                    }
                }else if(type==HackerFile.HTTP){
                    if(!(HTTPFunctions.get(name)==null)){
                        int key=(Integer)HTTPFunctions.get(name);
                        if(HTTPLevel[key]>level){
                            errorFound=true;
                            error="You must be HTTP level "+HTTPLevel[key]+" to compile "+name+"().";
                            break;
                        }
                        cpucost+=HTTPCPUCost[key];
                        price+=HTTPPrice[key];
                    }else{
                        if(!checkHelper(name)){
                            errorFound=true;
                            error="Function "+name+"() not found.";
                            break;
                        }
                    }
                }
            }
            if(errorFound) break;
        }
    }catch(Exception e){
        error="Syntax error parsing function "+Keys[currentFunction]+". compiler returned ["+e.getMessage()+"]";
    }
    //The CPU cost is never below the family's base cost.
    returnMe.put("cpucost",new Double(Math.max(cpucost,cpubase)));
    returnMe.put("price",new Double(price));
    returnMe.put("error",error);
    System.gc();
    return(returnMe);
}
//Functions available inside challenge ("toy problem") scripts.
public static final String Challenge[]={"strlen","sqrt","abs","ln","atan","acos","asin","tan","cos","sin","getE","getPI","substr","getInputString","getInputStringCount","setOutputString","getInputFloat","getInputFloatCount","setOutputFloat","getInputInt","getInputIntCount","setOutputInt","equal","printf","rand","intValue","floatValue","indexOf","parseFloat","parseInt","replaceAll","split","length"};
/** Runs a challenge script application.
 * Each O* array is defensively converted; a malformed array simply leaves the
 * corresponding typed array null (the catch blocks below are deliberate best-effort).
 * @return map with "outint"/"outstring"/"outdouble" outputs, "success" (Boolean) and "error" (String)
 */
public HashMap runToyProblem(String Source,Integer Iterations,Object OInputFloat[],Object OInputString[],Object OInputInteger[],Object OTargetFloat[],Object OTargetString[],Object OTargetInteger[]){
    HashMap returnMe=new HashMap();
    String error="";
    //Convert the untyped RPC arrays into typed arrays (null on any failure).
    Double InputFloat[]=null;
    try{
        InputFloat=new Double[OInputFloat.length];
        for(int i=0;i<OInputFloat.length;i++) InputFloat[i]=(Double)OInputFloat[i];
    }catch(Exception e){}
    String InputString[]=null;
    try{
        InputString=new String[OInputString.length];
        for(int i=0;i<OInputString.length;i++) InputString[i]=(String)OInputString[i];
    }catch(Exception e){}
    Integer InputInteger[]=null;
    try{
        InputInteger=new Integer[OInputInteger.length];
        for(int i=0;i<OInputInteger.length;i++) InputInteger[i]=(Integer)OInputInteger[i];
    }catch(Exception e){}
    Double TargetFloat[]=null;
    try{
        TargetFloat=new Double[OTargetFloat.length];
        for(int i=0;i<OTargetFloat.length;i++) TargetFloat[i]=(Double)OTargetFloat[i];
    }catch(Exception e){}
    String TargetString[]=null;
    try{
        TargetString=new String[OTargetString.length];
        for(int i=0;i<OTargetString.length;i++) TargetString[i]=(String)OTargetString[i];
    }catch(Exception e){}
    Integer TargetInteger[]=null;
    try{
        TargetInteger=new Integer[OTargetInteger.length];
        for(int i=0;i<OTargetInteger.length;i++) TargetInteger[i]=(Integer)OTargetInteger[i];
    }catch(Exception e){}
    ToyProgram TP=new ToyProgram(Source,64,(int)Iterations);
    ToyLinker HL=new ToyLinker(TP,null);
    //Add the inputs.
    if(InputFloat!=null) for(int i=0;i<InputFloat.length;i++) TP.addInFloat((float)((double)InputFloat[i]));
    if(InputString!=null) for(int i=0;i<InputString.length;i++){
        TP.addInString(InputString[i]);
    }
    if(InputInteger!=null) for(int i=0;i<InputInteger.length;i++){
        TP.addInInt((int)InputInteger[i]);
    }
    //Add the targets.
    if(TargetFloat!=null) for(int i=0;i<TargetFloat.length;i++) TP.addTargetFloat((float)((double)TargetFloat[i]));
    if(TargetString!=null) for(int i=0;i<TargetString.length;i++){
        TP.addTargetString(TargetString[i]);
    }
    if(TargetInteger!=null) for(int i=0;i<TargetInteger.length;i++) TP.addTargetInt((int)TargetInteger[i]);
    //Validate that the script only calls whitelisted Challenge functions.
    boolean found=false;
    try{
        ArrayList Functions=RunFactory.getCodeList(Source);
        for(int i=0;i<Functions.size();i++){
            String name=(String)Functions.get(i);
            found=false;
            for(int ii=0;ii<Challenge.length;ii++){
                if(Challenge[ii].equals(name)) found=true;
            }
            if(found==false){
                TP.setError("Function "+name+" not found.");
                break;
            }
        }
    }catch(Exception e){
        TP.setError("Syntax error in challenge code compiler returned ["+e.getMessage()+"]");
    }
    //NOTE(review): 'found' keeps the value from the LAST function checked; an empty function
    //list leaves it false and skips execution — confirm that is the intended behavior.
    if(found){
        TP.execute(null);
    }
    returnMe.put("outint",TP.getOutInt());
    returnMe.put("outstring",TP.getOutString());
    returnMe.put("outdouble",TP.getOutDouble());
    returnMe.put("success",new Boolean(TP.getSuccess()));
    returnMe.put("error",TP.getError());
    System.gc();
    return(returnMe);
}
/** Testing main. */
public static void main(String args[]){
    HackerRPC HR=new HackerRPC();
//  HR.doSearch("dorothy scare","");
}
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.testrunner; import com.facebook.buck.test.selectors.TestDescription; import com.facebook.buck.test.selectors.TestSelectorList; import org.w3c.dom.Document; import org.w3c.dom.Element; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; /** * Base class for both the JUnit and TestNG runners. 
*/ public abstract class BaseRunner { protected static final String FILTER_DESCRIPTION = "TestSelectorList-filter"; protected static final String ENCODING = "UTF-8"; protected File outputDirectory; protected List<String> testClassNames; protected long defaultTestTimeoutMillis; protected TestSelectorList testSelectorList; protected boolean isDryRun; protected Set<TestDescription> seenDescriptions = new HashSet<>(); public abstract void run() throws Throwable; /** * The test result file is written as XML to avoid introducing a dependency on JSON (see class * overview). */ protected void writeResult(String testClassName, List<TestResult> results) throws IOException, ParserConfigurationException, TransformerException { // XML writer logic taken from: // http://www.genedavis.com/library/xml/java_dom_xml_creation.jsp DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document doc = docBuilder.newDocument(); doc.setXmlVersion("1.1"); Element root = doc.createElement("testcase"); root.setAttribute("name", testClassName); doc.appendChild(root); for (TestResult result : results) { Element test = doc.createElement("test"); // name attribute test.setAttribute("name", result.testMethodName); // success attribute boolean isSuccess = result.isSuccess(); test.setAttribute("success", Boolean.toString(isSuccess)); // type attribute test.setAttribute("type", result.type.toString()); // time attribute long runTime = result.runTime; test.setAttribute("time", String.valueOf(runTime)); // Include failure details, if appropriate. Throwable failure = result.failure; if (failure != null) { String message = failure.getMessage(); test.setAttribute("message", message); String stacktrace = stackTraceToString(failure); test.setAttribute("stacktrace", stacktrace); } // stdout, if non-empty. 
if (result.stdOut != null) { Element stdOutEl = doc.createElement("stdout"); stdOutEl.appendChild(doc.createTextNode(result.stdOut)); test.appendChild(stdOutEl); } // stderr, if non-empty. if (result.stdErr != null) { Element stdErrEl = doc.createElement("stderr"); stdErrEl.appendChild(doc.createTextNode(result.stdErr)); test.appendChild(stdErrEl); } root.appendChild(test); } // Create an XML transformer that pretty-prints with a 2-space indent. // The transformer factory uses a system property to find the class to use. We need to default // to the system default since we have the user's classpath and they may not have everything set // up for the XSLT transform to work. String vendor = System.getProperty("java.vm.vendor"); String factoryClass; if ("IBM Corporation".equals(vendor)) { // Used in the IBM JDK --- from // https://www.ibm.com/support/knowledgecenter/SSYKE2_8.0.0/com.ibm.java.aix.80.doc/user/xml/using_xml.html factoryClass = "com.ibm.xtq.xslt.jaxp.compiler.TransformerFactoryImpl"; } else { // Used in the OpenJDK and the Oracle JDK. factoryClass = "com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl"; } // When we get this far, we're exiting, so no need to reset the property. System.setProperty("javax.xml.transform.TransformerFactory", factoryClass); TransformerFactory transformerFactory = TransformerFactory.newInstance(); Transformer trans = transformerFactory.newTransformer(); trans.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no"); trans.setOutputProperty(OutputKeys.INDENT, "yes"); trans.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2"); // Write the result to a file. 
String testSelectorSuffix = ""; if (!testSelectorList.isEmpty()) { testSelectorSuffix += ".test_selectors"; } if (isDryRun) { testSelectorSuffix += ".dry_run"; } OutputStream output; if (outputDirectory != null) { File outputFile = new File(outputDirectory, testClassName + testSelectorSuffix + ".xml"); output = new BufferedOutputStream(new FileOutputStream(outputFile)); } else { output = System.out; } StreamResult streamResult = new StreamResult(output); DOMSource source = new DOMSource(doc); trans.transform(source, streamResult); if (outputDirectory != null) { output.close(); } } private String stackTraceToString(Throwable exc) { StringWriter writer = new StringWriter(); exc.printStackTrace(new PrintWriter(writer, /* autoFlush */true)); return writer.toString(); } /** * Expected arguments are: * <ul> * <li>(string) output directory * <li>(long) default timeout in milliseconds (0 for no timeout) * <li>(string) newline separated list of test selectors * <li>(string...) fully-qualified names of test classes * </ul> */ protected void parseArgs(String... 
args) { File outputDirectory = null; long defaultTestTimeoutMillis = Long.MAX_VALUE; TestSelectorList testSelectorList = TestSelectorList.empty(); boolean isDryRun = false; List<String> testClassNames = new ArrayList<>(); for (int i = 0; i < args.length; i++) { switch (args[i]) { case "--default-test-timeout": defaultTestTimeoutMillis = Long.parseLong(args[++i]); break; case "--test-selectors": List<String> rawSelectors = Arrays.asList(args[++i].split("\n")); testSelectorList = TestSelectorList.builder() .addRawSelectors(rawSelectors) .build(); break; case "--dry-run": isDryRun = true; break; case "--output": outputDirectory = new File(args[++i]); if (!outputDirectory.exists()) { System.err.printf("The output directory did not exist: %s\n", outputDirectory); System.exit(1); } break; default: testClassNames.add(args[i]); } } if (testClassNames.isEmpty()) { System.err.println("Must specify at least one test."); System.exit(1); } this.outputDirectory = outputDirectory; this.defaultTestTimeoutMillis = defaultTestTimeoutMillis; this.isDryRun = isDryRun; this.testClassNames = testClassNames; this.testSelectorList = testSelectorList; } protected void runAndExit() { // Run the tests. try { run(); } catch (Throwable e){ e.printStackTrace(); } finally { // Explicitly exit to force the test runner to complete even if tests have sloppily left // behind non-daemon threads that would have otherwise forced the process to wait and // eventually timeout. // // Separately, we're using a successful exit code regardless of test outcome since JUnitRunner // is designed to execute all tests and produce a report of success or failure. We've done // that successfully if we've gotten here. System.exit(0); } } }
package de.windowsfreak.testjni.writer;

import de.windowsfreak.testjni.Config;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.nio.channels.ServerSocketChannel;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Set;

/**
 * Created by lazer_000 on 25.06.2015.
 *
 * TCP server sink: accepts clients on config.sinkPort and streams config/image frames
 * to every connected client via non-blocking channels.
 */
public class TCPSink implements Sink {
    ServerSocketChannel serverSocketChannel;
    boolean running = false;        // NOTE(review): never read — candidate for removal.
    Thread thread;                  // NOTE(review): never assigned; the accept thread is anonymous.
    public LinkedList<TCPWriterInstance> instances = new LinkedList<TCPWriterInstance>();
    public Selector selector;       // read-selector shared by all clients
    Object waitRead = new Object(); // NOTE(review): never used — candidate for removal.

    /** One connected client: its channel plus a dedicated write selector. */
    class TCPWriterInstance {
        SocketChannel socketChannel;
        public Selector selector;

        TCPWriterInstance(SocketChannel socketChannel) throws IOException {
            this.socketChannel = socketChannel;
            this.selector = Selector.open();
        }
    }

    /**
     * Binds the server socket and starts a daemon-less accept loop; each accepted client is
     * registered for reading on the shared selector and for writing on its own selector.
     */
    public void initialize(Config config) throws IOException {
        selector = Selector.open();
        if (serverSocketChannel != null) {
            try {
                shutDown();
            } catch (Exception e) {
                new Exception("Could not close previous ServerSocketChannel", e).printStackTrace();
            }
        }
        serverSocketChannel = ServerSocketChannel.open();
        serverSocketChannel.socket().bind(new InetSocketAddress(config.sinkPort));
        new Thread() {
            public void run() {
                while (!(this.isInterrupted())) {
                    try {
                        SocketChannel sc = TCPSink.this.serverSocketChannel.accept();
                        sc.configureBlocking(false);
                        sc.register(selector, SelectionKey.OP_READ);
                        TCPWriterInstance instance = new TCPWriterInstance(sc);
                        sc.register(instance.selector, SelectionKey.OP_WRITE);
                        instances.add(instance);
                        // Force the config header to be re-sent so the new client receives it.
                        configWritten = false;
                    } catch (IOException e) {
                        // accept() throws when the server channel is closed in shutDown().
                        System.out.println("TCP Server stopped!");
                        return;
                    }
                }
            }
        }.start();
    }

    /**
     * Blocks on the shared read selector and returns the (shared, flipped) header buffer once a
     * full {@code headerSize}-byte command header has been read, or null if nothing arrived.
     */
    public ByteBuffer receiveCommandHeader() {
        for (TCPWriterInstance instance : instances) {
            try {
                selector.select();
                Set readyKeys = selector.selectedKeys();
                Iterator iterator = readyKeys.iterator();
                while (iterator.hasNext()) {
                    SelectionKey key = (SelectionKey) iterator.next();
                    iterator.remove();
                    if (key.isReadable()) {
                        SocketChannel client = (SocketChannel) key.channel();
                        synchronized (header) {
                            header.clear();
                            int bytesRead = 0, headerBytes = 0;
                            while (headerBytes < headerSize) {
                                bytesRead = client.read(header);
                                headerBytes += bytesRead;
                                if (bytesRead < 1) break;
                            }
                            // Ignore short reads (< 8 bytes, i.e. < headerSize).
                            if (headerBytes < 8) continue;
                            header.flip();
                            return header;
                        }
                    }
                }
            } catch (Exception e) {
                new Exception("Could not receive command headers", e).printStackTrace();
            }
        }
        return null;
    }

    /** Closes all client channels, then the server channel (nulled even if close fails). */
    public void shutDown() throws IOException {
        for (TCPWriterInstance instance : instances) {
            try {
                instance.socketChannel.close();
            } catch (Exception e) {
                new Exception("Could not close SocketChannel", e).printStackTrace();
            }
        }
        instances.clear();
        try {
            serverSocketChannel.close();
        } finally {
            serverSocketChannel = null;
        }
    }

    /**
     * Writes the full remaining content of {@code header} to every client.
     *
     * @return true if every client was written successfully (a failing client is dropped)
     */
    public boolean writeHeader(final ByteBuffer header) {
        boolean errors = false;
        TCPWriterInstance erroneous = null;
        int totalBytes = header.remaining();
        for (TCPWriterInstance instance : instances) {
            try {
                header.rewind();
                int totalWritten = 0;
                while (totalWritten < totalBytes) {
                    instance.selector.select();
                    Set readyKeys = instance.selector.selectedKeys();
                    Iterator iterator = readyKeys.iterator();
                    while (iterator.hasNext()) {
                        SelectionKey key = (SelectionKey) iterator.next();
                        iterator.remove();
                        if (key.isWritable()) {
                            SocketChannel client = (SocketChannel) key.channel();
                            int bytesWritten = (int) client.write(header);
                            totalWritten += bytesWritten;
                            if (bytesWritten < 0) throw new Exception("Could not complete writing!");
                        }
                    }
                }
            } catch (Exception e) {
                new Exception("Could not write to SocketChannel", e).printStackTrace();
                erroneous = instance;
                errors = true;
            }
        }
        if (errors) {
            // NOTE(review): only the last failing instance is removed per call.
            instances.remove(erroneous);
        }
        return !errors;
    }

    /**
     * Sends the three-part configuration preamble (magic 0, geometry, FOV, mode bytes).
     *
     * @return true if all three header packets reached every client
     */
    public boolean writeConfig(final Config config) {
        boolean errors = false;
        synchronized (header) {
            header.clear();
            header.putInt(0); // magic value for config
            header.putShort(config.x);
            header.putShort(config.y);
            header.flip();
            // BUG FIX: writeHeader() returns true on SUCCESS, so the error flag must accumulate
            // the negation. Previously `errors |= writeHeader(header)` flagged successful writes
            // as errors, making writeConfig() return false exactly when everything worked.
            errors |= !writeHeader(header);

            header.clear();
            header.putFloat(config.fovX);
            header.putFloat(config.fovY);
            header.flip();
            errors |= !writeHeader(header);

            header.clear();
            header.put(config.fps);
            header.put(config.mode);
            header.put(config.depth);
            header.put((byte) 255); // unused
            header.putInt(-1); // unused
            header.flip();
            errors |= !writeHeader(header);
        }
        configWritten = true;
        return !errors;
    }

    final int headerSize = 8;
    boolean configWritten = false;
    // Shared 8-byte scratch header; all access is synchronized on the buffer itself.
    ByteBuffer header = ByteBuffer.allocateDirect(headerSize);

    /**
     * Sends one image frame (size + frameId header, then the pixel payload) to every client,
     * prefixing the config preamble if it has not been sent yet.
     *
     * @return true if the frame reached every client (a failing client is dropped)
     */
    @Override
    public boolean writeImage(final Config config, final ByteBuffer in, final int frameId) {
        boolean errors = false;
        TCPWriterInstance erroneous = null;
        synchronized (in) {
            in.rewind();
            int size = in.remaining();
            synchronized (header) {
                if (!configWritten) writeConfig(config);
                header.clear();
                header.putInt(size);
                header.putInt(frameId);
                header.flip();
                int totalBytes = header.remaining() + size;
                // not multithreaded !!!
                for (TCPWriterInstance instance : instances) {
                    try {
                        header.rewind();
                        in.rewind();
                        int totalWritten = 0;
                        while (totalWritten < totalBytes) {
                            instance.selector.select();
                            Set readyKeys = instance.selector.selectedKeys();
                            Iterator iterator = readyKeys.iterator();
                            while (iterator.hasNext()) {
                                SelectionKey key = (SelectionKey) iterator.next();
                                iterator.remove();
                                if (key.isWritable()) {
                                    SocketChannel client = (SocketChannel) key.channel();
                                    // Gathering write: header immediately followed by payload.
                                    int bytesWritten = (int) client.write(new ByteBuffer[]{header, in});
                                    totalWritten += bytesWritten;
                                    if (bytesWritten < 0) throw new Exception("Could not complete writing!");
                                }
                            }
                        }
                    } catch (Exception e) {
                        new Exception("Could not write to SocketChannel", e).printStackTrace();
                        erroneous = instance;
                        errors = true;
                    }
                }
            }
        }
        if (errors) {
            instances.remove(erroneous);
            /*
            instances.removeIf(new Predicate<TCPWriterInstance>() {
                @Override
                public boolean test(TCPWriterInstance instance) {
                    return !instance.socketChannel.isOpen();
                }
            });
            */
        }
        return !errors;
    }

    /** Sleeps in 500ms steps until at least one client is connected (or the flag clears). */
    public boolean condition(Config config) throws InterruptedException {
        while (instances.size() < 1 && config.condition) {
            if (config.debug) System.out.println("Waiting for first client!");
            Thread.sleep(500);
        }
        return instances.size() > 0;
    }
}
/**
 * Copyright 2013 Tobias Gierke <tobias.gierke@code-sourcery.de>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.codesourcery.threadwatcher;

import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.UTFDataFormatException;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.MalformedInputException;

/**
 * One decoded event record from the thread-watcher's circular byte buffer.
 * All multi-byte fields are little-endian and every buffer index is taken
 * modulo {@link #BUFFER_LENGTH} so records may wrap around the ring.
 */
public final class ThreadEvent
{
    public static final int THREAD_START = 0;
    public static final int THREAD_DEATH= 1;
    public static final int THREAD_STATE_CHANGE = 2;

    // max. thread name length (including zero termination byte)
    public static final int MAX_THREADNAME_LENGTH = 51;

    // size of generic part
    public static final int SIZEOF_GENERIC_EVENT = 24;

    // total record sizes for specific event types
    public static final int SIZEOF_THREAD_START_EVENT = SIZEOF_GENERIC_EVENT+MAX_THREADNAME_LENGTH;
    public static final int SIZEOF_THREAD_DEATH_EVENT = SIZEOF_GENERIC_EVENT+0;
    public static final int SIZEOF_THREAD_STATE_CHANGE_EVENT = SIZEOF_GENERIC_EVENT+4;

    public static final int MAX_RECORD_SIZE= Math.max( Math.max( SIZEOF_THREAD_START_EVENT, SIZEOF_THREAD_DEATH_EVENT ) , SIZEOF_THREAD_STATE_CHANGE_EVENT);
    public static final int MIN_RECORD_SIZE= Math.min( Math.min( SIZEOF_THREAD_START_EVENT, SIZEOF_THREAD_DEATH_EVENT ) , SIZEOF_THREAD_STATE_CHANGE_EVENT);

    public static final int BUFFER_LENGTH = MAX_RECORD_SIZE * 1000;

    public byte type;
    public int threadId;
    public long timestampSeconds;
    public long timestampNanos;
    public int threadStateMask;
    public String threadName;

    /** Reads the 24-byte generic header (type, threadId, seconds, nanos) at offset. */
    protected void parseCommonFields(byte[] buffer,int offset)
    {
        this.type = buffer[offset]; // offset 0
        this.threadId = read16Bit( buffer , (offset + 4) % BUFFER_LENGTH );
        this.timestampSeconds = read64Bit( buffer , (offset + 8) % BUFFER_LENGTH);
        this.timestampNanos = read64Bit( buffer , (offset + 16) % BUFFER_LENGTH);
    }

    protected int parseThreadStartEvent(byte[] buffer,int offset)
    {
        parseCommonFields(buffer,offset);
        try {
            threadName = readModifiedUTF8String(buffer, (offset+24)%BUFFER_LENGTH, MAX_THREADNAME_LENGTH);
        } catch (UTFDataFormatException e) {
            // Keep going with a placeholder rather than aborting the whole parse.
            threadName="<no valid UTF-8>";
        }
        return ThreadEvent.SIZEOF_THREAD_START_EVENT;
    }

    protected int parseThreadDeathEvent(byte[] buffer,int offset)
    {
        parseCommonFields(buffer,offset);
        return ThreadEvent.SIZEOF_THREAD_DEATH_EVENT;
    }

    protected int parseThreadStateChangeEvent(byte[] buffer,int offset)
    {
        parseCommonFields(buffer,offset);
        threadStateMask = read32Bit(buffer, (offset+24)%BUFFER_LENGTH);
        return ThreadEvent.SIZEOF_THREAD_STATE_CHANGE_EVENT;
    }

    public ThreadEvent() {
    }

    /** Copy constructor. */
    public ThreadEvent(ThreadEvent other)
    {
        this.type = other.type;
        this.threadId = other.threadId;
        this.timestampSeconds = other.timestampSeconds;
        this.timestampNanos = other.timestampNanos;
        this.threadStateMask = other.threadStateMask;
        this.threadName = other.threadName;
    }

    /**
     * Decodes one event record starting at {@code offset}.
     *
     * @param buffer circular event buffer
     * @param offset index of the record's type byte
     * @return the number of bytes consumed from the buffer
     * @throws RuntimeException on an unknown event type byte
     */
    public int parseBuffer(byte[] buffer,int offset)
    {
        switch(buffer[offset])
        {
            case ThreadEvent.THREAD_START:
                return parseThreadStartEvent(buffer, offset);
            case ThreadEvent.THREAD_DEATH:
                return parseThreadDeathEvent(buffer, offset);
            case ThreadEvent.THREAD_STATE_CHANGE:
                return parseThreadStateChangeEvent(buffer, offset);
            default:
        }
        throw new RuntimeException("Unhandled event type "+buffer[offset]);
    }

    @Override
    public String toString()
    {
        String result = ",threadId="+threadId+",ts_seconds="+timestampSeconds+",ts_micros="+timestampNanos;
        final String sType;
        switch(type)
        {
            case THREAD_START:
                sType="THREAD_START";
                result += ",thread_name="+threadName;
                break;
            case THREAD_DEATH:
                sType="THREAD_DEATH";
                break;
            case THREAD_STATE_CHANGE:
                sType="THREAD_STATE_CHANGE";
                result += ",state="+JVMTIThreadState.fromBitMask( threadStateMask )+" ("+threadStateMask+")";
                break;
            default:
                sType="<< UNKNOWN >>";
        }
        return "event_type="+sType+" ("+type+")"+result;
    }

    /** Reads an unsigned little-endian 16-bit value (wraps at the buffer end). */
    protected final int read16Bit(byte[] buffer,int offset)
    {
        final int low = buffer[offset] & 0xff;
        final int hi = buffer[(offset+1)%BUFFER_LENGTH] & 0xff;
        return (hi<<8)|low;
    }

    /**
     * Scans for the zero terminator within {@code maxLength} bytes and decodes the
     * preceding bytes as modified UTF-8; returns null when no terminator is found.
     * NOTE(review): the terminator index is modular, so a string that wraps around the
     * ring yields zeroByteIndex &lt; startOffset and decodes to an empty string — confirm
     * records are guaranteed not to wrap mid-name.
     */
    protected final String readModifiedUTF8String(byte[] buffer,int startOffset,int maxLength) throws UTFDataFormatException
    {
        int zeroByteIndex=-1;
        for ( int idx = 0 ; idx < maxLength ;idx++ )
        {
            final int offset = (startOffset+idx) % BUFFER_LENGTH;
            if ( buffer[ offset ] == 0 )
            {
                zeroByteIndex = offset;
                break;
            }
        }
        if ( zeroByteIndex == -1 ) {
            return null;
        }
        return convertFromModifiedUTF8(buffer,startOffset,zeroByteIndex);
    }

    /** Decodes 1-3 byte modified-UTF-8 sequences from in[offset..zeroByteIndex). */
    public static String convertFromModifiedUTF8(byte[] in, int offset, int zeroByteIndex) throws UTFDataFormatException
    {
        final char[] out = new char[MAX_THREADNAME_LENGTH];
        final int utfSize = zeroByteIndex - offset;
        int count = 0, s = 0, a;
        while (count < utfSize) {
            if ((out[s] = (char) in[offset + count++]) < '\u0080') {
                s++;
            } else if (((a = out[s]) & 0xe0) == 0xc0) {
                if (count >= utfSize) {
                    throw new UTFDataFormatException("bad second byte at " + count);
                }
                int b = in[offset + count++];
                if ((b & 0xC0) != 0x80) {
                    throw new UTFDataFormatException("bad second byte at " + (count - 1));
                }
                out[s++] = (char) (((a & 0x1F) << 6) | (b & 0x3F));
            } else if ((a & 0xf0) == 0xe0) {
                if (count + 1 >= utfSize) {
                    throw new UTFDataFormatException("bad third byte at " + (count + 1));
                }
                int b = in[offset + count++];
                int c = in[offset + count++];
                if (((b & 0xC0) != 0x80) || ((c & 0xC0) != 0x80)) {
                    throw new UTFDataFormatException("bad second or third byte at " + (count - 2));
                }
                out[s++] = (char) (((a & 0x0F) << 12) | ((b & 0x3F) << 6) | (c & 0x3F));
            } else {
                throw new UTFDataFormatException("bad byte at " + (count - 1));
            }
        }
        return new String(out,0,s);
    }

    /** Reads an unsigned little-endian 32-bit value (wraps at the buffer end). */
    protected final int read32Bit(byte[] buffer,int offset)
    {
        final int loWord = read16Bit(buffer,offset) & 0xffff;
        final int hiWord = read16Bit(buffer,(offset+2)%BUFFER_LENGTH) & 0xffff;
        return ((hiWord<<16)|loWord);
    }

    /** Reads a little-endian 64-bit value (wraps at the buffer end). */
    protected final long read64Bit(byte[] buffer,int offset)
    {
        // BUG FIX: read32Bit() returns a signed int. Assigning it straight to a long
        // sign-extends, so a set bit 31 in the low word smeared 1-bits over the high word.
        // The old trailing `& 0xffffffff` was an int literal (-1) promoted to -1L — a no-op
        // that masked nothing. Mask both halves with the long literal 0xffffffffL instead.
        final long loWord = read32Bit(buffer,offset) & 0xffffffffL;
        final long hiWord = read32Bit(buffer,(offset+4)%BUFFER_LENGTH) & 0xffffffffL;
        return (hiWord<<32)|loWord;
    }

    // NOTE(review): despite the name this returns the raw nanosecond field, not microseconds
    // (toString() also labels it "ts_micros") — confirm the intended unit with the producer.
    public long getTimestampMicros() {
        return timestampNanos;
    }

    public long getTimestampSeconds() {
        return timestampSeconds;
    }

    public int getThreadId() {
        return threadId;
    }

    public HiResTimestamp getTimestamp() {
        return new HiResTimestamp(this.timestampSeconds,this.timestampNanos,false);
    }

    /** True when both timestamps fall into the same wall-clock millisecond. */
    public boolean isSameMillisecond(long secondsSinceEpoch,long nanoseconds)
    {
        if ( this.timestampSeconds != secondsSinceEpoch ) {
            return false;
        }
        return (this.timestampNanos / 1000000) == (nanoseconds / 1000000);
    }

    public boolean isAfter(HiResTimestamp timestamp)
    {
        if ( this.timestampSeconds > timestamp.secondsSinceEpoch ) {
            return true;
        }
        if ( this.timestampSeconds < timestamp.secondsSinceEpoch ) {
            return false;
        }
        return this.timestampNanos > timestamp.nanoseconds;
    }

    public boolean isBefore(HiResTimestamp timestamp)
    {
        if ( this.timestampSeconds > timestamp.secondsSinceEpoch ) {
            return false;
        }
        if ( this.timestampSeconds < timestamp.secondsSinceEpoch ) {
            return true;
        }
        return this.timestampNanos < timestamp.nanoseconds;
    }

    public boolean isAfterOrAt(HiResTimestamp timestamp)
    {
        if ( this.timestampSeconds < timestamp.secondsSinceEpoch ) {
            return false;
        }
        if ( this.timestampSeconds > timestamp.secondsSinceEpoch ) {
            return true;
        }
        return this.timestampNanos >= timestamp.nanoseconds;
    }

    /** Timestamp truncated to millisecond precision. */
    public HiResTimestamp getMillisecondTimestamp()
    {
        int millis = (int) (timestampNanos / 1000000.0);
        return new HiResTimestamp( timestampSeconds , millis*1000000 , true );
    }
}
/* * Copyright 1997-2017 Optimatika (www.optimatika.se) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package org.algo.matrix.store.operation;

import java.math.BigDecimal;

import org.algo.access.Access1D;
import org.algo.array.blas.DOT;
import org.algo.concurrent.DivideAndConquer;
import org.algo.constant.PrimitiveMath;
import org.algo.matrix.MatrixUtils;
import org.algo.matrix.store.BigDenseStore.BigMultiplyLeft;
import org.algo.matrix.store.ComplexDenseStore.ComplexMultiplyLeft;
import org.algo.matrix.store.PrimitiveDenseStore.PrimitiveMultiplyLeft;
import org.algo.scalar.ComplexNumber;

/**
 * Left-multiplication kernels: compute {@code product = left * right}, where {@code left}
 * is read through an {@link Access1D} in column-major order, {@code right} and
 * {@code product} are dense column-major arrays, and {@code complexity} is the shared
 * (inner) dimension. The {@code get*(rows, columns)} selectors return a hand-unrolled
 * kernel for small fixed shapes, a multi-threaded variant above {@link #THRESHOLD} rows,
 * or the generic single-threaded fallback otherwise.
 */
public final class MultiplyLeft extends MatrixOperation {

    public static final MultiplyLeft SETUP = new MultiplyLeft();

    // NOTE(review): public mutable static used both as the parallelisation grain and as
    // the row-count cut-over in the get*() selectors; reconfiguring it at runtime is not
    // thread-safe - verify that is acceptable.
    public static int THRESHOLD = 32;

    /** Generic single-threaded BigDecimal kernel: processes every row in one invocation. */
    static final BigMultiplyLeft BIG = (product, left, complexity, right) -> MultiplyLeft.invoke(product, 0, (int) (left.count() / complexity), left, complexity, right);

    /** Multi-threaded BigDecimal kernel: the row range is split among worker tasks. */
    static final BigMultiplyLeft BIG_MT = (product, left, complexity, right) -> {

        final DivideAndConquer tmpConquerer = new DivideAndConquer() {

            @Override
            public void conquer(final int first, final int limit) {
                MultiplyLeft.invoke(product, first, limit, left, complexity, right);
            }

        };

        tmpConquerer.invoke(0, (int) (left.count() / complexity), THRESHOLD);
    };

    /** Generic single-threaded ComplexNumber kernel. */
    static final ComplexMultiplyLeft COMPLEX = (product, left, complexity, right) -> MultiplyLeft.invoke(product, 0, (int) (left.count() / complexity), left, complexity, right);

    /** Multi-threaded ComplexNumber kernel. */
    static final ComplexMultiplyLeft COMPLEX_MT = (product, left, complexity, right) -> {

        final DivideAndConquer tmpConquerer = new DivideAndConquer() {

            @Override
            public void conquer(final int first, final int limit) {
                MultiplyLeft.invoke(product, first, limit, left, complexity, right);
            }

        };

        tmpConquerer.invoke(0, (int) (left.count() / complexity), THRESHOLD);
    };

    /** Generic single-threaded double kernel. */
    static final PrimitiveMultiplyLeft PRIMITIVE = (product, left, complexity, right) -> MultiplyLeft.invoke(product, 0, (int) (left.count() / complexity), left, complexity, right);

    /** Hand-unrolled kernel for a 10-row left matrix ("0XN" = 10 rows, any column count). */
    static final PrimitiveMultiplyLeft PRIMITIVE_0XN = (product, left, complexity, right) -> {

        final int tmpRowDim = 10;
        final int tmpColDim = right.length / complexity;

        for (int j = 0; j < tmpColDim; j++) {

            // One accumulator per product row of this column.
            double tmp0J = PrimitiveMath.ZERO;
            double tmp1J = PrimitiveMath.ZERO;
            double tmp2J = PrimitiveMath.ZERO;
            double tmp3J = PrimitiveMath.ZERO;
            double tmp4J = PrimitiveMath.ZERO;
            double tmp5J = PrimitiveMath.ZERO;
            double tmp6J = PrimitiveMath.ZERO;
            double tmp7J = PrimitiveMath.ZERO;
            double tmp8J = PrimitiveMath.ZERO;
            double tmp9J = PrimitiveMath.ZERO;

            int tmpIndex = 0;
            for (int c = 0; c < complexity; c++) {
                final double tmpRightCJ = right[c + (j * complexity)];
                tmp0J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp1J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp2J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp3J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp4J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp5J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp6J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp7J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp8J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp9J += left.doubleValue(tmpIndex++) * tmpRightCJ;
            }

            product[tmpIndex = j * tmpRowDim] = tmp0J;
            product[++tmpIndex] = tmp1J;
            product[++tmpIndex] = tmp2J;
            product[++tmpIndex] = tmp3J;
            product[++tmpIndex] = tmp4J;
            product[++tmpIndex] = tmp5J;
            product[++tmpIndex] = tmp6J;
            product[++tmpIndex] = tmp7J;
            product[++tmpIndex] = tmp8J;
            product[++tmpIndex] = tmp9J;
        }
    };

    /** 1x1 product: a single dot product of the one left row and the one right column. */
    // NOTE(review): never returned by getPrimitive(...) in this file (rows == 1 maps to
    // PRIMITIVE_1XN) - presumably referenced elsewhere; verify before removing.
    static final PrimitiveMultiplyLeft PRIMITIVE_1X1 = (product, left, complexity, right) -> {

        double tmp00 = PrimitiveMath.ZERO;

        final int tmpLeftStruct = (int) (left.count() / complexity); // The number of rows in the product- and left-matrix.

        for (int c = 0; c < complexity; c++) {
            // Column-major stride: element (0, c) of 'left' lives at c * tmpLeftStruct.
            tmp00 += left.doubleValue(c * tmpLeftStruct) * right[c];
        }

        product[0] = tmp00;
    };

    /** Single-row left matrix, any column count. */
    static final PrimitiveMultiplyLeft PRIMITIVE_1XN = (product, left, complexity, right) -> {

        final int tmpColDim = right.length / complexity;

        for (int j = 0; j < tmpColDim; j++) {

            double tmp0J = PrimitiveMath.ZERO;

            int tmpIndex = 0;
            for (int c = 0; c < complexity; c++) {
                tmp0J += left.doubleValue(tmpIndex++) * right[c + (j * complexity)];
            }

            product[j] = tmp0J;
        }
    };

    /** Fully unrolled 2x2 kernel. */
    static final PrimitiveMultiplyLeft PRIMITIVE_2X2 = (product, left, complexity, right) -> {

        double tmp00 = PrimitiveMath.ZERO;
        double tmp10 = PrimitiveMath.ZERO;
        double tmp01 = PrimitiveMath.ZERO;
        double tmp11 = PrimitiveMath.ZERO;

        int tmpIndex;
        for (int c = 0; c < complexity; c++) {

            // Gather column c of 'left' (2 elements) and row c of 'right' (2 elements).
            tmpIndex = c * 2;
            final double tmpLeft0 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft1 = left.doubleValue(tmpIndex);
            tmpIndex = c;
            final double tmpRight0 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight1 = right[tmpIndex];

            tmp00 += tmpLeft0 * tmpRight0;
            tmp10 += tmpLeft1 * tmpRight0;
            tmp01 += tmpLeft0 * tmpRight1;
            tmp11 += tmpLeft1 * tmpRight1;
        }

        // Column-major store of the 2x2 result.
        product[0] = tmp00;
        product[1] = tmp10;
        product[2] = tmp01;
        product[3] = tmp11;
    };

    /** Fully unrolled 3x3 kernel. */
    static final PrimitiveMultiplyLeft PRIMITIVE_3X3 = (product, left, complexity, right) -> {

        double tmp00 = PrimitiveMath.ZERO;
        double tmp10 = PrimitiveMath.ZERO;
        double tmp20 = PrimitiveMath.ZERO;
        double tmp01 = PrimitiveMath.ZERO;
        double tmp11 = PrimitiveMath.ZERO;
        double tmp21 = PrimitiveMath.ZERO;
        double tmp02 = PrimitiveMath.ZERO;
        double tmp12 = PrimitiveMath.ZERO;
        double tmp22 = PrimitiveMath.ZERO;

        int tmpIndex;
        for (int c = 0; c < complexity; c++) {

            tmpIndex = c * 3;
            final double tmpLeft0 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft1 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft2 = left.doubleValue(tmpIndex);
            tmpIndex = c;
            final double tmpRight0 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight1 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight2 = right[tmpIndex];

            tmp00 += tmpLeft0 * tmpRight0;
            tmp10 += tmpLeft1 * tmpRight0;
            tmp20 += tmpLeft2 * tmpRight0;
            tmp01 += tmpLeft0 * tmpRight1;
            tmp11 += tmpLeft1 * tmpRight1;
            tmp21 += tmpLeft2 * tmpRight1;
            tmp02 += tmpLeft0 * tmpRight2;
            tmp12 += tmpLeft1 * tmpRight2;
            tmp22 += tmpLeft2 * tmpRight2;
        }

        product[0] = tmp00;
        product[1] = tmp10;
        product[2] = tmp20;
        product[3] = tmp01;
        product[4] = tmp11;
        product[5] = tmp21;
        product[6] = tmp02;
        product[7] = tmp12;
        product[8] = tmp22;
    };

    /** Fully unrolled 4x4 kernel. */
    static final PrimitiveMultiplyLeft PRIMITIVE_4X4 = (product, left, complexity, right) -> {

        double tmp00 = PrimitiveMath.ZERO;
        double tmp10 = PrimitiveMath.ZERO;
        double tmp20 = PrimitiveMath.ZERO;
        double tmp30 = PrimitiveMath.ZERO;
        double tmp01 = PrimitiveMath.ZERO;
        double tmp11 = PrimitiveMath.ZERO;
        double tmp21 = PrimitiveMath.ZERO;
        double tmp31 = PrimitiveMath.ZERO;
        double tmp02 = PrimitiveMath.ZERO;
        double tmp12 = PrimitiveMath.ZERO;
        double tmp22 = PrimitiveMath.ZERO;
        double tmp32 = PrimitiveMath.ZERO;
        double tmp03 = PrimitiveMath.ZERO;
        double tmp13 = PrimitiveMath.ZERO;
        double tmp23 = PrimitiveMath.ZERO;
        double tmp33 = PrimitiveMath.ZERO;

        int tmpIndex;
        for (int c = 0; c < complexity; c++) {

            tmpIndex = c * 4;
            final double tmpLeft0 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft1 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft2 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft3 = left.doubleValue(tmpIndex);
            tmpIndex = c;
            final double tmpRight0 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight1 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight2 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight3 = right[tmpIndex];

            tmp00 += tmpLeft0 * tmpRight0;
            tmp10 += tmpLeft1 * tmpRight0;
            tmp20 += tmpLeft2 * tmpRight0;
            tmp30 += tmpLeft3 * tmpRight0;
            tmp01 += tmpLeft0 * tmpRight1;
            tmp11 += tmpLeft1 * tmpRight1;
            tmp21 += tmpLeft2 * tmpRight1;
            tmp31 += tmpLeft3 * tmpRight1;
            tmp02 += tmpLeft0 * tmpRight2;
            tmp12 += tmpLeft1 * tmpRight2;
            tmp22 += tmpLeft2 * tmpRight2;
            tmp32 += tmpLeft3 * tmpRight2;
            tmp03 += tmpLeft0 * tmpRight3;
            tmp13 += tmpLeft1 * tmpRight3;
            tmp23 += tmpLeft2 * tmpRight3;
            tmp33 += tmpLeft3 * tmpRight3;
        }

        product[0] = tmp00;
        product[1] = tmp10;
        product[2] = tmp20;
        product[3] = tmp30;
        product[4] = tmp01;
        product[5] = tmp11;
        product[6] = tmp21;
        product[7] = tmp31;
        product[8] = tmp02;
        product[9] = tmp12;
        product[10] = tmp22;
        product[11] = tmp32;
        product[12] = tmp03;
        product[13] = tmp13;
        product[14] = tmp23;
        product[15] = tmp33;
    };

    /** Fully unrolled 5x5 kernel. */
    static final PrimitiveMultiplyLeft PRIMITIVE_5X5 = (product, left, complexity, right) -> {

        double tmp00 = PrimitiveMath.ZERO;
        double tmp10 = PrimitiveMath.ZERO;
        double tmp20 = PrimitiveMath.ZERO;
        double tmp30 = PrimitiveMath.ZERO;
        double tmp40 = PrimitiveMath.ZERO;
        double tmp01 = PrimitiveMath.ZERO;
        double tmp11 = PrimitiveMath.ZERO;
        double tmp21 = PrimitiveMath.ZERO;
        double tmp31 = PrimitiveMath.ZERO;
        double tmp41 = PrimitiveMath.ZERO;
        double tmp02 = PrimitiveMath.ZERO;
        double tmp12 = PrimitiveMath.ZERO;
        double tmp22 = PrimitiveMath.ZERO;
        double tmp32 = PrimitiveMath.ZERO;
        double tmp42 = PrimitiveMath.ZERO;
        double tmp03 = PrimitiveMath.ZERO;
        double tmp13 = PrimitiveMath.ZERO;
        double tmp23 = PrimitiveMath.ZERO;
        double tmp33 = PrimitiveMath.ZERO;
        double tmp43 = PrimitiveMath.ZERO;
        double tmp04 = PrimitiveMath.ZERO;
        double tmp14 = PrimitiveMath.ZERO;
        double tmp24 = PrimitiveMath.ZERO;
        double tmp34 = PrimitiveMath.ZERO;
        double tmp44 = PrimitiveMath.ZERO;

        int tmpIndex;
        for (int c = 0; c < complexity; c++) {

            tmpIndex = c * 5;
            final double tmpLeft0 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft1 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft2 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft3 = left.doubleValue(tmpIndex);
            tmpIndex++;
            final double tmpLeft4 = left.doubleValue(tmpIndex);
            tmpIndex = c;
            final double tmpRight0 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight1 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight2 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight3 = right[tmpIndex];
            tmpIndex += complexity;
            final double tmpRight4 = right[tmpIndex];

            tmp00 += tmpLeft0 * tmpRight0;
            tmp10 += tmpLeft1 * tmpRight0;
            tmp20 += tmpLeft2 * tmpRight0;
            tmp30 += tmpLeft3 * tmpRight0;
            tmp40 += tmpLeft4 * tmpRight0;
            tmp01 += tmpLeft0 * tmpRight1;
            tmp11 += tmpLeft1 * tmpRight1;
            tmp21 += tmpLeft2 * tmpRight1;
            tmp31 += tmpLeft3 * tmpRight1;
            tmp41 += tmpLeft4 * tmpRight1;
            tmp02 += tmpLeft0 * tmpRight2;
            tmp12 += tmpLeft1 * tmpRight2;
            tmp22 += tmpLeft2 * tmpRight2;
            tmp32 += tmpLeft3 * tmpRight2;
            tmp42 += tmpLeft4 * tmpRight2;
            tmp03 += tmpLeft0 * tmpRight3;
            tmp13 += tmpLeft1 * tmpRight3;
            tmp23 += tmpLeft2 * tmpRight3;
            tmp33 += tmpLeft3 * tmpRight3;
            tmp43 += tmpLeft4 * tmpRight3;
            tmp04 += tmpLeft0 * tmpRight4;
            tmp14 += tmpLeft1 * tmpRight4;
            tmp24 += tmpLeft2 * tmpRight4;
            tmp34 += tmpLeft3 * tmpRight4;
            tmp44 += tmpLeft4 * tmpRight4;
        }

        product[0] = tmp00;
        product[1] = tmp10;
        product[2] = tmp20;
        product[3] = tmp30;
        product[4] = tmp40;
        product[5] = tmp01;
        product[6] = tmp11;
        product[7] = tmp21;
        product[8] = tmp31;
        product[9] = tmp41;
        product[10] = tmp02;
        product[11] = tmp12;
        product[12] = tmp22;
        product[13] = tmp32;
        product[14] = tmp42;
        product[15] = tmp03;
        product[16] = tmp13;
        product[17] = tmp23;
        product[18] = tmp33;
        product[19] = tmp43;
        product[20] = tmp04;
        product[21] = tmp14;
        product[22] = tmp24;
        product[23] = tmp34;
        product[24] = tmp44;
    };

    /** Hand-unrolled kernel for a 6-row left matrix, any column count. */
    static final PrimitiveMultiplyLeft PRIMITIVE_6XN = (product, left, complexity, right) -> {

        final int tmpRowDim = 6;
        final int tmpColDim = right.length / complexity;

        for (int j = 0; j < tmpColDim; j++) {

            double tmp0J = PrimitiveMath.ZERO;
            double tmp1J = PrimitiveMath.ZERO;
            double tmp2J = PrimitiveMath.ZERO;
            double tmp3J = PrimitiveMath.ZERO;
            double tmp4J = PrimitiveMath.ZERO;
            double tmp5J = PrimitiveMath.ZERO;

            int tmpIndex = 0;
            for (int c = 0; c < complexity; c++) {
                final double tmpRightCJ = right[c + (j * complexity)];
                tmp0J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp1J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp2J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp3J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp4J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp5J += left.doubleValue(tmpIndex++) * tmpRightCJ;
            }

            product[tmpIndex = j * tmpRowDim] = tmp0J;
            product[++tmpIndex] = tmp1J;
            product[++tmpIndex] = tmp2J;
            product[++tmpIndex] = tmp3J;
            product[++tmpIndex] = tmp4J;
            product[++tmpIndex] = tmp5J;
        }
    };

    /** Hand-unrolled kernel for a 7-row left matrix, any column count. */
    static final PrimitiveMultiplyLeft PRIMITIVE_7XN = (product, left, complexity, right) -> {

        final int tmpRowDim = 7;
        final int tmpColDim = right.length / complexity;

        for (int j = 0; j < tmpColDim; j++) {

            double tmp0J = PrimitiveMath.ZERO;
            double tmp1J = PrimitiveMath.ZERO;
            double tmp2J = PrimitiveMath.ZERO;
            double tmp3J = PrimitiveMath.ZERO;
            double tmp4J = PrimitiveMath.ZERO;
            double tmp5J = PrimitiveMath.ZERO;
            double tmp6J = PrimitiveMath.ZERO;

            int tmpIndex = 0;
            for (int c = 0; c < complexity; c++) {
                final double tmpRightCJ = right[c + (j * complexity)];
                tmp0J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp1J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp2J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp3J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp4J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp5J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp6J += left.doubleValue(tmpIndex++) * tmpRightCJ;
            }

            product[tmpIndex = j * tmpRowDim] = tmp0J;
            product[++tmpIndex] = tmp1J;
            product[++tmpIndex] = tmp2J;
            product[++tmpIndex] = tmp3J;
            product[++tmpIndex] = tmp4J;
            product[++tmpIndex] = tmp5J;
            product[++tmpIndex] = tmp6J;
        }
    };

    /** Hand-unrolled kernel for an 8-row left matrix, any column count. */
    static final PrimitiveMultiplyLeft PRIMITIVE_8XN = (product, left, complexity, right) -> {

        final int tmpRowDim = 8;
        final int tmpColDim = right.length / complexity;

        for (int j = 0; j < tmpColDim; j++) {

            double tmp0J = PrimitiveMath.ZERO;
            double tmp1J = PrimitiveMath.ZERO;
            double tmp2J = PrimitiveMath.ZERO;
            double tmp3J = PrimitiveMath.ZERO;
            double tmp4J = PrimitiveMath.ZERO;
            double tmp5J = PrimitiveMath.ZERO;
            double tmp6J = PrimitiveMath.ZERO;
            double tmp7J = PrimitiveMath.ZERO;

            int tmpIndex = 0;
            for (int c = 0; c < complexity; c++) {
                final double tmpRightCJ = right[c + (j * complexity)];
                tmp0J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp1J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp2J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp3J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp4J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp5J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp6J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp7J += left.doubleValue(tmpIndex++) * tmpRightCJ;
            }

            product[tmpIndex = j * tmpRowDim] = tmp0J;
            product[++tmpIndex] = tmp1J;
            product[++tmpIndex] = tmp2J;
            product[++tmpIndex] = tmp3J;
            product[++tmpIndex] = tmp4J;
            product[++tmpIndex] = tmp5J;
            product[++tmpIndex] = tmp6J;
            product[++tmpIndex] = tmp7J;
        }
    };

    /** Hand-unrolled kernel for a 9-row left matrix, any column count. */
    static final PrimitiveMultiplyLeft PRIMITIVE_9XN = (product, left, complexity, right) -> {

        final int tmpRowDim = 9;
        final int tmpColDim = right.length / complexity;

        for (int j = 0; j < tmpColDim; j++) {

            double tmp0J = PrimitiveMath.ZERO;
            double tmp1J = PrimitiveMath.ZERO;
            double tmp2J = PrimitiveMath.ZERO;
            double tmp3J = PrimitiveMath.ZERO;
            double tmp4J = PrimitiveMath.ZERO;
            double tmp5J = PrimitiveMath.ZERO;
            double tmp6J = PrimitiveMath.ZERO;
            double tmp7J = PrimitiveMath.ZERO;
            double tmp8J = PrimitiveMath.ZERO;

            int tmpIndex = 0;
            for (int c = 0; c < complexity; c++) {
                final double tmpRightCJ = right[c + (j * complexity)];
                tmp0J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp1J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp2J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp3J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp4J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp5J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp6J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp7J += left.doubleValue(tmpIndex++) * tmpRightCJ;
                tmp8J += left.doubleValue(tmpIndex++) * tmpRightCJ;
            }

            product[tmpIndex = j * tmpRowDim] = tmp0J;
            product[++tmpIndex] = tmp1J;
            product[++tmpIndex] = tmp2J;
            product[++tmpIndex] = tmp3J;
            product[++tmpIndex] = tmp4J;
            product[++tmpIndex] = tmp5J;
            product[++tmpIndex] = tmp6J;
            product[++tmpIndex] = tmp7J;
            product[++tmpIndex] = tmp8J;
        }
    };

    /** Multi-threaded double kernel: the row range is split among worker tasks. */
    static final PrimitiveMultiplyLeft PRIMITIVE_MT = (product, left, complexity, right) -> {

        final DivideAndConquer tmpConquerer = new DivideAndConquer() {

            @Override
            public void conquer(final int first, final int limit) {
                MultiplyLeft.invoke(product, first, limit, left, complexity, right);
            }

        };

        tmpConquerer.invoke(0, (int) (left.count() / complexity), THRESHOLD);
    };

    /** Selects the BigDecimal kernel: multi-threaded above THRESHOLD rows. */
    public static BigMultiplyLeft getBig(final long rows, final long columns) {
        if (rows > THRESHOLD) {
            return BIG_MT;
        } else {
            return BIG;
        }
    }

    /** Selects the ComplexNumber kernel: multi-threaded above THRESHOLD rows. */
    public static ComplexMultiplyLeft getComplex(final long rows, final long columns) {
        if (rows > THRESHOLD) {
            return COMPLEX_MT;
        } else {
            return COMPLEX;
        }
    }

    /**
     * Selects the double kernel: multi-threaded above THRESHOLD rows, otherwise an
     * unrolled kernel matched to the (small) shape, falling back to the generic loop.
     */
    public static PrimitiveMultiplyLeft getPrimitive(final long rows, final long columns) {
        if (rows > THRESHOLD) {
            return PRIMITIVE_MT;
        } else if (rows == 10) {
            return PRIMITIVE_0XN;
        } else if (rows == 9) {
            return PRIMITIVE_9XN;
        } else if (rows == 8) {
            return PRIMITIVE_8XN;
        } else if (rows == 7) {
            return PRIMITIVE_7XN;
        } else if (rows == 6) {
            return PRIMITIVE_6XN;
        } else if ((rows == 5) && (columns == 5)) {
            return PRIMITIVE_5X5;
        } else if ((rows == 4) && (columns == 4)) {
            return PRIMITIVE_4X4;
        } else if ((rows == 3) && (columns == 3)) {
            return PRIMITIVE_3X3;
        } else if ((rows == 2) && (columns == 2)) {
            return PRIMITIVE_2X2;
        } else if (rows == 1) {
            return PRIMITIVE_1XN;
        } else {
            return PRIMITIVE;
        }
    }

    /**
     * Generic BigDecimal row-range kernel: for each row i in [firstRow, rowLimit) the row
     * of 'left' is gathered into a scratch array, then dotted against every column of 'right'.
     */
    static void invoke(final BigDecimal[] product, final int firstRow, final int rowLimit, final Access1D<BigDecimal> left, final int complexity,
            final BigDecimal[] right) {

        final int tmpColDim = right.length / complexity;
        final int tmpRowDim = product.length / tmpColDim;

        final BigDecimal[] tmpLeftRow = new BigDecimal[complexity];

        for (int i = firstRow; i < rowLimit; i++) {

            for (int c = 0; c < complexity; c++) {
                tmpLeftRow[c] = left.get(i + (c * tmpRowDim)); // column-major gather of row i
            }

            for (int j = 0; j < tmpColDim; j++) {
                product[i + (j * tmpRowDim)] = DOT.invoke(tmpLeftRow, 0, right, j * complexity, 0, complexity);
            }
        }
    }

    /** Generic ComplexNumber row-range kernel (same gather-then-dot strategy as above). */
    static void invoke(final ComplexNumber[] product, final int firstRow, final int rowLimit, final Access1D<ComplexNumber> left, final int complexity,
            final ComplexNumber[] right) {

        final int tmpColDim = right.length / complexity;
        final int tmpRowDim = product.length / tmpColDim;

        final ComplexNumber[] tmpLeftRow = new ComplexNumber[complexity];

        for (int i = firstRow; i < rowLimit; i++) {

            for (int c = 0; c < complexity; c++) {
                tmpLeftRow[c] = left.get(i + (c * tmpRowDim)); // column-major gather of row i
            }

            for (int j = 0; j < tmpColDim; j++) {
                product[i + (j * tmpRowDim)] = DOT.invoke(tmpLeftRow, 0, right, j * complexity, 0, complexity);
            }
        }
    }

    /**
     * Generic double row-range kernel. MatrixUtils.firstInRow/limitOfRow bound the non-zero
     * span of each 'left' row, and both the gather and the dot product are restricted to
     * [tmpFirstInRow, tmpLimitOfRow) so structured (banded/triangular) inputs skip zeros.
     */
    static void invoke(final double[] product, final int firstRow, final int rowLimit, final Access1D<?> left, final int complexity, final double[] right) {

        final int tmpColDim = right.length / complexity;
        final int tmpRowDim = product.length / tmpColDim;

        final double[] tmpLeftRow = new double[complexity];

        for (int i = firstRow; i < rowLimit; i++) {

            final int tmpFirstInRow = MatrixUtils.firstInRow(left, i, 0);
            final int tmpLimitOfRow = MatrixUtils.limitOfRow(left, i, complexity);

            for (int c = tmpFirstInRow; c < tmpLimitOfRow; c++) {
                tmpLeftRow[c] = left.doubleValue(i + (c * tmpRowDim));
            }

            for (int j = 0; j < tmpColDim; j++) {
                product[i + (j * tmpRowDim)] = DOT.invoke(tmpLeftRow, 0, right, j * complexity, tmpFirstInRow, tmpLimitOfRow);
            }
        }
    }

    /** Not instantiable beyond the SETUP singleton. */
    private MultiplyLeft() {
        super();
    }

    @Override
    public int threshold() {
        return THRESHOLD;
    }

}
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.schemaorg.core; import com.google.common.collect.ImmutableList; import com.google.schemaorg.JsonLdContext; import com.google.schemaorg.SchemaOrgType; import com.google.schemaorg.core.datatype.Date; import com.google.schemaorg.core.datatype.Text; import com.google.schemaorg.core.datatype.URL; import com.google.schemaorg.goog.PopularityScoreSpecification; import javax.annotation.Nullable; /** Interface of <a href="http://schema.org/Corporation}">http://schema.org/Corporation}</a>. */ public interface Corporation extends Organization { /** * Builder interface of <a * href="http://schema.org/Corporation}">http://schema.org/Corporation}</a>. */ public interface Builder extends Organization.Builder { @Override Builder addJsonLdContext(@Nullable JsonLdContext context); @Override Builder addJsonLdContext(@Nullable JsonLdContext.Builder context); @Override Builder setJsonLdId(@Nullable String value); @Override Builder setJsonLdReverse(String property, Thing obj); @Override Builder setJsonLdReverse(String property, Thing.Builder builder); /** Add a value to property additionalType. */ Builder addAdditionalType(URL value); /** Add a value to property additionalType. */ Builder addAdditionalType(String value); /** Add a value to property address. */ Builder addAddress(PostalAddress value); /** Add a value to property address. 
*/ Builder addAddress(PostalAddress.Builder value); /** Add a value to property address. */ Builder addAddress(Text value); /** Add a value to property address. */ Builder addAddress(String value); /** Add a value to property aggregateRating. */ Builder addAggregateRating(AggregateRating value); /** Add a value to property aggregateRating. */ Builder addAggregateRating(AggregateRating.Builder value); /** Add a value to property aggregateRating. */ Builder addAggregateRating(String value); /** Add a value to property alternateName. */ Builder addAlternateName(Text value); /** Add a value to property alternateName. */ Builder addAlternateName(String value); /** Add a value to property alumni. */ Builder addAlumni(Person value); /** Add a value to property alumni. */ Builder addAlumni(Person.Builder value); /** Add a value to property alumni. */ Builder addAlumni(String value); /** Add a value to property areaServed. */ Builder addAreaServed(AdministrativeArea value); /** Add a value to property areaServed. */ Builder addAreaServed(AdministrativeArea.Builder value); /** Add a value to property areaServed. */ Builder addAreaServed(GeoShape value); /** Add a value to property areaServed. */ Builder addAreaServed(GeoShape.Builder value); /** Add a value to property areaServed. */ Builder addAreaServed(Place value); /** Add a value to property areaServed. */ Builder addAreaServed(Place.Builder value); /** Add a value to property areaServed. */ Builder addAreaServed(Text value); /** Add a value to property areaServed. */ Builder addAreaServed(String value); /** Add a value to property award. */ Builder addAward(Text value); /** Add a value to property award. */ Builder addAward(String value); /** Add a value to property awards. */ Builder addAwards(Text value); /** Add a value to property awards. */ Builder addAwards(String value); /** Add a value to property brand. */ Builder addBrand(Brand value); /** Add a value to property brand. 
*/ Builder addBrand(Brand.Builder value); /** Add a value to property brand. */ Builder addBrand(Organization value); /** Add a value to property brand. */ Builder addBrand(Organization.Builder value); /** Add a value to property brand. */ Builder addBrand(String value); /** Add a value to property contactPoint. */ Builder addContactPoint(ContactPoint value); /** Add a value to property contactPoint. */ Builder addContactPoint(ContactPoint.Builder value); /** Add a value to property contactPoint. */ Builder addContactPoint(String value); /** Add a value to property contactPoints. */ Builder addContactPoints(ContactPoint value); /** Add a value to property contactPoints. */ Builder addContactPoints(ContactPoint.Builder value); /** Add a value to property contactPoints. */ Builder addContactPoints(String value); /** Add a value to property department. */ Builder addDepartment(Organization value); /** Add a value to property department. */ Builder addDepartment(Organization.Builder value); /** Add a value to property department. */ Builder addDepartment(String value); /** Add a value to property description. */ Builder addDescription(Text value); /** Add a value to property description. */ Builder addDescription(String value); /** Add a value to property dissolutionDate. */ Builder addDissolutionDate(Date value); /** Add a value to property dissolutionDate. */ Builder addDissolutionDate(String value); /** Add a value to property duns. */ Builder addDuns(Text value); /** Add a value to property duns. */ Builder addDuns(String value); /** Add a value to property email. */ Builder addEmail(Text value); /** Add a value to property email. */ Builder addEmail(String value); /** Add a value to property employee. */ Builder addEmployee(Person value); /** Add a value to property employee. */ Builder addEmployee(Person.Builder value); /** Add a value to property employee. */ Builder addEmployee(String value); /** Add a value to property employees. 
*/ Builder addEmployees(Person value); /** Add a value to property employees. */ Builder addEmployees(Person.Builder value); /** Add a value to property employees. */ Builder addEmployees(String value); /** Add a value to property event. */ Builder addEvent(Event value); /** Add a value to property event. */ Builder addEvent(Event.Builder value); /** Add a value to property event. */ Builder addEvent(String value); /** Add a value to property events. */ Builder addEvents(Event value); /** Add a value to property events. */ Builder addEvents(Event.Builder value); /** Add a value to property events. */ Builder addEvents(String value); /** Add a value to property faxNumber. */ Builder addFaxNumber(Text value); /** Add a value to property faxNumber. */ Builder addFaxNumber(String value); /** Add a value to property founder. */ Builder addFounder(Person value); /** Add a value to property founder. */ Builder addFounder(Person.Builder value); /** Add a value to property founder. */ Builder addFounder(String value); /** Add a value to property founders. */ Builder addFounders(Person value); /** Add a value to property founders. */ Builder addFounders(Person.Builder value); /** Add a value to property founders. */ Builder addFounders(String value); /** Add a value to property foundingDate. */ Builder addFoundingDate(Date value); /** Add a value to property foundingDate. */ Builder addFoundingDate(String value); /** Add a value to property foundingLocation. */ Builder addFoundingLocation(Place value); /** Add a value to property foundingLocation. */ Builder addFoundingLocation(Place.Builder value); /** Add a value to property foundingLocation. */ Builder addFoundingLocation(String value); /** Add a value to property globalLocationNumber. */ Builder addGlobalLocationNumber(Text value); /** Add a value to property globalLocationNumber. */ Builder addGlobalLocationNumber(String value); /** Add a value to property hasOfferCatalog. 
*/ Builder addHasOfferCatalog(OfferCatalog value); /** Add a value to property hasOfferCatalog. */ Builder addHasOfferCatalog(OfferCatalog.Builder value); /** Add a value to property hasOfferCatalog. */ Builder addHasOfferCatalog(String value); /** Add a value to property hasPOS. */ Builder addHasPOS(Place value); /** Add a value to property hasPOS. */ Builder addHasPOS(Place.Builder value); /** Add a value to property hasPOS. */ Builder addHasPOS(String value); /** Add a value to property image. */ Builder addImage(ImageObject value); /** Add a value to property image. */ Builder addImage(ImageObject.Builder value); /** Add a value to property image. */ Builder addImage(URL value); /** Add a value to property image. */ Builder addImage(String value); /** Add a value to property isicV4. */ Builder addIsicV4(Text value); /** Add a value to property isicV4. */ Builder addIsicV4(String value); /** Add a value to property legalName. */ Builder addLegalName(Text value); /** Add a value to property legalName. */ Builder addLegalName(String value); /** Add a value to property location. */ Builder addLocation(Place value); /** Add a value to property location. */ Builder addLocation(Place.Builder value); /** Add a value to property location. */ Builder addLocation(PostalAddress value); /** Add a value to property location. */ Builder addLocation(PostalAddress.Builder value); /** Add a value to property location. */ Builder addLocation(Text value); /** Add a value to property location. */ Builder addLocation(String value); /** Add a value to property logo. */ Builder addLogo(ImageObject value); /** Add a value to property logo. */ Builder addLogo(ImageObject.Builder value); /** Add a value to property logo. */ Builder addLogo(URL value); /** Add a value to property logo. */ Builder addLogo(String value); /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(CreativeWork value); /** Add a value to property mainEntityOfPage. 
*/ Builder addMainEntityOfPage(CreativeWork.Builder value); /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(URL value); /** Add a value to property mainEntityOfPage. */ Builder addMainEntityOfPage(String value); /** Add a value to property makesOffer. */ Builder addMakesOffer(Offer value); /** Add a value to property makesOffer. */ Builder addMakesOffer(Offer.Builder value); /** Add a value to property makesOffer. */ Builder addMakesOffer(String value); /** Add a value to property member. */ Builder addMember(Organization value); /** Add a value to property member. */ Builder addMember(Organization.Builder value); /** Add a value to property member. */ Builder addMember(Person value); /** Add a value to property member. */ Builder addMember(Person.Builder value); /** Add a value to property member. */ Builder addMember(String value); /** Add a value to property memberOf. */ Builder addMemberOf(Organization value); /** Add a value to property memberOf. */ Builder addMemberOf(Organization.Builder value); /** Add a value to property memberOf. */ Builder addMemberOf(ProgramMembership value); /** Add a value to property memberOf. */ Builder addMemberOf(ProgramMembership.Builder value); /** Add a value to property memberOf. */ Builder addMemberOf(String value); /** Add a value to property members. */ Builder addMembers(Organization value); /** Add a value to property members. */ Builder addMembers(Organization.Builder value); /** Add a value to property members. */ Builder addMembers(Person value); /** Add a value to property members. */ Builder addMembers(Person.Builder value); /** Add a value to property members. */ Builder addMembers(String value); /** Add a value to property naics. */ Builder addNaics(Text value); /** Add a value to property naics. */ Builder addNaics(String value); /** Add a value to property name. */ Builder addName(Text value); /** Add a value to property name. 
*/ Builder addName(String value); /** Add a value to property numberOfEmployees. */ Builder addNumberOfEmployees(QuantitativeValue value); /** Add a value to property numberOfEmployees. */ Builder addNumberOfEmployees(QuantitativeValue.Builder value); /** Add a value to property numberOfEmployees. */ Builder addNumberOfEmployees(String value); /** Add a value to property owns. */ Builder addOwns(OwnershipInfo value); /** Add a value to property owns. */ Builder addOwns(OwnershipInfo.Builder value); /** Add a value to property owns. */ Builder addOwns(Product value); /** Add a value to property owns. */ Builder addOwns(Product.Builder value); /** Add a value to property owns. */ Builder addOwns(String value); /** Add a value to property parentOrganization. */ Builder addParentOrganization(Organization value); /** Add a value to property parentOrganization. */ Builder addParentOrganization(Organization.Builder value); /** Add a value to property parentOrganization. */ Builder addParentOrganization(String value); /** Add a value to property potentialAction. */ Builder addPotentialAction(Action value); /** Add a value to property potentialAction. */ Builder addPotentialAction(Action.Builder value); /** Add a value to property potentialAction. */ Builder addPotentialAction(String value); /** Add a value to property review. */ Builder addReview(Review value); /** Add a value to property review. */ Builder addReview(Review.Builder value); /** Add a value to property review. */ Builder addReview(String value); /** Add a value to property reviews. */ Builder addReviews(Review value); /** Add a value to property reviews. */ Builder addReviews(Review.Builder value); /** Add a value to property reviews. */ Builder addReviews(String value); /** Add a value to property sameAs. */ Builder addSameAs(URL value); /** Add a value to property sameAs. */ Builder addSameAs(String value); /** Add a value to property seeks. 
*/ Builder addSeeks(Demand value); /** Add a value to property seeks. */ Builder addSeeks(Demand.Builder value); /** Add a value to property seeks. */ Builder addSeeks(String value); /** Add a value to property serviceArea. */ Builder addServiceArea(AdministrativeArea value); /** Add a value to property serviceArea. */ Builder addServiceArea(AdministrativeArea.Builder value); /** Add a value to property serviceArea. */ Builder addServiceArea(GeoShape value); /** Add a value to property serviceArea. */ Builder addServiceArea(GeoShape.Builder value); /** Add a value to property serviceArea. */ Builder addServiceArea(Place value); /** Add a value to property serviceArea. */ Builder addServiceArea(Place.Builder value); /** Add a value to property serviceArea. */ Builder addServiceArea(String value); /** Add a value to property subOrganization. */ Builder addSubOrganization(Organization value); /** Add a value to property subOrganization. */ Builder addSubOrganization(Organization.Builder value); /** Add a value to property subOrganization. */ Builder addSubOrganization(String value); /** Add a value to property taxID. */ Builder addTaxID(Text value); /** Add a value to property taxID. */ Builder addTaxID(String value); /** Add a value to property telephone. */ Builder addTelephone(Text value); /** Add a value to property telephone. */ Builder addTelephone(String value); /** Add a value to property tickerSymbol. */ Builder addTickerSymbol(Text value); /** Add a value to property tickerSymbol. */ Builder addTickerSymbol(String value); /** Add a value to property url. */ Builder addUrl(URL value); /** Add a value to property url. */ Builder addUrl(String value); /** Add a value to property vatID. */ Builder addVatID(Text value); /** Add a value to property vatID. */ Builder addVatID(String value); /** Add a value to property detailedDescription. */ Builder addDetailedDescription(Article value); /** Add a value to property detailedDescription. 
*/ Builder addDetailedDescription(Article.Builder value); /** Add a value to property detailedDescription. */ Builder addDetailedDescription(String value); /** Add a value to property popularityScore. */ Builder addPopularityScore(PopularityScoreSpecification value); /** Add a value to property popularityScore. */ Builder addPopularityScore(PopularityScoreSpecification.Builder value); /** Add a value to property popularityScore. */ Builder addPopularityScore(String value); /** * Add a value to property. * * @param name The property name. * @param value The value of the property. */ Builder addProperty(String name, SchemaOrgType value); /** * Add a value to property. * * @param name The property name. * @param builder The schema.org object builder for the property value. */ Builder addProperty(String name, Thing.Builder builder); /** * Add a value to property. * * @param name The property name. * @param value The string value of the property. */ Builder addProperty(String name, String value); /** Build a {@link Corporation} object. */ Corporation build(); } /** * Returns the value list of property tickerSymbol. Empty list is returned if the property not set * in current object. */ ImmutableList<SchemaOrgType> getTickerSymbolList(); }
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.ntp; import android.app.Activity; import android.test.InstrumentationTestCase; import android.test.UiThreadTest; import android.test.suitebuilder.annotation.SmallTest; import android.view.View; import org.chromium.chrome.browser.NativePage; import org.chromium.chrome.browser.Tab; import org.chromium.chrome.browser.UrlConstants; import org.chromium.chrome.browser.ntp.NativePageFactory.NativePageType; import org.chromium.chrome.browser.tabmodel.TabModelSelector; /** * Tests public methods in NativePageFactory. */ public class NativePageFactoryTest extends InstrumentationTestCase { private static class MockNativePage implements NativePage { public final NativePageType type; public int updateForUrlCalls; public MockNativePage(NativePageType type) { this.type = type; } @Override public void updateForUrl(String url) { updateForUrlCalls++; } @Override public String getUrl() { return null; } @Override public String getHost() { switch (type) { case NTP: return UrlConstants.NTP_HOST; case BOOKMARKS: return UrlConstants.BOOKMARKS_HOST; case RECENT_TABS: return UrlConstants.RECENT_TABS_HOST; default: fail("Unexpected NativePageType: " + type); return null; } } @Override public void destroy() {} @Override public String getTitle() { return null; } @Override public int getBackgroundColor() { return 0; } @Override public View getView() { return null; } } private static class MockNativePageBuilder extends NativePageFactory.NativePageBuilder { @Override public NativePage buildNewTabPage(Activity activity, Tab tab, TabModelSelector tabModelSelector) { return new MockNativePage(NativePageType.NTP); } @Override public NativePage buildBookmarksPage(Activity activity, Tab tab, TabModelSelector tabModelSelector) { return new MockNativePage(NativePageType.BOOKMARKS); } @Override public 
NativePage buildRecentTabsPage(Activity activity, Tab tab) { return new MockNativePage(NativePageType.RECENT_TABS); } } private static class UrlCombo { public String url; public NativePageType expectedType; public UrlCombo(String url, NativePageType expectedType) { this.url = url; this.expectedType = expectedType; } } private static final UrlCombo[] VALID_URLS = { new UrlCombo("chrome-native://newtab", NativePageType.NTP), new UrlCombo("chrome-native://newtab/", NativePageType.NTP), new UrlCombo("chrome-native://bookmarks", NativePageType.BOOKMARKS), new UrlCombo("chrome-native://bookmarks/", NativePageType.BOOKMARKS), new UrlCombo("chrome-native://bookmarks/#245", NativePageType.BOOKMARKS), new UrlCombo("chrome-native://recent-tabs", NativePageType.RECENT_TABS), new UrlCombo("chrome-native://recent-tabs/", NativePageType.RECENT_TABS), }; private static final String[] INVALID_URLS = { null, "", "newtab", "newtab@google.com:80", "/newtab", "://newtab", "chrome://", "chrome://newtab", "chrome://newtab#bookmarks", "chrome://newtab/#open_tabs", "chrome://recent-tabs", "chrome://most_visited", "chrome-native://", "chrome-native://newtablet", "chrome-native://bookmarks-inc", "chrome-native://recent_tabs", "chrome-native://recent-tabswitcher", "chrome-native://most_visited", "chrome-native://astronaut", "chrome-internal://newtab", "french-fries://newtab", "http://bookmarks", "https://recent-tabs", "newtab://recent-tabs", "recent-tabs bookmarks", }; private boolean isValidInIncognito(UrlCombo urlCombo) { return urlCombo.expectedType != NativePageType.RECENT_TABS; } /** * Ensures that NativePageFactory.isNativePageUrl() returns true for native page URLs. 
*/ private void runTestPostiveIsNativePageUrl() { for (UrlCombo urlCombo : VALID_URLS) { String url = urlCombo.url; assertTrue(url, NativePageFactory.isNativePageUrl(url, false)); if (isValidInIncognito(urlCombo)) { assertTrue(url, NativePageFactory.isNativePageUrl(url, true)); } } } /** * Ensures that NativePageFactory.isNativePageUrl() returns false for URLs that don't * correspond to a native page. */ private void runTestNegativeIsNativePageUrl() { for (String invalidUrl : INVALID_URLS) { assertFalse(invalidUrl, NativePageFactory.isNativePageUrl(invalidUrl, false)); assertFalse(invalidUrl, NativePageFactory.isNativePageUrl(invalidUrl, true)); } } /** * Ensures that NativePageFactory.createNativePageForURL() returns a native page of the right * type and reuses the candidate page if it's the right type. */ private void runTestCreateNativePage() { NativePageType[] candidateTypes = new NativePageType[] { NativePageType.NONE, NativePageType.NTP, NativePageType.BOOKMARKS, NativePageType.RECENT_TABS }; for (boolean isIncognito : new boolean[] {true, false}) { for (UrlCombo urlCombo : VALID_URLS) { if (isIncognito && !isValidInIncognito(urlCombo)) continue; for (NativePageType candidateType : candidateTypes) { MockNativePage candidate = candidateType == NativePageType.NONE ? 
null : new MockNativePage(candidateType); MockNativePage page = (MockNativePage) NativePageFactory.createNativePageForURL( urlCombo.url, candidate, null, null, null, isIncognito); String debugMessage = String.format( "Failed test case: isIncognito=%s, urlCombo={%s,%s}, candidateType=%s", isIncognito, urlCombo.url, urlCombo.expectedType, candidateType); assertNotNull(debugMessage, page); assertEquals(debugMessage, 1, page.updateForUrlCalls); assertEquals(debugMessage, urlCombo.expectedType, page.type); if (candidateType == urlCombo.expectedType) { assertSame(debugMessage, candidate, page); } else { assertNotSame(debugMessage, candidate, page); } } } } } /** * Ensures that NativePageFactory.createNativePageForURL() returns null for URLs that don't * correspond to a native page. */ private void runTestCreateNativePageWithInvalidUrl() { for (UrlCombo urlCombo : VALID_URLS) { if (!isValidInIncognito(urlCombo)) { assertNull(urlCombo.url, NativePageFactory.createNativePageForURL(urlCombo.url, null, null, null, null, true)); } } for (boolean isIncognito : new boolean[] {true, false}) { for (String invalidUrl : INVALID_URLS) { assertNull(invalidUrl, NativePageFactory.createNativePageForURL(invalidUrl, null, null, null, null, isIncognito)); } } } /** * Runs all the runTest* methods defined above. */ @SmallTest @UiThreadTest public void testNativePageFactory() { NativePageFactory.setNativePageBuilderForTesting(new MockNativePageBuilder()); runTestPostiveIsNativePageUrl(); runTestNegativeIsNativePageUrl(); runTestCreateNativePage(); runTestCreateNativePageWithInvalidUrl(); } }
package cmput301.cauni.easydo.dal;

import android.app.LauncherActivity;
import android.content.Context;
import android.content.SharedPreferences;
import android.text.TextUtils;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;

import cmput301.cauni.easydo.R;
import cmput301.cauni.easydo.bll.TodoItem;
import cmput301.cauni.easydo.view.enums.TodoFiles;
import cmput301.cauni.easydo.view.enums.TodoOption;

/**
 * Data-access layer for todo items. Active and archived lists are persisted
 * as Gson-serialized JSON (via DataParser), cached in-memory by a lazy
 * singleton, and always kept sorted newest-first by creation date.
 * The id seed and scratch user text live in SharedPreferences.
 */
public class TodoData {

    /** Lazy in-memory cache of the active and archived todo lists. */
    private static class TodoDataSingleton {
        private static List<TodoItem> Active = null;
        private static List<TodoItem> Archived = null;
        //TODO: implement lock system

        public static List<TodoItem> getActive(Context c) {
            if (Active == null) {
                Active = loadTodos(c);
            }
            return Active;
        }

        public static List<TodoItem> getArchived(Context c) {
            if (Archived == null) Archived = loadArchivedTodos(c);
            return Archived;
        }

        /** Drops both caches so the next access re-reads from storage. */
        public static void reset() {
            Active = null;
            Archived = null;
        }
    }

    /**
     * Increments the persisted id seed and returns the new value.
     * Each call hands out a fresh unique todo id.
     */
    public final static int getTodoIdSeed(Context c) {
        SharedPreferences sharedPref = c.getSharedPreferences(
                c.getString(R.string.todo_seed_pref_key), Context.MODE_PRIVATE);
        int newSeed = sharedPref.getInt(c.getString(R.string.todo_seed_pref_key), 0) + 1;
        SharedPreferences.Editor editor = sharedPref.edit();
        editor.putInt(c.getString(R.string.todo_seed_pref_key), newSeed);
        // apply() persists asynchronously; the original commit()'s boolean result
        // was ignored, so the synchronous write bought nothing.
        editor.apply();
        return newSeed;
    }

    /** Saves in-progress user input so it can be restored later. */
    public final static void saveUserText(Context c, String text) {
        SharedPreferences sharedPref = c.getSharedPreferences(
                c.getString(R.string.todo_user_text_key), Context.MODE_PRIVATE);
        SharedPreferences.Editor editor = sharedPref.edit();
        editor.putString(c.getString(R.string.todo_user_text_key), text);
        editor.apply();
    }

    /** Returns the saved user input, or "" if none was stored. */
    public final static String recoverUserText(Context c) {
        SharedPreferences sharedPref = c.getSharedPreferences(
                c.getString(R.string.todo_user_text_key), Context.MODE_PRIVATE);
        return sharedPref.getString(c.getString(R.string.todo_user_text_key), "");
    }

    /** Persists the active list (sorted) and invalidates the cache. */
    public final static void SaveTodos(Context c, List<TodoItem> obj) {
        Gson gson = new Gson();
        DataParser.SaveJson(c, TodoFiles.TODO, gson.toJson(Sorter(obj)));
        TodoDataSingleton.reset();
    }

    /** Persists the archived list (sorted) and invalidates the cache. */
    public final static void SaveArchivedTodos(Context c, List<TodoItem> obj) {
        Gson gson = new Gson();
        DataParser.SaveJson(c, TodoFiles.ARCHIVEDTODO, gson.toJson(Sorter(obj)));
        TodoDataSingleton.reset();
    }

    // Reads the active list from storage; empty list when nothing stored yet.
    private final static List<TodoItem> loadTodos(Context c) {
        String aux = DataParser.LoadJson(c, TodoFiles.TODO);
        if (TextUtils.isEmpty(aux)) {
            return new ArrayList<TodoItem>();
        } else {
            Gson gson = new Gson();
            Type listType = new TypeToken<List<TodoItem>>(){}.getType();
            return Sorter((List<TodoItem>) gson.fromJson(aux, listType));
        }
    }

    /** Returns the (cached) active todo list. */
    public final static List<TodoItem> LoadTodos(Context c) {
        return TodoDataSingleton.getActive(c);
    }

    // Reads the archived list from storage; empty list when nothing stored yet.
    private final static List<TodoItem> loadArchivedTodos(Context c) {
        String aux = DataParser.LoadJson(c, TodoFiles.ARCHIVEDTODO);
        if (TextUtils.isEmpty(aux)) {
            return new ArrayList<TodoItem>();
        } else {
            Gson gson = new Gson();
            Type listType = new TypeToken<List<TodoItem>>() {
            }.getType();
            return Sorter((List<TodoItem>) gson.fromJson(aux, listType));
        }
    }

    /** Returns the (cached) archived todo list. */
    public final static List<TodoItem> LoadArchivedTodos(Context c) {
        return TodoDataSingleton.getArchived(c);
    }

    /** Sorts in place, newest creation date first, and returns the same list. */
    protected final static List<TodoItem> Sorter(List<TodoItem> lst) {
        //First sort the list by date of creation
        Collections.sort(lst, new Comparator<TodoItem>() {
            @Override
            public int compare(TodoItem obj1, TodoItem obj2) {
                return obj2.getDateCreated().compareTo(obj1.getDateCreated());
            }
        });
        return lst;
    }

    /**
     * Returns active + archived todos in one list. The cache is reset
     * afterwards because addAll() mutates the cached active list.
     */
    public final static List<TodoItem> LoadAllTodos(Context c) {
        List<TodoItem> aux = TodoDataSingleton.getActive(c);
        aux.addAll(TodoDataSingleton.getArchived(c));
        TodoDataSingleton.reset();
        return aux;
    }

    /**
     * Moves the todos whose ids appear in lst between the active and archived
     * lists (archive=true: active -> archived; false: the reverse).
     *
     * @return 1 on success, 0 on any failure (best-effort, errors swallowed)
     */
    public final static int HandleArchive(Context c, List<Integer> lst, boolean archive) {
        try {
            List<TodoItem> objListSource = archive ? LoadTodos(c) : LoadArchivedTodos(c);
            List<TodoItem> objListDestination = archive ? LoadArchivedTodos(c) : LoadTodos(c);
            for (Iterator<TodoItem> i = objListSource.iterator(); i.hasNext();) {
                TodoItem obj = i.next();
                if (lst.contains(obj.getId())) {
                    obj.Archived = archive;
                    obj.setSelected(false);
                    objListDestination.add(obj);
                    i.remove();
                }
            }
            SaveTodos(c, archive ? objListSource : objListDestination);
            SaveArchivedTodos(c, archive ? objListDestination : objListSource);
        } catch (Exception e) {
            return 0;
        }
        return 1;
    }

    /**
     * Marks the active todos at the given list positions as (un)completed.
     *
     * NOTE(review): lst is used as positional indices here (objList.get(i)),
     * while HandleArchive/HandleDelete treat lst as todo ids
     * (lst.contains(obj.getId())). Confirm with callers which contract is
     * intended — if ids, this method is buggy.
     *
     * @return 1 on success, 0 on any failure (best-effort, errors swallowed)
     */
    public final static int HandleCompleted(Context c, List<Integer> lst, boolean completed) {
        try {
            List<TodoItem> objList = LoadTodos(c);
            for (Integer i : lst) {
                (objList.get(i)).Completed = completed;
            }
            SaveTodos(c, objList);
        } catch (Exception e) {
            return 0;
        }
        return 1;
    }

    /**
     * Deletes the todos whose ids appear in lst from the list selected by opt.
     *
     * @return 1 on success, 0 on any failure (best-effort, errors swallowed)
     */
    public final static int HandleDelete(Context c, TodoOption opt, List<Integer> lst) {
        try {
            List<TodoItem> objList = opt == TodoOption.ACTIVE ? LoadTodos(c) : LoadArchivedTodos(c);
            for (Iterator<TodoItem> i = objList.iterator(); i.hasNext();) {
                TodoItem obj = i.next();
                if (lst.contains(obj.getId())) {
                    i.remove();
                }
            }
            if (opt == TodoOption.ACTIVE)
                SaveTodos(c, objList);
            else
                SaveArchivedTodos(c, objList);
            return 1;
        } catch (Exception e) {
            return 0;
        }
    }

    /**
     * Returns only the todos whose ids appear in lst, drawn from the list
     * selected by opt. Resets the cache because the cached list is filtered
     * in place.
     */
    public final static List<TodoItem> getTodosFromList(Context c, List<Integer> lst,
            TodoOption opt) {
        List<TodoItem> objList = opt == TodoOption.ACTIVE ? LoadTodos(c) : LoadArchivedTodos(c);
        for (Iterator<TodoItem> i = objList.iterator(); i.hasNext();) {
            TodoItem obj = i.next();
            if (!lst.contains(obj.getId())) {
                i.remove();
            }
        }
        TodoDataSingleton.reset();
        return objList;
    }

    /**
     * Computes aggregate counts (totals plus checked/unchecked splits for
     * active and archived lists), keyed by the summary_* names used by the UI.
     */
    public final static HashMap<String, Integer> getSummary(Context c) {
        List<TodoItem> lstActive = LoadTodos(c);
        List<TodoItem> lstArchived = LoadArchivedTodos(c);
        HashMap<String, Integer> aux = new HashMap<String, Integer>();
        int lstActiveSize = lstActive.size();
        int lstArchivedSize = lstArchived.size();
        int checkedActives = 0;
        int uncheckedActives = 0;
        int checkedArchiveds = 0;
        int uncheckedArchiveds = 0;
        for (int i = 0; i < lstActiveSize; i++) {
            TodoItem obj = lstActive.get(i);
            if (obj.isCompleted())
                checkedActives++;
            else
                uncheckedActives++;
        }
        for (int i = 0; i < lstArchivedSize; i++) {
            TodoItem obj = lstArchived.get(i);
            if (obj.isCompleted())
                checkedArchiveds++;
            else
                uncheckedArchiveds++;
        }
        aux.put("summary_total_all", (lstActiveSize + lstArchivedSize));
        aux.put("summary_total", (lstActiveSize));
        aux.put("summary_total_checked", (checkedActives));
        aux.put("summary_total_unchecked", (uncheckedActives));
        aux.put("summary_total_archived", (lstArchivedSize));
        aux.put("summary_total_archived_checked", (checkedArchiveds));
        aux.put("summary_total_archived_unchecked", (uncheckedArchiveds));
        return aux;
    }
}
/**
 *
 * @author degtyarjov
 * @version $Id$
 */
package integration;

import com.haulmont.yarg.formatters.ReportFormatter;
import com.haulmont.yarg.formatters.factory.DefaultFormatterFactory;
import com.haulmont.yarg.formatters.factory.FormatterFactoryInput;
import com.haulmont.yarg.structure.ReportOutputType;
import com.haulmont.yarg.structure.BandData;
import com.haulmont.yarg.structure.BandOrientation;
import com.haulmont.yarg.structure.impl.ReportTemplateImpl;
import junit.framework.Assert;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.junit.Test;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Integration tests for the XLS report formatter: each test renders a band
 * tree through a template and compares the produced workbook cell-by-cell
 * against a stored etalon (reference) workbook.
 */
public class XlsIntegrationTest {

    /** Verifies that cell formats survive rendering (date formatting). */
    @Test
    public void testFormats() throws Exception {
        BandData root = new BandData("Root", null, BandOrientation.HORIZONTAL);
        HashMap<String, Object> rootData = new HashMap<String, Object>();
        root.setData(rootData);
        BandData band1 = new BandData("Band1", root, BandOrientation.HORIZONTAL);
        band1.addData("date", new SimpleDateFormat("dd-MM-yyyy").parse("12-04-1961"));
        root.addChild(band1);

        FileOutputStream outputStream = new FileOutputStream("./result/integration/result-formats.xls");
        ReportFormatter formatter = new DefaultFormatterFactory().createFormatter(new FormatterFactoryInput("xls", root,
                new ReportTemplateImpl("", "./modules/core/test/integration/test-formats.xls",
                        "./modules/core/test/integration/test-formats.xls", ReportOutputType.xls), outputStream));
        formatter.renderDocument();
        IOUtils.closeQuietly(outputStream);

        compareFiles("./modules/core/test/integration/etalon-formats.xls",
                "./result/integration/result-formats.xls");
    }

    /** Verifies that template formulas are recalculated over rendered bands. */
    @Test
    public void testFormulas() throws Exception {
        BandData root = createRootBandForFormulas();

        FileOutputStream outputStream = new FileOutputStream("./result/integration/result-with-formulas.xls");
        ReportFormatter formatter = new DefaultFormatterFactory().createFormatter(new FormatterFactoryInput("xls", root,
                new ReportTemplateImpl("", "smoketest/test.xls",
                        "./modules/core/test/integration/test-with-formulas.xls", ReportOutputType.xls), outputStream));
        formatter.renderDocument();
        IOUtils.closeQuietly(outputStream);

        compareFiles("./modules/core/test/integration/etalon-with-formulas.xls",
                "./result/integration/result-with-formulas.xls");
    }

    /** Verifies aggregation functions over a nested band tree. */
    @Test
    public void testAggregations() throws Exception {
        BandData root = createRootBandForAggregation();

        FileOutputStream outputStream = new FileOutputStream("./result/integration/result-with-aggregation.xls");
        ReportFormatter formatter = new DefaultFormatterFactory().createFormatter(new FormatterFactoryInput("xls", root,
                new ReportTemplateImpl("", "smoketest/test.xls",
                        "./modules/core/test/integration/test-with-aggregation.xls", ReportOutputType.xls), outputStream));
        formatter.renderDocument();
        IOUtils.closeQuietly(outputStream);

        compareFiles("./modules/core/test/integration/etalon-with-aggregation.xls",
                "./result/integration/result-with-aggregation.xls");
    }

    /** Verifies rendering with no data bands at all (empty aggregation). */
    @Test
    public void testAggregationsEmpty() throws Exception {
        BandData root = new BandData("Root", null, BandOrientation.HORIZONTAL);
        HashMap<String, Object> rootData = new HashMap<String, Object>();
        root.setData(rootData);

        FileOutputStream outputStream = new FileOutputStream("./result/integration/result-empty.xls");
        ReportFormatter formatter = new DefaultFormatterFactory().createFormatter(new FormatterFactoryInput("xls", root,
                new ReportTemplateImpl("", "smoketest/test.xls",
                        "./modules/core/test/integration/test-with-aggregation.xls", ReportOutputType.xls), outputStream));
        formatter.renderDocument();
        IOUtils.closeQuietly(outputStream);

        compareFiles("./modules/core/test/integration/etalon-empty.xls",
                "./result/integration/result-empty.xls");
    }

    /**
     * Compares the first 10x10 cells of the first sheet of both workbooks.
     * Fails when a row or cell exists in only one workbook or when numeric
     * values differ.
     *
     * Fix: the original loaded the etalon file into the variable named
     * "result" and the result file into "etalon", inverting expected/actual
     * in assertion reporting; the loads now match the parameter names.
     */
    private void compareFiles(String etalonFile, String resultFile) throws IOException {
        HSSFWorkbook etalon = new HSSFWorkbook(FileUtils.openInputStream(new File(etalonFile)));
        HSSFWorkbook result = new HSSFWorkbook(FileUtils.openInputStream(new File(resultFile)));

        HSSFSheet resultSheet = result.getSheetAt(0);
        HSSFSheet etalonSheet = etalon.getSheetAt(0);

        for (int row = 0; row < 10; row++) {
            HSSFRow resultRow = resultSheet.getRow(row);
            HSSFRow etalonRow = etalonSheet.getRow(row);
            if (resultRow == null && etalonRow == null) {
                continue;
            } else if ((resultRow == null) || (etalonRow == null)) {
                Assert.fail("fail on row [" + row + "]");
            }

            for (int cell = 0; cell < 10; cell++) {
                HSSFCell resultCell = resultRow.getCell(cell);
                HSSFCell etalonCell = etalonRow.getCell(cell);
                if (resultCell != null && etalonCell != null) {
                    Assert.assertEquals(String.format("fail on cell [%d,%d]", row, cell),
                            etalonCell.getNumericCellValue(), resultCell.getNumericCellValue());
                } else if (resultCell != null || etalonCell != null) {
                    // Exactly one of the two cells exists.
                    Assert.fail(String.format("fail on cell [%d,%d]", row, cell));
                }
            }
        }
    }

    // Three Band1 rows (values 1..9) plus a Footer band, for formula tests.
    private BandData createRootBandForFormulas() {
        BandData root = new BandData("Root", null, BandOrientation.HORIZONTAL);
        HashMap<String, Object> rootData = new HashMap<String, Object>();
        root.setData(rootData);
        BandData band1_1 = new BandData("Band1", root, BandOrientation.HORIZONTAL);
        BandData band1_2 = new BandData("Band1", root, BandOrientation.HORIZONTAL);
        BandData band1_3 = new BandData("Band1", root, BandOrientation.HORIZONTAL);
        BandData footer = new BandData("Footer", root, BandOrientation.HORIZONTAL);

        Map<String, Object> datamap = new HashMap<String, Object>();
        datamap.put("col1", 1);
        datamap.put("col2", 2);
        datamap.put("col3", 3);
        band1_1.setData(datamap);

        Map<String, Object> datamap2 = new HashMap<String, Object>();
        datamap2.put("col1", 4);
        datamap2.put("col2", 5);
        datamap2.put("col3", 6);
        band1_2.setData(datamap2);

        Map<String, Object> datamap3 = new HashMap<String, Object>();
        datamap3.put("col1", 7);
        datamap3.put("col2", 8);
        datamap3.put("col3", 9);
        band1_3.setData(datamap3);

        root.addChild(band1_1);
        root.addChild(band1_2);
        root.addChild(band1_3);
        root.addChild(footer);
        root.setFirstLevelBandDefinitionNames(new HashSet<String>());
        root.getFirstLevelBandDefinitionNames().add("Band1");
        return root;
    }

    // Nested Band1/Band2/Band3 tree exercising horizontal and vertical bands.
    private BandData createRootBandForAggregation() {
        BandData root = new BandData("Root", null, BandOrientation.HORIZONTAL);
        HashMap<String, Object> rootData = new HashMap<String, Object>();
        root.setData(rootData);

        BandData band1_1 = band(1, 2, BandOrientation.HORIZONTAL, null, "Band1");
        BandData band2_1 = band(11, 22, BandOrientation.HORIZONTAL, null, "Band2");
        BandData band2_2 = band(12, 23, BandOrientation.HORIZONTAL, null, "Band2");
        band1_1.addChildren(Arrays.asList(band2_1, band2_2));

        BandData band1_2 = band(2, 3, BandOrientation.HORIZONTAL, null, "Band1");
        BandData band2_3 = band(13, 24, BandOrientation.HORIZONTAL, null, "Band2");
        BandData band3_1 = band(111, null, BandOrientation.VERTICAL, band2_3, "Band3");
        BandData band3_2 = band(222, null, BandOrientation.VERTICAL, band2_3, "Band3");
        band1_2.addChildren(Arrays.asList(band2_3));
        band2_3.addChildren(Arrays.asList(band3_1, band3_2));

        BandData band1_3 = band(3, 4, BandOrientation.HORIZONTAL, null, "Band1");

        root.addChild(band1_1);
        root.addChild(band1_2);
        root.addChild(band1_3);
        root.setFirstLevelBandDefinitionNames(new HashSet<String>());
        root.getFirstLevelBandDefinitionNames().add("Band1");
        return root;
    }

    /** Builds a band with col1/col2 data; col2 may be null (left unset in cells). */
    private BandData band(int col1, Integer col2, BandOrientation orientation, BandData parentBand,
                          String name) {
        BandData band1_1 = new BandData(name, parentBand, orientation);
        Map<String, Object> datamap = new HashMap<String, Object>();
        datamap.put("col1", col1);
        datamap.put("col2", col2);
        band1_1.setData(datamap);
        return band1_1;
    }
}
package mil.nga.giat.geowave.cli.geoserver;

import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.ws.rs.PathParam;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

import com.beust.jcommander.ParameterException;

import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter;
import mil.nga.giat.geowave.adapter.vector.GeotoolsFeatureDataAdapter;
import mil.nga.giat.geowave.cli.geoserver.GeoServerAddLayerCommand.AddOption;
import mil.nga.giat.geowave.core.store.CloseableIterator;
import mil.nga.giat.geowave.core.store.adapter.AdapterStore;
import mil.nga.giat.geowave.core.store.adapter.DataAdapter;
import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions;
import mil.nga.giat.geowave.core.store.operations.remote.options.StoreLoader;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

/**
 * Thin REST client for administering a GeoServer instance (workspaces,
 * datastores, coverage stores, layers and styles) backed by GeoWave stores.
 */
public class GeoServerRestClient {
	private final static Logger LOGGER = Logger.getLogger(GeoServerRestClient.class);
	private final static int defaultIndentation = 2;

	/** Minimal holder for an adapter ID and whether it is a raster adapter. */
	static private class DataAdapterInfo {
		String adapterId;
		Boolean isRaster;
	}

	private final GeoServerConfig config;
	private WebTarget webTarget = null;

	public GeoServerRestClient(
			final GeoServerConfig config ) {
		this.config = config;
		// TODO(review): forcing DEBUG here overrides any user logging
		// configuration for this class; consider removing once the REST
		// interactions no longer need verbose tracing.
		LOGGER.setLevel(Level.DEBUG);
	}

	/**
	 * @return the GeoServer connection configuration this client was built with
	 */
	public GeoServerConfig getConfig() {
		return config;
	}

	// Lazily creates the JAX-RS target with HTTP basic auth from the config.
	private WebTarget getWebTarget() {
		if (webTarget == null) {
			final Client client = ClientBuilder.newClient().register(
					HttpAuthenticationFeature.basic(
							config.getUser(),
							config.getPass()));
			webTarget = client.target(config.getUrl());
		}
		return webTarget;
	}

	/**
	 * Convenience - add layer(s) for the given store to geoserver
	 *
	 * @param workspaceName target workspace (created if missing)
	 * @param storeName GeoWave store name
	 * @param adapterId adapter selector (null, an ID, or an AddOption name)
	 * @param defaultStyle default style for new feature layers (may be null)
	 * @return the last REST response, or a JSON summary on success
	 */
	public Response addLayer(
			final String workspaceName,
			final String storeName,
			final String adapterId,
			final String defaultStyle ) {
		// retrieve the adapter info list for the store
		final ArrayList<DataAdapterInfo> adapterInfoList = getStoreAdapterInfo(
				storeName,
				adapterId);

		LOGGER.debug("Finished retrieving adapter list");

		if ((adapterInfoList.size() > 1) && (adapterId == null)) {
			LOGGER.debug("addlayer doesn't know how to deal with multiple adapters");

			final String descr = "Please use -a, or choose one of these with -id:";
			final JSONObject jsonObj = getJsonFromAdapters(
					adapterInfoList,
					descr);

			LOGGER.debug(jsonObj);

			return Response.ok(
					jsonObj.toString(defaultIndentation)).build();
		}

		// verify the workspace exists
		if (!workspaceExists(workspaceName)) {
			LOGGER.debug("addlayer needs to create the " + workspaceName + " workspace");

			final Response addWsResponse = addWorkspace(workspaceName);
			if (addWsResponse.getStatus() != Status.CREATED.getStatusCode()) {
				return addWsResponse;
			}
		}

		final String cvgStoreName = storeName + GeoServerConfig.DEFAULT_CS;
		final String dataStoreName = storeName + GeoServerConfig.DEFAULT_DS;

		// iterate through data adapters
		for (final DataAdapterInfo dataAdapterInfo : adapterInfoList) {
			// handle coverage stores & coverages
			if (dataAdapterInfo.isRaster) {
				// verify coverage store exists
				final Response getCsResponse = getCoverageStore(
						workspaceName,
						cvgStoreName);
				if (getCsResponse.getStatus() == Status.NOT_FOUND.getStatusCode()) {
					final Response addCsResponse = addCoverageStore(
							workspaceName,
							cvgStoreName,
							storeName,
							null,
							null,
							null);

					if (addCsResponse.getStatus() != Status.CREATED.getStatusCode()) {
						return addCsResponse;
					}
				}
				else if (getCsResponse.getStatus() != Status.OK.getStatusCode()) {
					return getCsResponse;
				}

				// See if the coverage already exists
				final Response getCvResponse = getCoverage(
						workspaceName,
						cvgStoreName,
						dataAdapterInfo.adapterId);
				if (getCvResponse.getStatus() == Status.OK.getStatusCode()) {
					LOGGER.debug(dataAdapterInfo.adapterId + " layer already exists");
					continue;
				}

				// We have a coverage store. Add the layer per the adapter ID
				final Response addCvResponse = addCoverage(
						workspaceName,
						cvgStoreName,
						dataAdapterInfo.adapterId);
				if (addCvResponse.getStatus() != Status.CREATED.getStatusCode()) {
					return addCvResponse;
				}
			}
			// handle datastores and feature layers
			else {
				// verify datastore exists
				final Response getDsResponse = getDatastore(
						workspaceName,
						dataStoreName);
				if (getDsResponse.getStatus() == Status.NOT_FOUND.getStatusCode()) {
					final Response addDsResponse = addDatastore(
							workspaceName,
							dataStoreName,
							storeName);
					if (addDsResponse.getStatus() != Status.CREATED.getStatusCode()) {
						return addDsResponse;
					}
				}
				else if (getDsResponse.getStatus() != Status.OK.getStatusCode()) {
					return getDsResponse;
				}

				LOGGER.debug("Checking for existing feature layer: " + dataAdapterInfo.adapterId);

				// See if the feature layer already exists
				final Response getFlResponse = getFeatureLayer(dataAdapterInfo.adapterId);
				if (getFlResponse.getStatus() == Status.OK.getStatusCode()) {
					LOGGER.debug(dataAdapterInfo.adapterId + " layer already exists");
					continue;
				}

				LOGGER.debug("Get feature layer: " + dataAdapterInfo.adapterId + " returned "
						+ getFlResponse.getStatus());

				// We have a datastore. Add the layer per the adapter ID
				final Response addFlResponse = addFeatureLayer(
						workspaceName,
						dataStoreName,
						dataAdapterInfo.adapterId,
						defaultStyle);
				if (addFlResponse.getStatus() != Status.CREATED.getStatusCode()) {
					return addFlResponse;
				}
			}
		}

		// Report back to the caller the adapter IDs and the types that were
		// used to create the layers
		final JSONObject jsonObj = getJsonFromAdapters(
				adapterInfoList,
				"Successfully added:");

		return Response.ok(
				jsonObj.toString(defaultIndentation)).build();
	}

	/**
	 * Get JSON object(s) from adapter list
	 *
	 * @param adapterInfoList adapters to describe
	 * @param description human-readable header for the JSON payload
	 * @return JSONObject of the form {description, layers:[{id,type},...]}
	 */
	private JSONObject getJsonFromAdapters(
			final ArrayList<DataAdapterInfo> adapterInfoList,
			final String description ) {
		// StringBuilder: this buffer is method-local, no synchronization needed
		final StringBuilder buf = new StringBuilder();

		// If we made it this far, let's just iterate through the adapter IDs
		// and build the JSON response data
		buf.append("{'description':'" + description + "', " + "'layers':[");

		for (int i = 0; i < adapterInfoList.size(); i++) {
			final DataAdapterInfo info = adapterInfoList.get(i);

			buf.append("{'id':'" + info.adapterId + "',");
			buf.append("'type':'" + (info.isRaster ? "raster" : "vector") + "'}");

			if (i < (adapterInfoList.size() - 1)) {
				buf.append(",");
			}
		}

		buf.append("]}");

		return JSONObject.fromObject(buf.toString());
	}

	/**
	 * Check if workspace exists
	 *
	 * @param workspace workspace name; falls back to the configured default when null
	 * @return true if workspace exists, false if not
	 */
	public boolean workspaceExists(
			String workspace ) {
		if (workspace == null) {
			workspace = config.getWorkspace();
		}

		final Response getWsResponse = getWorkspaces();
		if (getWsResponse.getStatus() == Status.OK.getStatusCode()) {
			final JSONObject jsonResponse = JSONObject.fromObject(getWsResponse.getEntity());

			final JSONArray workspaces = jsonResponse.getJSONArray("workspaces");
			for (int i = 0; i < workspaces.size(); i++) {
				final String wsName = workspaces.getJSONObject(
						i).getString(
						"name");
				if (wsName.equals(workspace)) {
					return true;
				}
			}
		}
		else {
			// FIX: corrected typo ("retieving") in the error message
			LOGGER.error("Error retrieving GeoServer workspace list");
		}

		return false;
	}

	/**
	 * Get list of workspaces from geoserver
	 *
	 * @return JSON response listing workspace names
	 */
	public Response getWorkspaces() {
		final Response resp = getWebTarget().path(
				"rest/workspaces.json").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			// get the workspace names
			final JSONArray workspaceArray = getArrayEntryNames(
					JSONObject.fromObject(resp.readEntity(String.class)),
					"workspaces",
					"workspace");

			final JSONObject workspacesObj = new JSONObject();
			workspacesObj.put(
					"workspaces",
					workspaceArray);

			return Response.ok(
					workspacesObj.toString(defaultIndentation)).build();
		}

		return resp;
	}

	/**
	 * Add workspace to geoserver
	 *
	 * @param workspace workspace name to create
	 * @return REST response
	 */
	public Response addWorkspace(
			final String workspace ) {
		return getWebTarget().path(
				"rest/workspaces").request().post(
				Entity.entity(
						"{'workspace':{'name':'" + workspace + "'}}",
						MediaType.APPLICATION_JSON));
	}

	/**
	 * Delete workspace from geoserver
	 *
	 * @param workspace workspace name to delete (recursively)
	 * @return REST response
	 */
	public Response deleteWorkspace(
			final String workspace ) {
		return getWebTarget().path(
				"rest/workspaces/" + workspace).queryParam(
				"recurse",
				"true").request().delete();
	}

	/**
	 * Get the string version of a datastore JSONObject from geoserver
	 *
	 * @param workspaceName workspace the datastore lives in
	 * @param datastoreName datastore name
	 * @return REST response (pretty-printed JSON on success)
	 */
	public Response getDatastore(
			final String workspaceName,
			final String datastoreName ) {
		final Response resp = getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/datastores/" + datastoreName + ".json").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			final JSONObject datastore = JSONObject.fromObject(resp.readEntity(String.class));

			if (datastore != null) {
				return Response.ok(
						datastore.toString(defaultIndentation)).build();
			}
		}

		return resp;
	}

	/**
	 * Get list of Datastore names from geoserver
	 *
	 * @param workspaceName workspace to list datastores for
	 * @return REST response (JSON list of datastore names on success)
	 */
	public Response getDatastores(
			final String workspaceName ) {
		final Response resp = getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/datastores.json").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			// get the datastore names
			final JSONArray datastoreArray = getArrayEntryNames(
					JSONObject.fromObject(resp.readEntity(String.class)),
					"dataStores",
					"dataStore");

			final JSONObject dsObj = new JSONObject();
			dsObj.put(
					"dataStores",
					datastoreArray);

			return Response.ok(
					dsObj.toString(defaultIndentation)).build();
		}

		return resp;
	}

	/**
	 * Add a geowave datastore to geoserver
	 *
	 * @param workspaceName target workspace
	 * @param datastoreName datastore name; defaults to gwStoreName + suffix when empty
	 * @param gwStoreName GeoWave store to wrap
	 * @return REST response
	 */
	public Response addDatastore(
			final String workspaceName,
			String datastoreName,
			final String gwStoreName ) {
		final DataStorePluginOptions inputStoreOptions = getStorePlugin(gwStoreName);

		if ((datastoreName == null) || datastoreName.isEmpty()) {
			datastoreName = gwStoreName + GeoServerConfig.DEFAULT_DS;
		}

		final String lockMgmt = "memory";
		final String authMgmtPrvdr = "empty";
		final String authDataUrl = "";
		final String queryIndexStrategy = "Best Match";

		final String dataStoreJson = createDatastoreJson(
				inputStoreOptions.getType(),
				inputStoreOptions.getOptionsAsMap(),
				datastoreName,
				lockMgmt,
				authMgmtPrvdr,
				authDataUrl,
				queryIndexStrategy,
				true);

		// create a new geoserver style
		return getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/datastores").request().post(
				Entity.entity(
						dataStoreJson,
						MediaType.APPLICATION_JSON));
	}

	/**
	 * Delete a geowave datastore from geoserver
	 *
	 * @param workspaceName workspace the datastore lives in
	 * @param datastoreName datastore to delete (recursively)
	 * @return REST response
	 */
	public Response deleteDatastore(
			final String workspaceName,
			final String datastoreName ) {
		return getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/datastores/" + datastoreName).queryParam(
				"recurse",
				"true").request().delete();
	}

	/**
	 * Get a layer from geoserver
	 *
	 * @param layerName layer to fetch
	 * @return REST response (pretty-printed JSON on success)
	 */
	public Response getFeatureLayer(
			final String layerName ) {
		final Response resp = getWebTarget().path(
				"rest/layers/" + layerName + ".json").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			final JSONObject layer = JSONObject.fromObject(resp.readEntity(String.class));

			if (layer != null) {
				return Response.ok(
						layer.toString(defaultIndentation)).build();
			}
		}

		return resp;
	}

	/**
	 * Get list of layers from geoserver
	 *
	 * @param workspaceName
	 *            : if null, don't filter on workspace
	 * @param datastoreName
	 *            : if null, don't filter on datastore
	 * @param geowaveOnly
	 *            : if true, only return geowave layers
	 * @return REST response (layers grouped by namespace when geowaveOnly)
	 */
	public Response getFeatureLayers(
			final String workspaceName,
			final String datastoreName,
			final boolean geowaveOnly ) {
		final boolean wsFilter = ((workspaceName != null) && !workspaceName.isEmpty());
		final boolean dsFilter = ((datastoreName != null) && !datastoreName.isEmpty());

		final Response resp = getWebTarget().path(
				"rest/layers.json").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			// get the datastore names
			final JSONArray layerArray = getArrayEntryNames(
					JSONObject.fromObject(resp.readEntity(String.class)),
					"layers",
					"layer");

			// holder for simple layer info (when geowaveOnly = false)
			final JSONArray layerInfoArray = new JSONArray();

			final Map<String, List<String>> namespaceLayersMap = new HashMap<String, List<String>>();
			final Pattern p = Pattern.compile("workspaces/(.*?)/datastores/(.*?)/");
			for (int i = 0; i < layerArray.size(); i++) {
				// no filtering of any kind
				final boolean include = !geowaveOnly && !wsFilter && !dsFilter;

				if (include) { // just grab it...
					layerInfoArray.add(layerArray.getJSONObject(i));
					continue; // and move on
				}

				// at this point, we are filtering somehow. get some more info
				// about the layer
				final String name = layerArray.getJSONObject(
						i).getString(
						"name");

				final String layer = (String) getFeatureLayer(
						name).getEntity();

				// get the workspace and name for each datastore
				String ws = null;
				String ds = null;

				final Matcher m = p.matcher(layer);

				if (m.find()) {
					ws = m.group(1);
					ds = m.group(2);
				}

				// filter on datastore?
				if (!dsFilter || ((ds != null) && ds.equals(datastoreName))) {

					// filter on workspace?
					if (!wsFilter || ((ws != null) && ws.equals(workspaceName))) {
						// BUG FIX: the arguments were previously passed as
						// (ds, ws); getDatastore expects (workspaceName,
						// datastoreName).
						final JSONObject datastore = JSONObject.fromObject(
								getDatastore(
										ws,
										ds).getEntity()).getJSONObject(
								"dataStore");

						// only process GeoWave layers
						if (geowaveOnly) {
							if ((datastore != null) && datastore.containsKey("type") && datastore.getString(
									"type").startsWith(
									"GeoWave Datastore")) {

								JSONArray entryArray = null;
								if (datastore.get("connectionParameters") instanceof JSONObject) {
									entryArray = datastore.getJSONObject(
											"connectionParameters").getJSONArray(
											"entry");
								}
								else if (datastore.get("connectionParameters") instanceof JSONArray) {
									entryArray = datastore.getJSONArray(
											"connectionParameters").getJSONObject(
											0).getJSONArray(
											"entry");
								}

								if (entryArray == null) {
									LOGGER
											.error("entry Array is null - didn't find a connectionParameters datastore object that was a JSONObject or JSONArray");
								}
								else {
									// group layers by namespace
									for (int j = 0; j < entryArray.size(); j++) {
										final JSONObject entry = entryArray.getJSONObject(j);
										final String key = entry.getString("@key");
										final String value = entry.getString("$");

										if (key.startsWith("gwNamespace")) {
											if (namespaceLayersMap.containsKey(value)) {
												namespaceLayersMap.get(
														value).add(
														name);
											}
											else {
												final ArrayList<String> layers = new ArrayList<String>();
												layers.add(name);
												namespaceLayersMap.put(
														value,
														layers);
											}
											break;
										}
									}
								}
							}
						}
						else { // just get all the layers from this store
							layerInfoArray.add(layerArray.getJSONObject(i));
						}
					}
				}
			}

			// Handle geowaveOnly response
			if (geowaveOnly) {
				// create the json object with layers sorted by namespace
				final JSONArray layersArray = new JSONArray();
				for (final Map.Entry<String, List<String>> kvp : namespaceLayersMap.entrySet()) {
					final JSONArray layers = new JSONArray();

					for (int i = 0; i < kvp.getValue().size(); i++) {
						final JSONObject layerObj = new JSONObject();
						layerObj.put(
								"name",
								kvp.getValue().get(
										i));
						layers.add(layerObj);
					}

					final JSONObject layersObj = new JSONObject();
					layersObj.put(
							"namespace",
							kvp.getKey());
					layersObj.put(
							"layers",
							layers);

					layersArray.add(layersObj);
				}

				final JSONObject layersObj = new JSONObject();
				layersObj.put(
						"layers",
						layersArray);

				return Response.ok(
						layersObj.toString(defaultIndentation)).build();
			}
			else {
				final JSONObject layersObj = new JSONObject();
				layersObj.put(
						"layers",
						layerInfoArray);

				return Response.ok(
						layersObj.toString(defaultIndentation)).build();
			}
		}

		return resp;
	}

	/**
	 * Add feature layer to geoserver
	 *
	 * @param workspaceName target workspace
	 * @param datastoreName datastore holding the feature type
	 * @param layerName feature type / layer name
	 * @param defaultStyle default style to assign first (skipped when null)
	 * @return REST response
	 */
	public Response addFeatureLayer(
			final String workspaceName,
			final String datastoreName,
			final String layerName,
			final String defaultStyle ) {
		if (defaultStyle != null) {
			getWebTarget().path(
					"rest/layers/" + layerName + ".json").request().put(
					Entity.entity(
							"{'layer':{'defaultStyle':{'name':'" + defaultStyle + "'}}}",
							MediaType.APPLICATION_JSON));
		}

		return getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/datastores/" + datastoreName + "/featuretypes").request().post(
				Entity.entity(
						"{'featureType':{'name':'" + layerName + "'}}",
						MediaType.APPLICATION_JSON));
	}

	/**
	 * Delete a feature layer from geoserver
	 *
	 * @param layerName layer to delete
	 * @return REST response
	 */
	public Response deleteFeatureLayer(
			final String layerName ) {
		return getWebTarget().path(
				"rest/layers/" + layerName).request().delete();
	}

	/**
	 * Change the default style of a layer
	 *
	 * @param layerName layer to update
	 * @param styleName style to set as default
	 * @return REST response
	 */
	public Response setLayerStyle(
			final String layerName,
			final String styleName ) {
		return getWebTarget().path(
				"rest/layers/" + layerName + ".json").request().put(
				Entity.entity(
						"{'layer':{'defaultStyle':{'name':'" + styleName + "'}}}",
						MediaType.APPLICATION_JSON));
	}

	/**
	 * Get a geoserver style
	 *
	 * @param styleName style to fetch as SLD
	 * @return REST response (SLD attachment on success)
	 */
	public Response getStyle(
			@PathParam("styleName")
			final String styleName ) {
		final Response resp = getWebTarget().path(
				"rest/styles/" + styleName + ".sld").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			final InputStream inStream = (InputStream) resp.getEntity();

			return Response.ok(
					inStream,
					MediaType.APPLICATION_XML).header(
					"Content-Disposition",
					"attachment; filename=\"" + styleName + ".sld\"").build();
		}

		return resp;
	}

	/**
	 * Get a list of geoserver styles
	 *
	 * @return REST response (JSON list of style names on success)
	 */
	public Response getStyles() {
		final Response resp = getWebTarget().path(
				"rest/styles.json").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			// get the style names
			final JSONArray styleArray = getArrayEntryNames(
					JSONObject.fromObject(resp.readEntity(String.class)),
					"styles",
					"style");

			final JSONObject stylesObj = new JSONObject();
			stylesObj.put(
					"styles",
					styleArray);

			return Response.ok(
					stylesObj.toString(defaultIndentation)).build();
		}

		return resp;
	}

	/**
	 * Add a style to geoserver
	 *
	 * @param styleName name for the new style
	 * @param fileInStream SLD content to upload
	 * @return REST response of the SLD upload
	 */
	public Response addStyle(
			final String styleName,
			final InputStream fileInStream ) {
		// NOTE(review): the response of this creation call is intentionally
		// not checked; the subsequent PUT carries the actual SLD body.
		getWebTarget().path(
				"rest/styles").request().post(
				Entity.entity(
						"{'style':{'name':'" + styleName + "','filename':'" + styleName + ".sld'}}",
						MediaType.APPLICATION_JSON));

		return getWebTarget().path(
				"rest/styles/" + styleName).request().put(
				Entity.entity(
						fileInStream,
						"application/vnd.ogc.sld+xml"));
	}

	/**
	 * Delete a style from geoserver
	 *
	 * @param styleName style to delete
	 * @return REST response
	 */
	public Response deleteStyle(
			final String styleName ) {
		return getWebTarget().path(
				"rest/styles/" + styleName).request().delete();
	}

	/**
	 * Get coverage store from geoserver
	 *
	 * @param workspaceName workspace the coverage store lives in
	 * @param coverageName coverage store name
	 * @return REST response (pretty-printed JSON on success)
	 */
	public Response getCoverageStore(
			final String workspaceName,
			final String coverageName ) {
		final Response resp = getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/coveragestores/" + coverageName + ".json").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			final JSONObject cvgstore = JSONObject.fromObject(resp.readEntity(String.class));

			if (cvgstore != null) {
				return Response.ok(
						cvgstore.toString(defaultIndentation)).build();
			}
		}

		return resp;
	}

	/**
	 * Get a list of coverage stores from geoserver
	 *
	 * @param workspaceName workspace to list coverage stores for
	 * @return REST response (JSON list of coverage store names on success)
	 */
	public Response getCoverageStores(
			final String workspaceName ) {
		final Response resp = getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/coveragestores.json").request().get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			// get the datastore names
			final JSONArray coveragesArray = getArrayEntryNames(
					JSONObject.fromObject(resp.readEntity(String.class)),
					"coverageStores",
					"coverageStore");

			final JSONObject dsObj = new JSONObject();
			dsObj.put(
					"coverageStores",
					coveragesArray);

			return Response.ok(
					dsObj.toString(defaultIndentation)).build();
		}

		return resp;
	}

	/**
	 * Add coverage store to geoserver
	 *
	 * @param workspaceName target workspace
	 * @param cvgStoreName coverage store name; defaults to gwStoreName + suffix when empty
	 * @param gwStoreName GeoWave store to wrap
	 * @param equalizeHistogramOverride optional histogram-equalization override
	 * @param interpolationOverride optional interpolation override
	 * @param scaleTo8Bit optional 8-bit scaling override
	 * @return REST response
	 */
	public Response addCoverageStore(
			final String workspaceName,
			String cvgStoreName,
			final String gwStoreName,
			final Boolean equalizeHistogramOverride,
			final String interpolationOverride,
			final Boolean scaleTo8Bit ) {
		final DataStorePluginOptions inputStoreOptions = getStorePlugin(gwStoreName);

		if ((cvgStoreName == null) || cvgStoreName.isEmpty()) {
			cvgStoreName = gwStoreName + GeoServerConfig.DEFAULT_CS;
		}

		// Get the store's db config
		final Map<String, String> storeConfigMap = inputStoreOptions.getOptionsAsMap();

		// Add in geoserver coverage store info
		storeConfigMap.put(
				GeoServerConfig.GEOSERVER_WORKSPACE,
				workspaceName);

		storeConfigMap.put(
				"gwNamespace",
				inputStoreOptions.getGeowaveNamespace());

		storeConfigMap.put(
				GeoServerConfig.GEOSERVER_CS,
				cvgStoreName);

		final String cvgStoreXml = createCoverageXml(
				storeConfigMap,
				equalizeHistogramOverride,
				interpolationOverride,
				scaleTo8Bit);

		// FIX: use the logger rather than System.out for diagnostics
		LOGGER.debug("Add coverage store - xml params:\n" + cvgStoreXml);

		// create a new geoserver style
		return getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/coveragestores").request().post(
				Entity.entity(
						cvgStoreXml,
						MediaType.APPLICATION_XML));
	}

	/**
	 * Delete coverage store form geoserver
	 *
	 * @param workspaceName workspace the coverage store lives in
	 * @param cvgstoreName coverage store to delete (recursively)
	 * @return REST response
	 */
	public Response deleteCoverageStore(
			final String workspaceName,
			final String cvgstoreName ) {
		return getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/coveragestores/" + cvgstoreName).queryParam(
				"recurse",
				"true").request().delete();
	}

	/**
	 * Get a list of coverages (raster layers) from geoserver
	 *
	 * @param workspaceName workspace to query
	 * @param cvsstoreName coverage store to list coverages for
	 * @return REST response (JSON list of coverage names on success)
	 */
	public Response getCoverages(
			final String workspaceName,
			final String cvsstoreName ) {
		final Response resp = getWebTarget()
				.path(
						"rest/workspaces/" + workspaceName + "/coveragestores/" + cvsstoreName + "/coverages.json")
				.request()
				.get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			// get the datastore names
			final JSONArray coveragesArray = getArrayEntryNames(
					JSONObject.fromObject(resp.readEntity(String.class)),
					"coverages",
					"coverage");

			final JSONObject dsObj = new JSONObject();
			dsObj.put(
					"coverages",
					coveragesArray);

			return Response.ok(
					dsObj.toString(defaultIndentation)).build();
		}

		return resp;
	}

	/**
	 * Get coverage from geoserver
	 *
	 * @param workspaceName workspace to query
	 * @param cvgStoreName coverage store holding the coverage
	 * @param coverageName coverage to fetch
	 * @return REST response (pretty-printed JSON on success)
	 */
	public Response getCoverage(
			final String workspaceName,
			final String cvgStoreName,
			final String coverageName ) {
		final Response resp = getWebTarget()
				.path(
						"rest/workspaces/" + workspaceName + "/coveragestores/" + cvgStoreName + "/coverages/"
								+ coverageName + ".json")
				.request()
				.get();

		if (resp.getStatus() == Status.OK.getStatusCode()) {
			resp.bufferEntity();

			final JSONObject cvg = JSONObject.fromObject(resp.readEntity(String.class));

			if (cvg != null) {
				return Response.ok(
						cvg.toString(defaultIndentation)).build();
			}
		}

		return resp;
	}

	/**
	 * Add coverage to geoserver
	 *
	 * @param workspaceName target workspace
	 * @param cvgStoreName coverage store to add the coverage to
	 * @param coverageName coverage (native coverage) name
	 * @return REST response
	 */
	public Response addCoverage(
			final String workspaceName,
			final String cvgStoreName,
			final String coverageName ) {
		final String jsonString = "{'coverage':" + "{'name':'" + coverageName + "'," + "'nativeCoverageName':'"
				+ coverageName + "'}}";
		LOGGER.debug("Posting JSON: " + jsonString + " to " + workspaceName + "/" + cvgStoreName);

		return getWebTarget().path(
				"rest/workspaces/" + workspaceName + "/coveragestores/" + cvgStoreName + "/coverages").request().post(
				Entity.entity(
						jsonString,
						MediaType.APPLICATION_JSON));
	}

	/**
	 * Delete coverage from geoserver
	 *
	 * @param workspaceName workspace to query
	 * @param cvgstoreName coverage store holding the coverage
	 * @param coverageName coverage to delete (recursively)
	 * @return REST response
	 */
	public Response deleteCoverage(
			final String workspaceName,
			final String cvgstoreName,
			final String coverageName ) {
		return getWebTarget()
				.path(
						"rest/workspaces/" + workspaceName + "/coveragestores/" + cvgstoreName + "/coverages/"
								+ coverageName)
				.queryParam(
						"recurse",
						"true")
				.request()
				.delete();
	}

	// Internal methods

	// Builds {"featureType":{"name": featureTypeName}} as a JSON string.
	protected String createFeatureTypeJson(
			final String featureTypeName ) {
		final JSONObject featTypeJson = new JSONObject();

		featTypeJson.put(
				"name",
				featureTypeName);

		final JSONObject jsonObj = new JSONObject();
		jsonObj.put(
				"featureType",
				featTypeJson);

		return jsonObj.toString();
	}

	// Extracts the "name" fields of jsonObj[firstKey][secondKey], tolerating
	// GeoServer returning either a single object or an array at each level.
	protected JSONArray getArrayEntryNames(
			JSONObject jsonObj,
			final String firstKey,
			final String secondKey ) {
		// get the top level object/array
		if (jsonObj.get(firstKey) instanceof JSONObject) {
			jsonObj = jsonObj.getJSONObject(firstKey);
		}
		else if (jsonObj.get(firstKey) instanceof JSONArray) {
			final JSONArray tempArray = jsonObj.getJSONArray(firstKey);
			if (tempArray.size() > 0) {
				if (tempArray.get(0) instanceof JSONObject) {
					jsonObj = tempArray.getJSONObject(0);
				}
				else {
					// empty list!
					return new JSONArray();
				}
			}
		}

		// get the sub level object/array
		final JSONArray entryArray = new JSONArray();
		if (jsonObj.get(secondKey) instanceof JSONObject) {
			final JSONObject entry = new JSONObject();
			entry.put(
					"name",
					jsonObj.getJSONObject(
							secondKey).getString(
							"name"));
			entryArray.add(entry);
		}
		else if (jsonObj.get(secondKey) instanceof JSONArray) {
			final JSONArray entries = jsonObj.getJSONArray(secondKey);
			for (int i = 0; i < entries.size(); i++) {
				final JSONObject entry = new JSONObject();
				entry.put(
						"name",
						entries.getJSONObject(
								i).getString(
								"name"));
				entryArray.add(entry);
			}
		}
		return entryArray;
	}

	// Builds the GeoServer datastore-creation JSON payload for a GeoWave store.
	protected String createDatastoreJson(
			final String geowaveStoreType,
			final Map<String, String> geowaveStoreConfig,
			final String name,
			final String lockMgmt,
			final String authMgmtProvider,
			final String authDataUrl,
			final String queryIndexStrategy,
			final boolean enabled ) {
		final JSONObject dataStore = new JSONObject();
		dataStore.put(
				"name",
				name);
		dataStore.put(
				"type",
				GeoServerConfig.DISPLAY_NAME_PREFIX + geowaveStoreType);
		dataStore.put(
				"enabled",
				Boolean.toString(enabled));

		final JSONObject connParams = new JSONObject();

		if (geowaveStoreConfig != null) {
			for (final Entry<String, String> e : geowaveStoreConfig.entrySet()) {
				connParams.put(
						e.getKey(),
						e.getValue());
			}
		}
		connParams.put(
				"Lock Management",
				lockMgmt);

		connParams.put(
				GeoServerConfig.QUERY_INDEX_STRATEGY_KEY,
				queryIndexStrategy);

		connParams.put(
				"Authorization Management Provider",
				authMgmtProvider);
		if (!authMgmtProvider.equals("empty")) {
			connParams.put(
					"Authorization Data URL",
					authDataUrl);
		}

		dataStore.put(
				"connectionParameters",
				connParams);

		final JSONObject jsonObj = new JSONObject();
		jsonObj.put(
				"dataStore",
				dataStore);

		return jsonObj.toString();
	}

	// Builds the XML payload for creating a GeoWave raster coverage store.
	private String createCoverageXml(
			final Map<String, String> geowaveStoreConfig,
			final Boolean equalizeHistogramOverride,
			final String interpolationOverride,
			final Boolean scaleTo8Bit ) {
		String coverageXml = null;

		final String workspace = geowaveStoreConfig.get(GeoServerConfig.GEOSERVER_WORKSPACE);
		final String cvgstoreName = geowaveStoreConfig.get(GeoServerConfig.GEOSERVER_CS);

		StreamResult result = null;
		try {
			// create the post XML
			final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
			// XXE hardening: forbid external entity resolution
			factory.setFeature(
					"http://xml.org/sax/features/external-general-entities",
					false);
			factory.setFeature(
					"http://xml.org/sax/features/external-parameter-entities",
					false);
			final Document xmlDoc = factory.newDocumentBuilder().newDocument();

			final Element rootEl = xmlDoc.createElement("coverageStore");
			xmlDoc.appendChild(rootEl);

			final Element nameEl = xmlDoc.createElement("name");
			nameEl.appendChild(xmlDoc.createTextNode(cvgstoreName));
			rootEl.appendChild(nameEl);

			final Element wsEl = xmlDoc.createElement("workspace");
			wsEl.appendChild(xmlDoc.createTextNode(workspace));
			rootEl.appendChild(wsEl);

			final Element typeEl = xmlDoc.createElement("type");
			typeEl.appendChild(xmlDoc.createTextNode("GeoWaveRasterFormat"));
			rootEl.appendChild(typeEl);

			final Element enabledEl = xmlDoc.createElement("enabled");
			enabledEl.appendChild(xmlDoc.createTextNode("true"));
			rootEl.appendChild(enabledEl);

			final Element configEl = xmlDoc.createElement("configure");
			configEl.appendChild(xmlDoc.createTextNode("all"));
			rootEl.appendChild(configEl);

			// Method using custom URL & handler:
			final String storeConfigUrl = createParamUrl(
					geowaveStoreConfig,
					equalizeHistogramOverride,
					interpolationOverride,
					scaleTo8Bit);

			final Element urlEl = xmlDoc.createElement("url");
			urlEl.appendChild(xmlDoc.createTextNode(storeConfigUrl));
			rootEl.appendChild(urlEl);

			// use a transformer to create the xml string for the rest call
			final TransformerFactory xformerFactory = TransformerFactory.newInstance();
			// HP Fortify "XML External Entity Injection" false positive
			// The following modifications to xformerFactory are the
			// fortify-recommended procedure to secure a TransformerFactory
			// but the report still flags this instance
			xformerFactory.setFeature(
					XMLConstants.FEATURE_SECURE_PROCESSING,
					true);
			final Transformer xformer = xformerFactory.newTransformer();
			final DOMSource source = new DOMSource(
					xmlDoc);
			result = new StreamResult(
					new StringWriter());

			xformer.transform(
					source,
					result);

			coverageXml = result.getWriter().toString();
		}
		catch (final TransformerException e) {
			LOGGER.error(
					"Unable to create transformer",
					e);
		}
		catch (final ParserConfigurationException e1) {
			LOGGER.error(
					"Unable to create DocumentBuilderFactory",
					e1);
		}
		finally {
			if ((result != null) && (result.getWriter() != null)) {
				try {
					result.getWriter().close();
				}
				catch (final IOException e) {
					LOGGER.error(e);
				}
			}
		}

		return coverageXml;
	}

	// Builds the semicolon-delimited parameter URL consumed by the GeoWave
	// raster format's custom URL handler.
	private String createParamUrl(
			final Map<String, String> geowaveStoreConfig,
			final Boolean equalizeHistogramOverride,
			final String interpolationOverride,
			final Boolean scaleTo8Bit ) {
		// Retrieve store config
		final String user = geowaveStoreConfig.get("user");
		final String pass = geowaveStoreConfig.get("password");
		final String zookeeper = geowaveStoreConfig.get("zookeeper");
		final String instance = geowaveStoreConfig.get("instance");
		final String gwNamespace = geowaveStoreConfig.get("gwNamespace");

		// Create the custom geowave url w/ params
		// StringBuilder: method-local buffer, no synchronization needed
		final StringBuilder buf = new StringBuilder();

		buf.append("user=");
		buf.append(user);
		buf.append(";password=");
		buf.append(pass);
		buf.append(";zookeeper=");
		buf.append(zookeeper);
		buf.append(";instance=");
		buf.append(instance);
		buf.append(";gwNamespace=");
		buf.append(gwNamespace);
		if (equalizeHistogramOverride != null) {
			buf.append(";equalizeHistogramOverride=");
			buf.append(equalizeHistogramOverride);
		}
		if (interpolationOverride != null) {
			buf.append(";interpolationOverride=");
			buf.append(interpolationOverride);
		}
		if (scaleTo8Bit != null) {
			buf.append(";scaleTo8Bit=");
			buf.append(scaleTo8Bit);
		}

		return buf.toString();
	}

	/**
	 * Loads the GeoWave store plugin options for the named store.
	 *
	 * @param storeName GeoWave store name
	 * @return the loaded plugin options
	 * @throws ParameterException if the store cannot be found in the config
	 */
	public DataStorePluginOptions getStorePlugin(
			final String storeName ) {
		final StoreLoader inputStoreLoader = new StoreLoader(
				storeName);
		if (!inputStoreLoader.loadFromConfig(config.getPropFile())) {
			throw new ParameterException(
					"Cannot find store name: " + inputStoreLoader.getStoreName());
		}

		return inputStoreLoader.getDataStorePlugin();
	}

	/**
	 * Lists the adapter IDs of the named store that match the given selector.
	 *
	 * @param storeName GeoWave store name
	 * @param adapterId adapter selector (null, an ID, or an AddOption name)
	 * @return matching adapter IDs
	 */
	public ArrayList<String> getStoreAdapters(
			final String storeName,
			final String adapterId ) {
		final ArrayList<DataAdapterInfo> adapterInfoList = getStoreAdapterInfo(
				storeName,
				adapterId);

		final ArrayList<String> adapterIdList = new ArrayList<String>();

		for (final DataAdapterInfo info : adapterInfoList) {
			adapterIdList.add(info.adapterId);
		}

		return adapterIdList;
	}

	// Collects DataAdapterInfo for every adapter in the store that passes the
	// adapterId filter (see getAdapterInfo).
	private ArrayList<DataAdapterInfo> getStoreAdapterInfo(
			final String storeName,
			final String adapterId ) {
		final DataStorePluginOptions dsPlugin = getStorePlugin(storeName);

		final AdapterStore adapterStore = dsPlugin.createAdapterStore();

		final ArrayList<DataAdapterInfo> adapterInfoList = new ArrayList<DataAdapterInfo>();

		LOGGER.debug("Adapter list for " + storeName + " with adapterId = " + adapterId + ": ");

		try (final CloseableIterator<DataAdapter<?>> it = adapterStore.getAdapters()) {
			while (it.hasNext()) {
				final DataAdapter<?> adapter = it.next();

				final DataAdapterInfo info = getAdapterInfo(
						adapterId,
						adapter);

				if (info != null) {
					adapterInfoList.add(info);
					LOGGER.debug("> '" + info.adapterId + "' adapter passed filter");
				}
			}
		}
		catch (final IOException e) {
			LOGGER.error(
					"Unable to close adapter iterator while looking up coverage names",
					e);
		}

		LOGGER.debug("getStoreAdapterInfo(" + storeName + ") got " + adapterInfoList.size() + " ids");

		return adapterInfoList;
	}

	// Returns adapter info when the adapter matches the selector (null/ALL,
	// exact ID, RASTER, or VECTOR); null when it is filtered out.
	private DataAdapterInfo getAdapterInfo(
			final String adapterId,
			final DataAdapter adapter ) {
		LOGGER.debug("getAdapterInfo for id = " + adapterId);

		final DataAdapterInfo info = new DataAdapterInfo();
		info.adapterId = adapter.getAdapterId().getString();
		info.isRaster = false;

		if (adapter instanceof RasterDataAdapter) {
			info.isRaster = true;
		}

		LOGGER.debug("> Adapter ID: " + info.adapterId);
		LOGGER.debug("> Adapter Type: " + adapter.getClass().getSimpleName());

		if ((adapterId == null) || adapterId.equals(AddOption.ALL.name())) {
			LOGGER.debug("id is null or all");
			return info;
		}

		if (adapterId.equals(adapter.getAdapterId().getString())) {
			LOGGER.debug("id matches adapter id");
			return info;
		}

		if (adapterId.equals(AddOption.RASTER.name()) && (adapter instanceof RasterDataAdapter)) {
			LOGGER.debug("id is all-raster and adapter is raster type");
			return info;
		}

		if (adapterId.equals(AddOption.VECTOR.name()) && (adapter instanceof GeotoolsFeatureDataAdapter)) {
			LOGGER.debug("id is all-vector and adapter is vector type");
			return info;
		}

		LOGGER.debug("No match!");

		return null;
	}
}
/*
 * oxTrust is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
 *
 * Copyright (c) 2014, Gluu
 */
package org.gluu.oxtrust.service;

import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;

import org.gluu.model.GluuAttribute;
import org.gluu.model.GluuStatus;
import org.gluu.model.TrustContact;
import org.gluu.oxtrust.model.GluuMetadataSourceType;
import org.gluu.oxtrust.model.GluuSAMLTrustRelationship;
import org.gluu.oxtrust.util.OxTrustConstants;
import org.gluu.persist.PersistenceEntryManager;
import org.gluu.search.filter.Filter;
import org.gluu.service.XmlService;
import org.gluu.util.StringHelper;
import org.slf4j.Logger;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * Provides operations with trust relationships
 *
 * @author Pankaj
 * @author Yuriy Movchan Date: 11.05.2010
 *
 */
@ApplicationScoped
public class TrustService implements Serializable {

	private static final long serialVersionUID = -8128546040230316737L;

	@Inject
	private Logger log;

	@Inject
	private PersistenceEntryManager persistenceEntryManager;

	@Inject
	private AttributeService attributeService;

	@Inject
	private OrganizationService organizationService;

	@Inject
	private XmlService xmlService;

	// Thread-safe once configured; created once in init() and reused.
	private ObjectMapper objectMapper;

	@PostConstruct
	public void init() {
		this.objectMapper = new ObjectMapper();
	}

	public static final String GENERATED_SSL_ARTIFACTS_DIR = "ssl";

	/**
	 * Persists the trust relationship, or merges it if an entry with the same
	 * DN already exists.
	 */
	public void addTrustRelationship(GluuSAMLTrustRelationship trustRelationship) {
		log.debug("Adding TR: {}", trustRelationship.getInum());
		String dn = trustRelationship.getDn();
		if (!containsTrustRelationship(dn)) {
			log.debug("Adding TR: {}", dn);
			persistenceEntryManager.persist(trustRelationship);
		} else {
			persistenceEntryManager.merge(trustRelationship);
		}
	}

	/**
	 * Merges the trust relationship if it exists, otherwise persists it as a
	 * new entry (upsert).
	 */
	public void updateTrustRelationship(GluuSAMLTrustRelationship trustRelationship) {
		String dn = trustRelationship.getDn();
		boolean containsTrustRelationship = trustExist(dn);
		if (containsTrustRelationship) {
			log.info("Updating TR: {}", dn);
			persistenceEntryManager.merge(trustRelationship);
		} else {
			log.info("Adding TR: {}", dn);
			persistenceEntryManager.persist(trustRelationship);
		}
	}

	/** Removes the trust relationship if it exists; no-op otherwise. */
	public void removeTrustRelationship(GluuSAMLTrustRelationship trustRelationship) {
		log.info("Removing TR: {}", trustRelationship.getInum());
		String dn = trustRelationship.getDn();
		if (containsTrustRelationship(dn)) {
			log.debug("Removing TR: {}", dn);
			persistenceEntryManager.remove(trustRelationship);
		}
	}

	/**
	 * Looks up a trust relationship by inum.
	 *
	 * @return the relationship, or null if not found or the lookup fails
	 */
	public GluuSAMLTrustRelationship getRelationshipByInum(String inum) {
		try {
			return persistenceEntryManager.find(GluuSAMLTrustRelationship.class, getDnForTrustRelationShip(inum));
		} catch (Exception e) {
			// FIX: keep the stack trace instead of logging only the message.
			log.error(e.getMessage(), e);
			return null;
		}
	}

	/**
	 * Looks up a trust relationship by DN.
	 *
	 * @return the relationship, or null if dn is empty, not found, or the
	 *         lookup fails
	 */
	public GluuSAMLTrustRelationship getRelationshipByDn(String dn) {
		if (StringHelper.isNotEmpty(dn)) {
			try {
				return persistenceEntryManager.find(GluuSAMLTrustRelationship.class, dn);
			} catch (Exception e) {
				log.info(e.getMessage());
			}
		}
		return null;
	}

	/**
	 * This is a LDAP operation as LDAP and IDP will always be in sync. We can just
	 * call LDAP to fetch all Trust Relationships.
	 */
	public List<GluuSAMLTrustRelationship> getAllTrustRelationships() {
		return persistenceEntryManager.findEntries(getDnForTrustRelationShip(null), GluuSAMLTrustRelationship.class,
				null);
	}

	/** Returns only trust relationships whose status is ACTIVE. */
	public List<GluuSAMLTrustRelationship> getAllActiveTrustRelationships() {
		GluuSAMLTrustRelationship trustRelationship = new GluuSAMLTrustRelationship();
		trustRelationship.setBaseDn(getDnForTrustRelationShip(null));
		trustRelationship.setStatus(GluuStatus.ACTIVE);
		return persistenceEntryManager.findEntries(trustRelationship);
	}

	/** Returns all active trust relationships that are federations. */
	public List<GluuSAMLTrustRelationship> getAllFederations() {
		List<GluuSAMLTrustRelationship> result = new ArrayList<GluuSAMLTrustRelationship>();
		for (GluuSAMLTrustRelationship trust : getAllActiveTrustRelationships()) {
			if (trust.isFederation()) {
				result.add(trust);
			}
		}
		return result;
	}

	/** Returns all active federations except the one identified by inum. */
	public List<GluuSAMLTrustRelationship> getAllOtherFederations(String inum) {
		List<GluuSAMLTrustRelationship> result = getAllFederations();
		result.remove(getRelationshipByInum(inum));
		return result;
	}

	/**
	 * Check if LDAP server contains trust relationship with specified attributes
	 *
	 * @return True if trust relationship with specified attributes exist
	 */
	public boolean containsTrustRelationship(String dn) {
		return persistenceEntryManager.contains(dn, GluuSAMLTrustRelationship.class);
	}

	/**
	 * Checks existence by attempting a full load; unlike
	 * containsTrustRelationship this swallows lookup failures and treats them
	 * as "not found".
	 */
	public boolean trustExist(String dn) {
		GluuSAMLTrustRelationship trust = null;
		try {
			trust = persistenceEntryManager.find(GluuSAMLTrustRelationship.class, dn);
		} catch (Exception e) {
			trust = null;
		}
		// FIX: simplified redundant `? true : false`.
		return trust != null;
	}

	/**
	 * Generate new inum for trust relationship
	 *
	 * @return New inum for trust relationship
	 */
	public String generateInumForNewTrustRelationship() {
		String newDn = null;
		String newInum = null;
		do {
			newInum = generateInumForNewTrustRelationshipImpl();
			newDn = getDnForTrustRelationShip(newInum);
		} while (containsTrustRelationship(newDn));

		return newInum;
	}

	/**
	 * Generate new inum for trust relationship
	 *
	 * @return New inum for trust relationship
	 */
	private String generateInumForNewTrustRelationshipImpl() {
		return UUID.randomUUID().toString();
	}

	/**
	 * Get all metadata source types
	 *
	 * @return Array of metadata source types
	 */
	public GluuMetadataSourceType[] getMetadataSourceTypes() {
		return GluuMetadataSourceType.values();
	}

	/**
	 * Build DN string for trust relationship
	 *
	 * @param inum
	 *            Inum
	 * @return DN string for specified trust relationship or DN for trust
	 *         relationships branch if inum is null
	 */
	public String getDnForTrustRelationShip(String inum) {
		String organizationDN = organizationService.getDnForOrganization();
		if (StringHelper.isEmpty(inum)) {
			return String.format("ou=trustRelationships,%s", organizationDN);
		}
		return String.format("inum=%s,ou=trustRelationships,%s", inum, organizationDN);
	}

	/**
	 * Deserializes the relationship's stored contact strings.
	 *
	 * @return contacts list (possibly empty, never null)
	 */
	public List<TrustContact> getContacts(GluuSAMLTrustRelationship trustRelationship) {
		List<String> gluuTrustContacts = trustRelationship.getGluuTrustContact();
		List<TrustContact> contacts = new ArrayList<TrustContact>();
		if (gluuTrustContacts != null) {
			for (String contact : gluuTrustContacts) {
				contacts.add(getTrustContactFromString(contact));
			}
		}
		return contacts;
	}

	/**
	 * Serializes and stores the contacts on the relationship. A null or empty
	 * list leaves the existing stored contacts untouched (it does NOT clear
	 * them).
	 */
	public void saveContacts(GluuSAMLTrustRelationship trustRelationship, List<TrustContact> contacts) {
		if (contacts != null && !contacts.isEmpty()) {
			List<String> gluuTrustContacts = new ArrayList<String>();
			for (TrustContact contact : contacts) {
				gluuTrustContacts.add(getStringFromTrustContact(contact));
			}
			trustRelationship.setGluuTrustContact(gluuTrustContacts);
		}
	}

	/**
	 * Returns the trust relationships whose container federation is the given
	 * federation (i.e. the trusts deconstructed from it).
	 */
	public List<GluuSAMLTrustRelationship> getDeconstructedTrustRelationships(
			GluuSAMLTrustRelationship trustRelationship) {
		List<GluuSAMLTrustRelationship> result = new ArrayList<GluuSAMLTrustRelationship>();
		for (GluuSAMLTrustRelationship trust : getAllTrustRelationships()) {
			if (trustRelationship.equals(getTrustContainerFederation(trust))) {
				result.add(trust);
			}
		}
		return result;
	}

	/**
	 * Returns all non-federation trusts whose container-federation DN matches
	 * the given relationship's DN.
	 */
	public List<GluuSAMLTrustRelationship> getChildTrusts(GluuSAMLTrustRelationship trustRelationship) {
		List<GluuSAMLTrustRelationship> all = getAllTrustRelationships();
		if (all != null && !all.isEmpty()) {
			// FIX: guard against trusts with no container federation to avoid
			// an NPE in equalsIgnoreCase.
			return all.stream().filter(e -> !e.isFederation())
					.filter(e -> e.getGluuContainerFederation() != null
							&& e.getGluuContainerFederation().equalsIgnoreCase(trustRelationship.getDn()))
					.collect(Collectors.toList());
		} else {
			return new ArrayList<GluuSAMLTrustRelationship>();
		}
	}

	/**
	 * Finds a trust relationship whose inum, with punctuation removed, equals
	 * the given string.
	 *
	 * @return the matching trust, or null
	 */
	public GluuSAMLTrustRelationship getTrustByUnpunctuatedInum(String unpunctuated) {
		for (GluuSAMLTrustRelationship trust : getAllTrustRelationships()) {
			if (StringHelper.removePunctuation(trust.getInum()).equals(unpunctuated)) {
				return trust;
			}
		}
		return null;
	}

	/**
	 * Returns the federation that contains the given trust relationship, or
	 * null if it has none (or the lookup fails).
	 */
	public GluuSAMLTrustRelationship getTrustContainerFederation(GluuSAMLTrustRelationship trustRelationship) {
		// FIX: look up the parent federation by the trust's container-federation
		// DN. The previous code used trustRelationship.getDn(), which simply
		// reloaded the trust itself and broke getDeconstructedTrustRelationships.
		return getRelationshipByDn(trustRelationship.getGluuContainerFederation());
	}

	/** Looks up a container federation by its DN. */
	public GluuSAMLTrustRelationship getTrustContainerFederation(String dn) {
		GluuSAMLTrustRelationship relationshipByDn = getRelationshipByDn(dn);
		return relationshipByDn;
	}

	/**
	 * Searches trust relationships by substring match on display name,
	 * description, or inum.
	 *
	 * @param pattern   substring to match
	 * @param sizeLimit maximum number of results
	 */
	public List<GluuSAMLTrustRelationship> searchSAMLTrustRelationships(String pattern, int sizeLimit) {
		String[] targetArray = new String[] { pattern };
		Filter displayNameFilter = Filter.createSubstringFilter(OxTrustConstants.displayName, null, targetArray, null);
		Filter descriptionFilter = Filter.createSubstringFilter(OxTrustConstants.description, null, targetArray, null);
		Filter inumFilter = Filter.createSubstringFilter(OxTrustConstants.inum, null, targetArray, null);
		Filter searchFilter = Filter.createORFilter(displayNameFilter, descriptionFilter, inumFilter);

		return persistenceEntryManager.findEntries(getDnForTrustRelationShip(null), GluuSAMLTrustRelationship.class,
				searchFilter, sizeLimit);
	}

	/** Returns up to sizeLimit trust relationships, unfiltered. */
	public List<GluuSAMLTrustRelationship> getAllSAMLTrustRelationships(int sizeLimit) {
		return persistenceEntryManager.findEntries(getDnForTrustRelationShip(null), GluuSAMLTrustRelationship.class,
				null, sizeLimit);
	}

	/**
	 * Remove attribute
	 *
	 * @param attribute
	 *            Attribute
	 */
	public boolean removeAttribute(GluuAttribute attribute) {
		log.trace("Removing attribute from trustRelationships");
		List<GluuSAMLTrustRelationship> trustRelationships = getAllTrustRelationships();
		log.trace(String.format("Iterating '%d' trustRelationships", trustRelationships.size()));
		for (GluuSAMLTrustRelationship trustRelationship : trustRelationships) {
			// FIX: SLF4J uses {} placeholders, not printf-style %s — the
			// previous messages logged the literal '%s'.
			log.trace("Analyzing '{}'.", trustRelationship.getDisplayName());
			List<String> customAttrs = trustRelationship.getReleasedAttributes();
			if (customAttrs != null) {
				for (String attrDN : customAttrs) {
					log.trace("'{}' has custom attribute '{}'", trustRelationship.getDisplayName(), attrDN);
					if (attrDN.equals(attribute.getDn())) {
						log.trace("'{}' matches '{}'. deleting it.", attrDN, attribute.getDn());
						List<String> updatedAttrs = new ArrayList<String>();
						updatedAttrs.addAll(customAttrs);
						updatedAttrs.remove(attrDN);
						if (updatedAttrs.size() == 0) {
							trustRelationship.setReleasedAttributes(null);
						} else {
							trustRelationship.setReleasedAttributes(updatedAttrs);
						}
						updateTrustRelationship(trustRelationship);
						break;
					}
				}
			}
		}
		attributeService.removeAttribute(attribute);
		return true;
	}

	/**
	 * Parses a stored contact string. Strings starting with '&lt;' are treated
	 * as legacy XML ({@code /trustContact/...}); anything else is parsed as
	 * JSON. Returns null for null input or on a parse failure.
	 */
	public TrustContact getTrustContactFromString(String data) {
		if (data == null) {
			return null;
		}

		// Try to convert from XML first
		if (data.startsWith("<")) {
			Document doc;
			try {
				doc = xmlService.getXmlDocument(data, true);
				String name = xmlService.getNodeValue(doc, "/trustContact/name", null);
				String mail = xmlService.getNodeValue(doc, "/trustContact/mail", null);
				String phone = xmlService.getNodeValue(doc, "/trustContact/phone", null);
				String title = xmlService.getNodeValue(doc, "/trustContact/title", null);

				TrustContact trustContact = new TrustContact();
				trustContact.setName(name);
				// FIX: phone and mail were swapped (setPhone(mail) /
				// setMail(phone)), corrupting contacts migrated from XML.
				trustContact.setPhone(phone);
				trustContact.setMail(mail);
				trustContact.setTitle(title);

				return trustContact;
			} catch (SAXException | IOException | ParserConfigurationException | XPathExpressionException ex) {
				// FIX: SLF4J expects the throwable LAST; the previous argument
				// order formatted the exception into the placeholder and
				// dropped the stack trace.
				log.error("Failed to create TrustContact from XML {}", data, ex);
				return null;
			}
		} else {
			JsonNode rootNode;
			try {
				rootNode = objectMapper.readTree(data);
			} catch (IOException ex) {
				log.error("Failed to create TrustContact from JSON {}", data, ex);
				return null;
			}

			TrustContact trustContact = new TrustContact();
			if (rootNode.hasNonNull("name")) {
				trustContact.setName(rootNode.get("name").asText());
			}
			if (rootNode.hasNonNull("phone")) {
				trustContact.setPhone(rootNode.get("phone").asText());
			}
			if (rootNode.hasNonNull("mail")) {
				trustContact.setMail(rootNode.get("mail").asText());
			}
			if (rootNode.hasNonNull("title")) {
				trustContact.setTitle(rootNode.get("title").asText());
			}

			return trustContact;
		}
	}

	/**
	 * Serializes a contact to the JSON string format read back by
	 * getTrustContactFromString. Empty fields are omitted.
	 *
	 * @return the JSON string, or null for a null contact
	 */
	public String getStringFromTrustContact(TrustContact contact) {
		if (contact == null) {
			return null;
		}

		ObjectNode rootNode = objectMapper.createObjectNode();
		if (StringHelper.isNotEmpty(contact.getName())) {
			rootNode.put("name", contact.getName());
		}
		if (StringHelper.isNotEmpty(contact.getPhone())) {
			rootNode.put("phone", contact.getPhone());
		}
		if (StringHelper.isNotEmpty(contact.getMail())) {
			rootNode.put("mail", contact.getMail());
		}
		if (StringHelper.isNotEmpty(contact.getTitle())) {
			rootNode.put("title", contact.getTitle());
		}

		return rootNode.toString();
	}
}